commit c6c758207b4c38278aaaf3d824378514143a98d6
Author: Paul
Date:   Sat Nov 14 23:47:31 2020 +0100

    initial commit

diff --git a/ML_U3_2.ipynb b/ML_U3_2.ipynb
new file mode 100644
index 0000000..2b10c73
--- /dev/null
+++ b/ML_U3_2.ipynb
@@ -0,0 +1,195 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Machine Learning (ML) - Exercise 3\n",
+    "# Perceptrons and Multi-Layer Perceptrons\n",
+    "# 3.2 Loss Function, Backpropagation and Gradient Descent\n",
+    "\n",
+    "In this exercise, the update rule (learning algorithm) from the lecture is replaced by error backpropagation and gradient descent.\n",
+    "\n",
+    "You should adapt the perceptron that you have already implemented."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# matplotlib: module for plotting data\n",
+    "from matplotlib import pyplot as plt\n",
+    "\n",
+    "# numpy: numerics library\n",
+    "import numpy as np\n",
+    "import pandas as pd\n",
+    "import time"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "**Task 1:** Replace the current activation in the *predict* method with the sigmoid function."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "**Task 2:** Implement the described gradient-based learning procedure with backpropagation in the *fit* method."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "**Task 3:** Look at the output of the perceptron (perceptron.predict(.)) on one of the datasets used so far (e.g. *AND*, Iris). What do you notice compared to a perceptron that uses the signum function as activation? What does that mean for its use as a binary classifier?"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "class Perceptron(object):\n",
+    "    def __init__(self, number_of_inputs, epochs, eta):\n",
+    "        \"\"\"\n",
+    "        Example call of the constructor:\n",
+    "        >>> Perceptron(2, 100, 0.1)\n",
+    "        \"\"\"\n",
+    "        ### Your code goes here:\n",
+    "        \n",
+    "        ##########################\n",
+    "        pass\n",
+    "    \n",
+    "    def predict(self, inputs):\n",
+    "        \"\"\"\n",
+    "        Example call:\n",
+    "        >>> inputs = np.array([0, 1])\n",
+    "        >>> h = perceptron.predict(inputs)\n",
+    "        \"\"\"\n",
+    "        \n",
+    "        # Your code goes here:\n",
+    "        \n",
+    "        ##########################\n",
+    "        \n",
+    "        pass\n",
+    "\n",
+    "    def fit(self, training_inputs, labels):\n",
+    "        \"\"\"\n",
+    "        Example call:\n",
+    "        >>> perceptron.fit(train_input, labels)\n",
+    "        \"\"\"\n",
+    "        \n",
+    "        # Your code goes here:\n",
+    "        \n",
+    "        ##########################\n",
+    "        pass\n",
+    "    \n",
+    "    def status(self):\n",
+    "        \"\"\"\n",
+    "        The status(...) method prints the current weights.\n",
+    "\n",
+    "        Example call and output:\n",
+    "        >>> perceptron.status()\n",
+    "        Perceptron weights: [0. 1. 1.]\n",
+    "        \"\"\"\n",
+    "        print(\"Perceptron weights: \", self.weights)\n",
+    "    \n",
+    "    def getWeights(self):\n",
+    "        return self.weights"
+   ]
+  },
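+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The following cell is a minimal, non-authoritative sketch of how Tasks 1 and 2 could look: a sigmoid activation in *predict* and a stochastic gradient-descent update of the squared error in *fit*. The class name *SketchPerceptron* and its attribute names are illustrative assumptions, not the interface required by the exercise."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Minimal sketch (assumption: same constructor arguments as Perceptron above).\n",
+    "class SketchPerceptron(object):\n",
+    "    def __init__(self, number_of_inputs, epochs, eta):\n",
+    "        self.epochs = epochs                           # passes over the training data\n",
+    "        self.eta = eta                                 # learning rate\n",
+    "        self.weights = np.zeros(number_of_inputs + 1)  # weights, index 0 is the bias\n",
+    "\n",
+    "    def predict(self, inputs):\n",
+    "        # Weighted sum plus bias, passed through the sigmoid (Task 1).\n",
+    "        z = np.dot(inputs, self.weights[1:]) + self.weights[0]\n",
+    "        return 1.0 / (1.0 + np.exp(-z))\n",
+    "\n",
+    "    def fit(self, training_inputs, labels):\n",
+    "        # Stochastic gradient descent on the squared error E = 1/2 * (y - h)^2 (Task 2).\n",
+    "        # With h = sigmoid(z), the chain rule gives the update signal delta = (y - h) * h * (1 - h).\n",
+    "        for _ in range(self.epochs):\n",
+    "            for x, y in zip(training_inputs, labels):\n",
+    "                x = np.asarray(x, dtype=float)\n",
+    "                h = self.predict(x)\n",
+    "                delta = (y - h) * h * (1.0 - h)\n",
+    "                self.weights[1:] += self.eta * delta * x\n",
+    "                self.weights[0] += self.eta * delta\n",
+    "\n",
+    "# Possible usage (train_input and labels_AND are defined further below):\n",
+    "# sketch = SketchPerceptron(2, 1000, 0.1)\n",
+    "# sketch.fit(train_input, labels_AND)\n",
+    "# sketch.predict(np.array([1, 1]))  # a value in (0, 1); threshold at 0.5 for a binary label (cf. Task 3)\n"
+   ]
+  },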
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# AND dataset\n",
+    "train_input = np.array([\n",
+    "    [0, 0],\n",
+    "    [0, 1],\n",
+    "    [1, 0],\n",
+    "    [1, 1]\n",
+    "    ])\n",
+    "\n",
+    "labels_AND = np.array([0, 0, 0, 1])\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Example with OR\n",
+    "labels_OR = np.array([0, 1, 1, 1])\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Load and prepare the Iris dataset (see exercise 2)\n",
+    "\n",
+    "# Load the dataset\n",
+    "names = [\"sepal-length\", \"sepal-width\", \"petal-length\", \"petal-width\", \"class\"]\n",
+    "iris_data = pd.read_csv(\"iris.csv\", names=names)\n",
+    "\n",
+    "# Select the classes (change if needed)\n",
+    "iris_data = iris_data.loc[lambda x: x['class'] != 'Iris-setosa']\n",
+    "\n",
+    "# Select the features (change if needed)\n",
+    "iris_features = ['petal-length', 'petal-width']\n",
+    "X = iris_data[iris_features]\n",
+    "# Convert the pandas data structure into a plain NumPy array\n",
+    "X = X.values\n",
+    "\n",
+    "# Prepare the labels\n",
+    "from sklearn.preprocessing import LabelEncoder\n",
+    "lb_make = LabelEncoder()\n",
+    "iris_data[\"class_code\"] = lb_make.fit_transform(iris_data[\"class\"])\n",
+    "y = iris_data.class_code\n",
+    "y = y.values\n",
+    "\n",
+    "# Train/test split\n",
+    "from sklearn.model_selection import train_test_split\n",
+    "X_train, X_test, y_train, y_test = (\n",
+    "    train_test_split(X, y, test_size=0.2, random_state=42))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.4"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Readme.txt b/Readme.txt
new file mode 100644
index 0000000..ba17bfa
--- /dev/null
+++ b/Readme.txt
@@ -0,0 +1,2 @@
+Name: Paul Lödige
+Matriculation number: 15405036
diff --git a/iris.csv b/iris.csv
new file mode 100644
index 0000000..5c4316c
--- /dev/null
+++ b/iris.csv
@@ -0,0 +1,151 @@
+5.1,3.5,1.4,0.2,Iris-setosa
+4.9,3.0,1.4,0.2,Iris-setosa
+4.7,3.2,1.3,0.2,Iris-setosa
+4.6,3.1,1.5,0.2,Iris-setosa
+5.0,3.6,1.4,0.2,Iris-setosa
+5.4,3.9,1.7,0.4,Iris-setosa
+4.6,3.4,1.4,0.3,Iris-setosa
+5.0,3.4,1.5,0.2,Iris-setosa
+4.4,2.9,1.4,0.2,Iris-setosa
+4.9,3.1,1.5,0.1,Iris-setosa
+5.4,3.7,1.5,0.2,Iris-setosa
+4.8,3.4,1.6,0.2,Iris-setosa
+4.8,3.0,1.4,0.1,Iris-setosa
+4.3,3.0,1.1,0.1,Iris-setosa
+5.8,4.0,1.2,0.2,Iris-setosa
+5.7,4.4,1.5,0.4,Iris-setosa
+5.4,3.9,1.3,0.4,Iris-setosa
+5.1,3.5,1.4,0.3,Iris-setosa
+5.7,3.8,1.7,0.3,Iris-setosa
+5.1,3.8,1.5,0.3,Iris-setosa
+5.4,3.4,1.7,0.2,Iris-setosa
+5.1,3.7,1.5,0.4,Iris-setosa
+4.6,3.6,1.0,0.2,Iris-setosa
+5.1,3.3,1.7,0.5,Iris-setosa
+4.8,3.4,1.9,0.2,Iris-setosa
+5.0,3.0,1.6,0.2,Iris-setosa
+5.0,3.4,1.6,0.4,Iris-setosa
+5.2,3.5,1.5,0.2,Iris-setosa
+5.2,3.4,1.4,0.2,Iris-setosa
+4.7,3.2,1.6,0.2,Iris-setosa
+4.8,3.1,1.6,0.2,Iris-setosa
+5.4,3.4,1.5,0.4,Iris-setosa
+5.2,4.1,1.5,0.1,Iris-setosa
+5.5,4.2,1.4,0.2,Iris-setosa
+4.9,3.1,1.5,0.1,Iris-setosa +5.0,3.2,1.2,0.2,Iris-setosa +5.5,3.5,1.3,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +4.4,3.0,1.3,0.2,Iris-setosa +5.1,3.4,1.5,0.2,Iris-setosa +5.0,3.5,1.3,0.3,Iris-setosa +4.5,2.3,1.3,0.3,Iris-setosa +4.4,3.2,1.3,0.2,Iris-setosa +5.0,3.5,1.6,0.6,Iris-setosa +5.1,3.8,1.9,0.4,Iris-setosa +4.8,3.0,1.4,0.3,Iris-setosa +5.1,3.8,1.6,0.2,Iris-setosa +4.6,3.2,1.4,0.2,Iris-setosa +5.3,3.7,1.5,0.2,Iris-setosa +5.0,3.3,1.4,0.2,Iris-setosa +7.0,3.2,4.7,1.4,Iris-versicolor +6.4,3.2,4.5,1.5,Iris-versicolor +6.9,3.1,4.9,1.5,Iris-versicolor +5.5,2.3,4.0,1.3,Iris-versicolor +6.5,2.8,4.6,1.5,Iris-versicolor +5.7,2.8,4.5,1.3,Iris-versicolor +6.3,3.3,4.7,1.6,Iris-versicolor +4.9,2.4,3.3,1.0,Iris-versicolor +6.6,2.9,4.6,1.3,Iris-versicolor +5.2,2.7,3.9,1.4,Iris-versicolor +5.0,2.0,3.5,1.0,Iris-versicolor +5.9,3.0,4.2,1.5,Iris-versicolor +6.0,2.2,4.0,1.0,Iris-versicolor +6.1,2.9,4.7,1.4,Iris-versicolor +5.6,2.9,3.6,1.3,Iris-versicolor +6.7,3.1,4.4,1.4,Iris-versicolor +5.6,3.0,4.5,1.5,Iris-versicolor +5.8,2.7,4.1,1.0,Iris-versicolor +6.2,2.2,4.5,1.5,Iris-versicolor +5.6,2.5,3.9,1.1,Iris-versicolor +5.9,3.2,4.8,1.8,Iris-versicolor +6.1,2.8,4.0,1.3,Iris-versicolor +6.3,2.5,4.9,1.5,Iris-versicolor +6.1,2.8,4.7,1.2,Iris-versicolor +6.4,2.9,4.3,1.3,Iris-versicolor +6.6,3.0,4.4,1.4,Iris-versicolor +6.8,2.8,4.8,1.4,Iris-versicolor +6.7,3.0,5.0,1.7,Iris-versicolor +6.0,2.9,4.5,1.5,Iris-versicolor +5.7,2.6,3.5,1.0,Iris-versicolor +5.5,2.4,3.8,1.1,Iris-versicolor +5.5,2.4,3.7,1.0,Iris-versicolor +5.8,2.7,3.9,1.2,Iris-versicolor +6.0,2.7,5.1,1.6,Iris-versicolor +5.4,3.0,4.5,1.5,Iris-versicolor +6.0,3.4,4.5,1.6,Iris-versicolor +6.7,3.1,4.7,1.5,Iris-versicolor +6.3,2.3,4.4,1.3,Iris-versicolor +5.6,3.0,4.1,1.3,Iris-versicolor +5.5,2.5,4.0,1.3,Iris-versicolor +5.5,2.6,4.4,1.2,Iris-versicolor +6.1,3.0,4.6,1.4,Iris-versicolor +5.8,2.6,4.0,1.2,Iris-versicolor +5.0,2.3,3.3,1.0,Iris-versicolor +5.6,2.7,4.2,1.3,Iris-versicolor +5.7,3.0,4.2,1.2,Iris-versicolor +5.7,2.9,4.2,1.3,Iris-versicolor +6.2,2.9,4.3,1.3,Iris-versicolor +5.1,2.5,3.0,1.1,Iris-versicolor +5.7,2.8,4.1,1.3,Iris-versicolor +6.3,3.3,6.0,2.5,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +7.1,3.0,5.9,2.1,Iris-virginica +6.3,2.9,5.6,1.8,Iris-virginica +6.5,3.0,5.8,2.2,Iris-virginica +7.6,3.0,6.6,2.1,Iris-virginica +4.9,2.5,4.5,1.7,Iris-virginica +7.3,2.9,6.3,1.8,Iris-virginica +6.7,2.5,5.8,1.8,Iris-virginica +7.2,3.6,6.1,2.5,Iris-virginica +6.5,3.2,5.1,2.0,Iris-virginica +6.4,2.7,5.3,1.9,Iris-virginica +6.8,3.0,5.5,2.1,Iris-virginica +5.7,2.5,5.0,2.0,Iris-virginica +5.8,2.8,5.1,2.4,Iris-virginica +6.4,3.2,5.3,2.3,Iris-virginica +6.5,3.0,5.5,1.8,Iris-virginica +7.7,3.8,6.7,2.2,Iris-virginica +7.7,2.6,6.9,2.3,Iris-virginica +6.0,2.2,5.0,1.5,Iris-virginica +6.9,3.2,5.7,2.3,Iris-virginica +5.6,2.8,4.9,2.0,Iris-virginica +7.7,2.8,6.7,2.0,Iris-virginica +6.3,2.7,4.9,1.8,Iris-virginica +6.7,3.3,5.7,2.1,Iris-virginica +7.2,3.2,6.0,1.8,Iris-virginica +6.2,2.8,4.8,1.8,Iris-virginica +6.1,3.0,4.9,1.8,Iris-virginica +6.4,2.8,5.6,2.1,Iris-virginica +7.2,3.0,5.8,1.6,Iris-virginica +7.4,2.8,6.1,1.9,Iris-virginica +7.9,3.8,6.4,2.0,Iris-virginica +6.4,2.8,5.6,2.2,Iris-virginica +6.3,2.8,5.1,1.5,Iris-virginica +6.1,2.6,5.6,1.4,Iris-virginica +7.7,3.0,6.1,2.3,Iris-virginica +6.3,3.4,5.6,2.4,Iris-virginica +6.4,3.1,5.5,1.8,Iris-virginica +6.0,3.0,4.8,1.8,Iris-virginica +6.9,3.1,5.4,2.1,Iris-virginica +6.7,3.1,5.6,2.4,Iris-virginica +6.9,3.1,5.1,2.3,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +6.8,3.2,5.9,2.3,Iris-virginica +6.7,3.3,5.7,2.5,Iris-virginica 
+6.7,3.0,5.2,2.3,Iris-virginica +6.3,2.5,5.0,1.9,Iris-virginica +6.5,3.0,5.2,2.0,Iris-virginica +6.2,3.4,5.4,2.3,Iris-virginica +5.9,3.0,5.1,1.8,Iris-virginica +