diff --git a/00_numpy_pandas_matplotlib_intro.ipynb b/00_numpy_pandas_matplotlib_intro.ipynb
index 41ccde65fd6d3d80e0b24375cf9ab9bd352050fa..64fbbce5017339d8010116807e08e28579cbf896 100644
--- a/00_numpy_pandas_matplotlib_intro.ipynb
+++ b/00_numpy_pandas_matplotlib_intro.ipynb
@@ -182,7 +182,7 @@
    ],
    "source": [
     "# show content of csv file, only works in notebook:\n",
-    "!cat example.csv"
+    "!cat data/example.csv"
    ]
   },
   {
@@ -286,7 +286,7 @@
     "\n",
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"example.csv\")\n",
+    "df = pd.read_csv(\"data/example.csv\")\n",
     "df"
    ]
   },
diff --git a/01_introduction.ipynb b/01_introduction.ipynb
index e6f1b6e70fd7244f923b7755c9a775383bf184ed..a448e2f1222a0cb300836001d6ed828f43139aa2 100644
--- a/01_introduction.ipynb
+++ b/01_introduction.ipynb
@@ -358,7 +358,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "features = pd.read_csv(\"beers.csv\")\n",
+    "features = pd.read_csv(\"data/beers.csv\")\n",
     "features.head()"
    ]
   },
@@ -869,7 +869,7 @@
     "import pandas as pd\n",
     "\n",
     "# read some data\n",
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "print(beer_data.shape)"
    ]
   },
diff --git a/02_classification.ipynb b/02_classification.ipynb
index 88ea2b61d6e1167701e56a443698228403b9572f..320ce895872a431916ae5e71c38f4d25a94009b0 100644
--- a/02_classification.ipynb
+++ b/02_classification.ipynb
@@ -308,7 +308,7 @@
     "import seaborn as sns\n",
     "sns.set(style=\"ticks\")\n",
     "\n",
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "\n",
     "for_plot = beer_data.copy()\n",
     "\n",
@@ -1004,7 +1004,7 @@
     }
    ],
    "source": [
-    "df = pd.read_csv(\"2d_points.csv\")\n",
+    "df = pd.read_csv(\"data/circle.csv\")\n",
     "df.head(3)"
    ]
   },
@@ -1220,7 +1220,7 @@
     }
    ],
    "source": [
-    "xor = pd.read_csv(\"xor.csv\")\n",
+    "xor = pd.read_csv(\"data/xor.csv\")\n",
     "xor.head()"
    ]
   },
@@ -1484,7 +1484,7 @@
     "from sklearn.preprocessing import PolynomialFeatures\n",
     "\n",
     "# using first 10 samples from XOR data\n",
-    "df = pd.read_csv(\"xor.csv\")\n",
+    "df = pd.read_csv(\"data/xor.csv\")\n",
     "features = df.iloc[:, :-1]\n",
     "features.head()"
    ]
@@ -1721,7 +1721,7 @@
     "from sklearn.linear_model import LogisticRegression\n",
     "from sklearn.preprocessing import PolynomialFeatures\n",
     "\n",
-    "df = pd.read_csv(\"xor.csv\")\n",
+    "df = pd.read_csv(\"data/xor.csv\")\n",
     "features = df.iloc[:, :-1]\n",
     "labels = df.iloc[:, -1]\n",
     "\n",
@@ -1780,7 +1780,7 @@
     "from sklearn.linear_model import LogisticRegression\n",
     "from sklearn.preprocessing import PolynomialFeatures\n",
     "\n",
-    "df = pd.read_csv(\"xor.csv\")\n",
+    "df = pd.read_csv(\"data/xor.csv\")\n",
     "features = df.iloc[:, :-1]\n",
     "labels = df.iloc[:, -1]\n",
     "\n",
@@ -1798,7 +1798,7 @@
    "source": [
     "### b) Comparison of decision surfaces for different classifiers and datasets\n",
     "\n",
-    "Compare decision surfaces for different classifiers listed below for both `\"xor.csv\"` and `\"2d_points.csv\"` (circle) datasets. For which classifiers does it help to add polynomial features? How many degrees suffice?"
+    "Compare decision surfaces for different classifiers listed below for both `\"data/xor.csv\"` and `\"data/circle.csv\"` datasets. For which classifiers does it help to add polynomial features? How many degrees suffice?"
    ]
   },
   {
@@ -1896,7 +1896,7 @@
     "    train_and_plot_decision_surface(\"KNeighborsClassifier\", clf, features, labels, preproc=preproc, N=300)\n",
     "\n",
     "\n",
-    "try_dataset(\"xor.csv\", PolynomialFeatures(2, include_bias=False))\n"
+    "try_dataset(\"data/xor.csv\", PolynomialFeatures(2, include_bias=False))\n"
    ]
   },
   {
@@ -1910,7 +1910,7 @@
    },
    "outputs": [],
    "source": [
-    "try_dataset(\"2d_points.csv\", PolynomialFeatures(2, include_bias=False))"
+    "try_dataset(\"data/circle.csv\", PolynomialFeatures(2, include_bias=False))"
    ]
   },
   {
diff --git a/03_overfitting_and_cross_validation.ipynb b/03_overfitting_and_cross_validation.ipynb
index 91ff789889c0a2e1511ae7d0b86fbc2fe01de468..0e90b81d1f06039b112a110612ad8354a70ca8b6 100644
--- a/03_overfitting_and_cross_validation.ipynb
+++ b/03_overfitting_and_cross_validation.ipynb
@@ -167,7 +167,7 @@
     "import pandas as pd\n",
     "\n",
     "# reading the beer dataset\n",
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "print(beer_data.shape)\n",
     "\n",
     "# all columns up to the last one:\n",
@@ -211,7 +211,7 @@
     }
    ],
    "source": [
-    "eval_data = pd.read_csv(\"beers_eval.csv\")\n",
+    "eval_data = pd.read_csv(\"data/beers_eval.csv\")\n",
     "print(eval_data.shape)"
    ]
   },
@@ -277,7 +277,7 @@
     }
    ],
    "source": [
-    "data = pd.read_csv(\"2d_points.csv\")\n",
+    "data = pd.read_csv(\"data/circle.csv\")\n",
     "features = data.iloc[:, :-1]\n",
     "labels = data.iloc[:, -1]\n",
     "\n",
@@ -397,7 +397,7 @@
    "source": [
     "from sklearn.svm import SVC\n",
     "\n",
-    "df = pd.read_csv(\"2d_points.csv\")\n",
+    "df = pd.read_csv(\"data/circle.csv\")\n",
     "features = df.iloc[:, :-1]\n",
     "labels = df.iloc[:, -1]\n",
     "\n",
@@ -719,8 +719,8 @@
     "#from sklearn.utils import shuffle\n",
     "import pandas as pd\n",
     "\n",
-    "beer = pd.read_csv(\"beers.csv\")\n",
-    "beer_eval = pd.read_csv(\"beers_eval.csv\")\n",
+    "beer = pd.read_csv(\"data/beers.csv\")\n",
+    "beer_eval = pd.read_csv(\"data/beers_eval.csv\")\n",
     "\n",
     "all_beer = pd.concat((beer, beer_eval))\n",
     "\n",
@@ -873,13 +873,13 @@
     }
    ],
    "source": [
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "\n",
     "# all columns up to the last one:\n",
     "input_features = beer_data.iloc[:, :-1]\n",
     "input_labels = beer_data.iloc[:, -1]\n",
     "\n",
-    "eval_data = pd.read_csv(\"beers_eval.csv\")\n",
+    "eval_data = pd.read_csv(\"data/beers_eval.csv\")\n",
     "\n",
     "eval_features = eval_data.iloc[:, :-1]\n",
     "eval_labels = eval_data.iloc[:, -1]\n",
diff --git a/04_measuring_quality_of_a_classifier.ipynb b/04_measuring_quality_of_a_classifier.ipynb
index 0c9edbf632f6465b2e110b5aaf9467fdbd86c257..1225cde7961e8cfe9bb41d05280aa7626ab380aa 100644
--- a/04_measuring_quality_of_a_classifier.ipynb
+++ b/04_measuring_quality_of_a_classifier.ipynb
@@ -651,7 +651,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "print(beer_data.shape)"
    ]
   },
@@ -763,13 +763,13 @@
     }
    ],
    "source": [
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "\n",
     "# all columns up to the last one:\n",
     "features = beer_data.iloc[:, :-1]\n",
     "labels = beer_data.iloc[:, -1]\n",
     "\n",
-    "eval_data = pd.read_csv(\"beers_eval.csv\")\n",
+    "eval_data = pd.read_csv(\"data/beers_eval.csv\")\n",
     "\n",
     "eval_features = eval_data.iloc[:, :-1]\n",
     "eval_labels = eval_data.iloc[:, -1]\n",
diff --git a/05_classifiers_overview.ipynb b/05_classifiers_overview.ipynb
index dd66da5a638713f0d2d8cb286c3285f215f4ed9e..9a29e61c84d84efe70ce33ddf55093832ba3d296 100644
--- a/05_classifiers_overview.ipynb
+++ b/05_classifiers_overview.ipynb
@@ -286,7 +286,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"xor.csv\")\n",
+    "df = pd.read_csv(\"data/xor.csv\")\n",
     "df.head(2)"
    ]
   },
@@ -653,7 +653,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"line_separable_2d.csv\")\n",
+    "df = pd.read_csv(\"data/line_separable_2d.csv\")\n",
     "df.head(2)"
    ]
   },
@@ -831,7 +831,7 @@
     "### Exercise section\n",
     "\n",
     "1. Why did the test score drop when we penalized more misclassifications?\n",
+    "2. Experiment with higher dimensional \"data/beers.csv\" dataset and both parameters `C` and `penalty` of the logistic regression classifier. Compare scores and the resulting weights. What does the `l1` penalty do? What is the sweet spot of the \"inverse regularization\" `C`?\n",
+    "2. Experiment with higher dimensional \"data/beers.csv\" dataset and both parameters `C` and `penalty` of the linear regression classfier. Compare scores and the resulting weights. What does the `l1` penalty do? What is the sweet spot of the \"inverse regularization\" `C`?\n",
     "  "
    ]
   },
@@ -934,7 +934,7 @@
     "# SOLUTION\n",
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "print(df.head(2))\n",
     "\n",
     "features_4d = df.iloc[:, :-1]\n",
@@ -1088,7 +1088,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"line_separable_2d.csv\")\n",
+    "df = pd.read_csv(\"data/line_separable_2d.csv\")\n",
     "df.head(2)"
    ]
   },
@@ -1274,7 +1274,7 @@
     "## Exercise section\n",
     "\n",
     "1. It looks like we did train our classifier \"perfectly\" (no point within the margin) with \"harder\" margins. Why is the score then lower then previously?\n",
+    "2. Experiment with higher dimensional \"data/beers.csv\" dataset and both parameters `C` and `penalty` of the linear SVM classifier (note: set `dual=False` to work with `penalty='l1'`). Compare scores and the resulting weights.\n",
+    "2. Experiment with higher dimensional \"data/beers.csv\" dataset and both parameters `C` and `penalty` of the linear SVM classfier (note: set `dual=False` to work with `penalty='l1'`). Compare scores and the resulting weights.\n",
     "  "
    ]
   },
@@ -1284,7 +1284,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "\n",
     "C_values = [0.01, 0.1, 1, 10, 100]\n",
     "penalty_values = ['l1', 'l2']\n",
@@ -1383,7 +1383,7 @@
     "# SOLUTION\n",
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "print(df.head(2))\n",
     "\n",
     "features_4d = df.iloc[:, :-1]\n",
@@ -1480,7 +1480,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"circle.csv\")\n",
+    "df = pd.read_csv(\"data/circle.csv\")\n",
     "df.head(2)"
    ]
   },
@@ -2029,7 +2029,7 @@
     "import pandas as pd\n",
     "from sklearn.model_selection import train_test_split\n",
     "\n",
-    "df = pd.read_csv(\"circle.csv\")\n",
+    "df = pd.read_csv(\"data/circle.csv\")\n",
     "labelv = df[\"label\"]\n",
     "\n",
     "# circle interior is the `True` class\n",
@@ -2197,7 +2197,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"xor.csv\")\n",
+    "df = pd.read_csv(\"data/xor.csv\")\n",
     "df.head(2)"
    ]
   },
@@ -2226,7 +2226,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"xor.csv\")\n",
+    "df = pd.read_csv(\"data/xor.csv\")\n",
     "features_2d = df.loc[:, (\"x\", \"y\")]\n",
     "labelv = df[\"label\"]\n",
     "\n",
@@ -2272,7 +2272,7 @@
     }
    ],
    "source": [
-    "from sklearn.ensemble import DecisionTreeClassifier\n",
+    "from sklearn.tree import DecisionTreeClassifier\n",
     "from sklearn.model_selection import train_test_split\n",
     "\n",
     "X_train, X_test, y_train, y_test = train_test_split(features_2d, labelv, random_state=10)\n",
@@ -2366,7 +2366,7 @@
     "### Exercise section\n",
     "\n",
     "1. In principle for our XOR dataset it should suffice to use each feature exactly once. Try to built a smaller tree using different values for `max_depth` or `min_samples_leaf` parameters.\n",
-    "2. Build a decision tree for the `\"beers.csv\"` dataset."
+    "2. Build a decision tree for the `\"data/beers.csv\"` dataset."
    ]
   },
   {
@@ -2534,7 +2534,7 @@
     "from sklearn.tree import DecisionTreeClassifier\n",
     "from sklearn.model_selection import train_test_split\n",
     "\n",
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "print(df.head(2))\n",
     "\n",
     "features_4d = df.iloc[:, :-1]\n",
@@ -2667,7 +2667,7 @@
     "from sklearn.ensemble import RandomForestClassifier\n",
     "from sklearn.model_selection import train_test_split\n",
     "\n",
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "print(df.head(2))\n",
     "\n",
     "features_4d = df.iloc[:, :-1]\n",
@@ -2824,7 +2824,7 @@
     "from sklearn.ensemble import AdaBoostClassifier\n",
     "from sklearn.model_selection import train_test_split\n",
     "\n",
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "print(df.head(2))\n",
     "\n",
     "features_4d = df.iloc[:, :-1]\n",
@@ -2896,7 +2896,7 @@
     "from sklearn.tree import DecisionTreeClassifier\n",
     "from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n",
     "\n",
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "features_4d = df.iloc[:, :-1]\n",
     "labelv = df.iloc[:, -1]\n",
     "\n",
@@ -2974,7 +2974,7 @@
     "    AdaBoostClassifier(n_estimators=20, random_state=0),\n",
     "] \n",
     "\n",
-    "df = pd.read_csv(\"beers.csv\")\n",
+    "df = pd.read_csv(\"data/beers.csv\")\n",
     "features_4d = df.iloc[:, :-1]\n",
     "labelv = df.iloc[:, -1]\n",
     "\n",
diff --git a/06_preprocessing_pipelines_and_hyperparameter_optimization.ipynb b/06_preprocessing_pipelines_and_hyperparameter_optimization.ipynb
index e8952a178aabf26ba504e08669fbee9ba7a60c7b..cc17beace2f9b15ab1886103ae509831bfb95b3b 100644
--- a/06_preprocessing_pipelines_and_hyperparameter_optimization.ipynb
+++ b/06_preprocessing_pipelines_and_hyperparameter_optimization.ipynb
@@ -529,7 +529,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "\n",
     "features = beer_data.iloc[:, :-1]\n",
     "labels = beer_data.iloc[:, -1]\n",
@@ -712,11 +712,11 @@
     "        print(\"{:.3f}\".format(cross_val_score(p, features, labels, scoring=\"accuracy\", cv=5).mean()), end=\" \")\n",
     "        print([pi[0] for pi in p.steps])\n",
     "        \n",
-    "xor_data = pd.read_csv(\"xor.csv\")\n",
+    "xor_data = pd.read_csv(\"data/xor.csv\")\n",
     "check_pipelines(xor_data)\n",
     "print()\n",
     "\n",
-    "circle_data = pd.read_csv(\"2d_points.csv\")\n",
+    "circle_data = pd.read_csv(\"data/circle.csv\")\n",
     "check_pipelines(circle_data)\n"
    ]
   },
@@ -945,7 +945,7 @@
     }
    ],
    "source": [
-    "beer_data = pd.read_csv(\"beers.csv\")\n",
+    "beer_data = pd.read_csv(\"data/beers.csv\")\n",
     "\n",
     "features = beer_data.iloc[:, :-1]\n",
     "labels = beer_data.iloc[:, -1]\n",
diff --git a/07_regression.ipynb b/07_regression.ipynb
index 3f79f3032b29b09dd69da9d8e5fb4f8e4cac21e4..63e5139c93e9e8c7716630d8d6ec0c5fbee7869f 100644
--- a/07_regression.ipynb
+++ b/07_regression.ipynb
@@ -163,7 +163,7 @@
    "source": [
     "## Example: Salmon weight\n",
     "\n",
-    "The dataset `salmon.csv` holds measurements of `circumference`, `length` and `weight` for  `atlantic` and `sockeye` salmons.\n",
+    "The dataset `data/salmon.csv` holds measurements of `circumference`, `length` and `weight` for  `atlantic` and `sockeye` salmons.\n",
     "\n",
     "Our goal is to predict `weight` based on the other three features."
    ]
@@ -257,7 +257,7 @@
    "source": [
     "import pandas as pd\n",
     "\n",
-    "df = pd.read_csv(\"salmon.csv\")\n",
+    "df = pd.read_csv(\"data/salmon.csv\")\n",
     "df.head()"
    ]
   },
diff --git a/08_neural_networks.ipynb b/08_neural_networks.ipynb
index ffc0a04a809ad8f585e77dbaa91e5e89ded7dfbb..343143b5d27aac9a3456b6371c096f057ebc756a 100644
--- a/08_neural_networks.ipynb
+++ b/08_neural_networks.ipynb
@@ -1056,7 +1056,7 @@
     "# Creating a network to solve the XOR problem\n",
     "\n",
     "# Loading and plotting the data\n",
-    "xor = pd.read_csv(\"xor.csv\")\n",
+    "xor = pd.read_csv(\"data/xor.csv\")\n",
     "\n",
     "# Using x and y coordinates as featues\n",
     "features = xor.iloc[:, :-1]\n",
@@ -1815,12 +1815,12 @@
    "outputs": [],
    "source": [
     "# Before we move on forward we see how to save and load a keras model\n",
-    "model.save(\"./my_first_NN.h5\")\n",
+    "model.save(\"./data/my_first_NN.h5\")\n",
     "\n",
     "# Optional: See what is in the hdf5 file we just created above\n",
     "\n",
     "from keras.models import load_model\n",
-    "model = load_model(\"./my_first_NN.h5\")"
+    "model = load_model(\"./data/my_first_NN.h5\")"
    ]
   },
   {
@@ -2218,7 +2218,7 @@
     }
    ],
    "source": [
-    "circle = pd.read_csv(\"2d_points.csv\")\n",
+    "circle = pd.read_csv(\"data/circle.csv\")\n",
     "# Using x and y coordinates as featues\n",
     "features = circle.iloc[:, :-1]\n",
     "# Convert boolean to integer values (True->1 and False->0)\n",
@@ -3049,7 +3049,7 @@
     "import seaborn as sns\n",
     "sns.set_style(\"white\")\n",
     "# Loading the train and test data\n",
-    "digit = np.genfromtxt(\"digit_4_14x14.csv\", delimiter=\",\").astype(np.int16) ;\n",
+    "digit = np.genfromtxt(\"data/digit_4_14x14.csv\", delimiter=\",\").astype(np.int16) ;\n",
     "plt.imshow(digit, \"gray_r\")"
    ]
   },
diff --git a/2d_points.csv b/2d_points.csv
deleted file mode 100644
index 71417000333e463533e71c04bcffb7b9087d86b3..0000000000000000000000000000000000000000
--- a/2d_points.csv
+++ /dev/null
@@ -1,301 +0,0 @@
-x,y,label
--0.50183952461055,1.8028572256396647,False
-0.9279757672456204,0.3946339367881464,True
--1.375925438230254,-1.3760219186551894,False
--1.7676655513272022,1.4647045830997407,False
-0.4044600469728352,0.832290311184182,True
--1.9176620228167902,1.8796394086479773,False
-1.329770563201687,-1.1506435572868954,False
--1.2727001311715975,-1.2663819605862647,False
--0.7830310281618491,0.09902572652895136,True
--0.27221992543153695,-0.8350834392078323,True
-0.4474115788895179,-1.4420245573918327,False
--0.8314214058591274,-0.5345526268252332,True
--0.17572006313185629,1.1407038455720544,True
--1.201304871366561,0.05693775365444642,False
-0.36965827544816987,-1.814198349120009,False
-0.43017940760575346,-1.3179035052508339,False
--1.739793628058882,1.795542149013333,False
-1.8625281322982374,1.2335893924658445,False
--0.7815449233065173,-1.6093115439744645,False
-0.7369321060486276,-0.2393900250415948,True
--1.5118470606208847,-0.01929235955491926,False
--1.8624459155391264,1.6372816083151283,False
--0.9648800735999323,0.6500891374159279,False
--0.7531556956423562,0.08027208471124325,True
-0.1868411173731186,-1.2605821778978918,False
-1.8783385110582342,1.1005312934444582,False
-1.7579957662567565,1.5793094017105953,False
-0.39159991524434057,1.6874969400924673,False
--1.646029991792322,-1.2160685503234192,False
--1.8190908443578477,-0.6986786769469426,False
--0.44529084124207197,-0.9146038729044164,True
-1.3149500366077174,-0.5729866932256429,False
--0.876261961250477,0.17078433263299386,True
--1.4363031001009494,1.2087879230161587,False
--1.7017974252809167,1.9475477464020692,False
-1.0889790771866297,-1.2051372738633104,False
--1.9779115315055904,1.2618457138193366,False
-0.8274293753904685,0.9160286721639492,False
-1.085081386743783,-1.7038213930636386,False
--0.5661370858229096,-1.5365237618994811,False
-1.452413703502374,0.4931925073102317,False
--0.6764079005894033,-1.7457665988559055,False
--0.7560707131373512,-0.6992667118930118,True
-0.9184247133522563,0.5502298854208525,True
-1.5488509703053062,-0.11114029935220282,False
--1.5216230162467932,0.85297914889198,False
-1.0431401944675898,0.24510879027798493,True
-1.083868719818244,-0.024817614542437028,True
-0.09093131752797623,-0.2898359265658015,True
--1.8983234930236192,-1.5684342920267822,False
--1.874283257253063,0.5456416450551216,False
--0.7425760756946933,0.03428276465881108,True
-1.630265895704372,-1.0028310834045002,False
--0.35846830785748107,1.0222045541721947,True
--1.0848073380335101,-1.692080360684828,False
--0.8409941883449279,-1.3551148509839823,False
-1.7187906093702923,1.2324815182576678,False
-0.5336150260416939,1.4858423607508708,False
-1.2146883075964579,-1.2537197644558566,False
-1.570235993959911,0.1573689676626029,False
-1.2297606206562501,1.584365199693973,False
--0.7279861001125445,-1.559792301889293,False
--1.0882593498322333,-0.29156884549497475,True
-1.2720590636899725,1.4429223330253738,False
--1.9721914778752372,0.042989210310262926,False
--0.33035598740488403,-1.111568758117079,True
--1.5205385306652688,-0.6495393143854882,False
-1.7716388156500766,-0.7071882719169791,False
-0.07516248697346439,0.8120758355807114,True
--0.5454815904828241,1.8871283308838427,False
-1.849789179768445,-0.9928708166985434,False
--0.011005976430458198,-0.7964867607329214,True
--0.8606380224901296,-1.8524522105818688,False
-0.4382573359195874,0.010716092915445952,True
--1.7940849950000426,-0.8854141430535543,False
-1.6330635438666148,-1.0417524373321103,False
--1.4204205116351076,-0.04218895888974794,False
-1.9426018164424028,-1.0317789139539983,False
-0.6885421896235142,1.0464784613148703,False
--1.0494498240304013,0.9128653944474383,False
--0.5288674691229871,0.5292233223743179,True
-0.5341188430435788,0.1430987362990339,True
--1.6388409197823668,1.3412099823569519,False
--0.7168797401130567,-1.253925958400583,False
--1.8368994337809443,0.36357177275296726,False
-0.7102574473691297,-1.9336486842885754,False
-0.048372233197123915,-1.0940168992082482,True
-0.5806911616377994,-1.3025342839800342,False
-0.7637509524098638,-0.45305861479785037,True
-1.746919954946938,-1.449916223416027,False
--0.635734595798966,-1.5461059150376437,False
-1.6987744731142511,1.509357413523924,False
--0.9682334891393776,0.6399361841367162,False
-1.2688888008048633,0.22080324639784932,False
-0.11860231342402594,-1.0325908363981933,True
--1.6275889287764032,1.588863031813307,False
-1.6016722286533218,0.5324058290930718,False
--0.6438808358051973,-0.6031617015493564,True
-0.9038227154809575,1.5884410398103084,False
-1.5483456970604692,1.1195021834304955,False
-0.568126584617151,-1.6634401400198047,False
--1.353485143621545,1.594216754108317,False
-0.4257162386383597,-1.9632117935334814,False
--1.5941138285358716,0.6540070764322232,False
--1.9797536646151253,-1.3567677943300054,False
-0.1949351574663445,0.767580790770773,True
-0.6078450380104021,-1.1029227621577609,False
-0.8487168853901434,-1.0510036500127997,False
--0.6984012073629291,0.9859656204720966,False
-0.5985315961888587,1.3968936419767117,False
-0.6304515692013735,0.2732344133418865,True
--1.62530092868763,-0.5291367877622659,False
--0.9391905292730982,-1.0240414264836657,False
-1.8920422190097823,-0.4276091013329584,False
-1.5681862207084531,0.5245545039890516,False
-1.1792452141665937,0.010548372420768448,False
-0.30761553850543644,-0.0299292247245444,True
--1.219028048807822,0.8898084610460213,False
--0.8769105502365768,-1.9027361342741846,False
-0.5818891836286713,-1.2915572823718042,False
-1.7618343374116572,1.8157143080103495,False
-1.659457560881794,-0.5193651989782224,False
--1.9381735338845303,1.7132742503509015,False
--0.2872634067307427,1.8666192761746783,False
-1.8544799083570114,1.4120378218694403,False
--0.8222044317216572,-0.45960908559229896,True
-1.4045466860674276,-0.7323119793748893,False
--1.32202901325563,0.22720504983340062,False
-1.7446190966431239,0.784119186699892,False
-0.28024468035745986,-1.6112940249169259,False
-0.46002890679667896,1.960215400417053,False
--1.4396639390539039,0.07331860945494695,False
-1.5094922877118218,0.9630744710168178,False
-0.7880629639810719,0.8099363359484371,True
--0.5620353951209793,-0.8256326229420266,True
-1.2374446219140545,1.240453578716723,False
-1.4682892743204148,1.6529622102258852,False
-0.04536959544375119,0.0060651787487984166,True
-1.1931807158671006,0.5998557231110606,False
-0.8078675090308134,1.1831706777444042,False
-1.5600213672702652,-0.6480193725938568,False
--0.49766818944022395,-1.624072240636524,False
-0.3131205639846959,-1.8562309048130317,False
--0.13760792747015937,0.1705785388303065,True
--0.8538349914868624,0.363333042276043,True
--1.8779990002438023,-1.8506072450031423,False
-1.290402242638633,-0.5592374343549484,False
--1.4917579493924609,0.08897304021921748,False
-1.0799742123944434,-1.1367158900126273,False
-0.491561903276001,-1.658610140024928,False
--1.7932731153255692,0.12541852627259198,False
-0.16254048644042607,0.5497196059928262,True
-0.9043653348906462,1.9034083178501384,False
-0.06520139320478124,-0.7081741082350161,True
-1.1807447790748147,-0.9166709949517031,False
--0.24411431717745558,-1.6861744746309362,False
--1.89859702633817,1.8505936587117002,False
-1.3439204820488233,0.7838968243747919,False
--0.3641882223429205,-1.306822719716617,False
--1.3742518293156558,-0.9990284073416187,False
-0.19690665882448188,0.8583836908002493,True
-0.6407895068709251,-0.8802644122162286,True
-1.8194611226527764,0.951587666783074,False
-0.21741621004560274,0.44688298493740897,True
--0.32159975028884036,-1.0090760419953702,True
--0.5761092853949537,1.0313844418574765,True
--1.9424260454809765,-1.535709437972335,False
--1.815989431912989,-1.8370847907241195,False
-1.4218423360440289,0.8146314375200947,False
--0.10330468365069922,-1.608663357395994,False
--0.03353649953267057,-0.10611291687773727,True
--1.3071925203599393,-0.26459340304810786,False
--0.40598106241050624,0.46340039220886586,True
-0.5403746034705752,-1.818783960911822,False
--0.5015495414941151,0.5034396628569455,True
-0.012545034320350812,1.425959364753289,False
-0.63477452647578,-1.3482622916742812,False
--1.7177250103982806,0.5696771128252625,False
--1.8939547578335127,0.3431023250938532,False
-1.7609209656998304,0.3018967115035158,False
--0.44732029517391236,0.5731528737694127,True
--0.16698843803393348,0.18246715726373974,True
-1.7658592351061007,-0.455589448796903,False
-1.8447622552956568,1.621402567824255,False
--1.2168354608428142,-1.7225547964993382,False
--1.5968879944902934,-1.927112697393801,False
--1.6222281569762864,0.7320270936654274,False
--1.715245406159084,-0.7240974788249548,False
-1.3795012438778183,-1.9069122570566965,False
-1.2578739303557431,-0.8725809009064003,False
--1.527340689513375,0.7869486614566026,False
-0.515771387119536,1.5098880541082118,False
-0.9402841752155431,1.2139237215393943,False
--0.8718617097147741,-1.2902418248811087,False
-1.0024590065634333,1.227338957069056,False
-1.9620205680026932,-0.3495292923542941,False
--0.5119276568288673,1.1056518429679874,True
--0.6367858389879286,1.723029302414259,False
-1.4336510073720472,-0.2840238904999266,False
-1.0034842711659895,1.0181714963387294,False
--1.5875045246562696,1.6102116267182667,False
-0.02100948979142858,1.3058298644309665,False
--0.719801595877553,1.582092913984802,False
--0.44319328506334754,-1.9566493940788066,False
-1.6215279056770546,-1.6348532928554658,False
--0.7227454496383405,1.8002478682032197,False
-1.8024285877502244,0.29375155249314444,False
-0.5273488486791971,-0.2062179120867209,True
--0.8271569132077419,-0.6853418185203362,True
-0.6900738243081537,1.00949811775072,False
-1.1663161749033941,1.1584725711782156,False
--1.6351755878052385,-0.02231878118967412,False
--1.7697649599334229,0.19811552929494214,False
--0.23387799450649194,1.550816731033199,False
--0.5963399497916853,-1.5317319342895765,False
--1.4280332717886566,1.046042526869889,False
-0.47287225326504423,-1.595509295508839,False
--1.6635727755400103,0.80387652583648,False
--1.708947974543226,1.2874402371614249,False
-0.8249689086259848,-1.674604877432401,False
--1.6606491436592323,1.9465583140047018,False
--0.5029168169755187,-0.5174314117324363,True
-1.2511982690300103,1.7889943095354348,False
-1.9440042552914836,1.0135127410357665,False
--0.49496165787633695,-1.665997133205325,False
-1.1085876637097472,0.2336169989432202,True
--0.30311196301209486,1.625417540378944,False
--1.5552100707753946,-0.0294995828365634,False
--1.9545854209303237,-0.12535743202349492,False
--1.7747868972726506,-1.5247283349277123,False
--1.5298950128915805,0.5968412084642543,False
-0.9841795170616932,0.3334750603886385,True
-1.8486901938981677,-0.5005176819051838,False
--0.8571516548725571,1.4743965127578411,False
--1.1056166459221894,1.8528901577624453,False
--1.9513821012407346,1.8795153068305561,False
--1.8273603521976955,1.5645724547922844,False
-0.11080443634519943,1.9718591844772013,False
--1.7048137410584046,0.21541713760528314,False
-1.8772101424763958,0.09239137668059527,False
-0.5175945525410501,0.7829947559384687,True
--0.18183574092890709,0.5102323203362538,True
-0.33725724769240095,1.604632041963956,False
--1.8182144786341685,-0.8761472416310787,False
-1.801645936306235,1.5610551355636653,False
--0.17737298885714825,0.48053039120614693,True
--0.8904752680754693,-1.2475153611049548,False
--0.14520638024007138,-0.5865910878957887,True
-0.3346244474034883,-1.6890614521400606,False
-1.897579230664666,1.9448429779184115,False
-0.7926468560789806,0.14438546537648156,True
--0.761889534854689,1.2551800788277947,False
-0.7389246902155171,-1.3495322426204348,False
-1.6437087379753699,1.290148971692676,False
-1.7991996531676961,0.9028780335534399,False
-0.45366078374315943,-0.3270278548375245,True
-1.730913933416053,1.4642555580016334,False
--1.8191253195752424,-1.894532102010992,False
--0.4941465324878016,1.2422133231273316,False
-1.9491045172597778,-1.3983324355858873,False
-0.3765228614085405,-0.47643657347591395,True
-1.8796575912584128,1.3684756925428347,False
-1.3533148188445514,-0.12522736082011887,False
--0.3407219906493393,-0.9063717122771751,True
--1.7744980133962915,1.4588895050202129,False
-1.2516040365203103,1.9988706931445224,False
-1.9865473482956215,0.22172682241050978,False
-1.075949660722042,1.779062919529712,False
-1.3985895627096459,-1.0106075930272094,False
--0.19782345875962593,-1.48336233939402,False
-1.8162041090348895,0.4246985378035202,False
--1.0854287779861491,0.6868027376234269,False
-0.4725129618315833,-0.567349127868638,True
--1.545769631201484,0.6862927823711984,False
-0.08123080361517321,1.0892735669425573,True
-0.08065400444797355,1.4087260012741605,False
-0.20762735509794217,0.24375188614154508,True
-1.50661441063338,-0.38606853515041184,False
--1.463939086197437,-1.884869294746644,False
-1.020549022694476,0.48123820541385864,False
-0.8163190723968943,-1.148143353964357,False
--1.454514097652921,-1.9418213373284723,False
--0.5976497647736121,0.35967074741853233,True
--0.4310238196010707,-0.25010031190508375,True
-1.6166347779749937,-0.6069781319067986,False
-0.05595795663924319,1.1346120509645723,True
--0.41382887071491936,0.48834680091149396,True
-1.4494548349869807,1.7980824946305685,False
--1.4117060762838483,1.7063505006459776,False
--0.03153482768184723,-0.9670224468041666,True
--0.16345697504695478,1.9201303011419082,False
--0.029527624028521604,-0.6849935588499672,True
-0.5336034172669031,-1.0394175248872277,True
--1.6965466875653443,-1.484481112357403,False
--1.4878166441689102,-1.3923892259508226,False
--1.4446913094023595,0.5634989792128584,False
--1.2724796624034207,-0.6173308667045472,False
-1.5871536396240473,-0.10415343894851059,False
-0.6702309540841087,-1.3107205151934807,False
--1.230843924765317,-1.8365255349340845,False
diff --git a/data/.gitignore b/data/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..20897f7dc703f6ad01b9cb5745d9835a013ec00e
--- /dev/null
+++ b/data/.gitignore
@@ -0,0 +1 @@
+my_first_NN.h5
diff --git a/beers.csv b/data/beers.csv
similarity index 100%
rename from beers.csv
rename to data/beers.csv
diff --git a/beers_eval.csv b/data/beers_eval.csv
similarity index 100%
rename from beers_eval.csv
rename to data/beers_eval.csv
diff --git a/circle.csv b/data/circle.csv
similarity index 100%
rename from circle.csv
rename to data/circle.csv
diff --git a/digit_4_14x14.csv b/data/digit_4_14x14.csv
similarity index 100%
rename from digit_4_14x14.csv
rename to data/digit_4_14x14.csv
diff --git a/example.csv b/data/example.csv
similarity index 100%
rename from example.csv
rename to data/example.csv
diff --git a/line_separable_2d.csv b/data/line_separable_2d.csv
similarity index 100%
rename from line_separable_2d.csv
rename to data/line_separable_2d.csv
diff --git a/salmon.csv b/data/salmon.csv
similarity index 100%
rename from salmon.csv
rename to data/salmon.csv
diff --git a/xor.csv b/data/xor.csv
similarity index 100%
rename from xor.csv
rename to data/xor.csv
diff --git a/Notes Review 01 and 02.ipynb b/extra_notebooks/Notes Review 01 and 02.ipynb
similarity index 100%
rename from Notes Review 01 and 02.ipynb
rename to extra_notebooks/Notes Review 01 and 02.ipynb
diff --git a/classifier_examples.ipynb b/extra_notebooks/classifier_examples.ipynb
similarity index 100%
rename from classifier_examples.ipynb
rename to extra_notebooks/classifier_examples.ipynb
diff --git a/create_datasets.py.ipynb b/extra_notebooks/create_datasets.py.ipynb
similarity index 100%
rename from create_datasets.py.ipynb
rename to extra_notebooks/create_datasets.py.ipynb
diff --git a/decision_boundaries.ipynb b/extra_notebooks/decision_boundaries.ipynb
similarity index 100%
rename from decision_boundaries.ipynb
rename to extra_notebooks/decision_boundaries.ipynb
diff --git a/graphics.ipynb b/extra_notebooks/graphics.ipynb
similarity index 100%
rename from graphics.ipynb
rename to extra_notebooks/graphics.ipynb
diff --git a/index.ipynb b/extra_notebooks/index.ipynb
similarity index 100%
rename from index.ipynb
rename to extra_notebooks/index.ipynb
diff --git a/machine_learning_workshop_proposal.ipynb b/extra_notebooks/machine_learning_workshop_proposal.ipynb
similarity index 100%
rename from machine_learning_workshop_proposal.ipynb
rename to extra_notebooks/machine_learning_workshop_proposal.ipynb
diff --git a/rock_driving-kernel_trick.jpg b/images/rock_driving-kernel_trick.jpg
similarity index 100%
rename from rock_driving-kernel_trick.jpg
rename to images/rock_driving-kernel_trick.jpg