Skip to content
Snippets Groups Projects
neural_nets_intro.ipynb 436 KiB
Newer Older
  • Learn to ignore specific revisions
  • chadhat's avatar
    chadhat committed
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": 31,
       "metadata": {},
       "outputs": [
        {
         "name": "stdout",
         "output_type": "stream",
         "text": [
          "Epoch 1/20\n",
          "60000/60000 [==============================] - 2s 25us/step - loss: 0.5722 - acc: 0.8494\n",
          "Epoch 2/20\n",
          "60000/60000 [==============================] - 1s 18us/step - loss: 0.2583 - acc: 0.9256\n",
          "Epoch 3/20\n",
          "60000/60000 [==============================] - 1s 17us/step - loss: 0.2006 - acc: 0.9418\n",
          "Epoch 4/20\n",
          "60000/60000 [==============================] - 1s 17us/step - loss: 0.1650 - acc: 0.9516\n",
          "Epoch 5/20\n",
          "60000/60000 [==============================] - 1s 16us/step - loss: 0.1422 - acc: 0.9584\n",
          "Epoch 6/20\n",
          "60000/60000 [==============================] - 2s 29us/step - loss: 0.1235 - acc: 0.9638\n",
          "Epoch 7/20\n",
          "60000/60000 [==============================] - 1s 22us/step - loss: 0.1093 - acc: 0.9666\n",
          "Epoch 8/20\n",
          "60000/60000 [==============================] - 1s 17us/step - loss: 0.0975 - acc: 0.9706\n",
          "Epoch 9/20\n",
          "60000/60000 [==============================] - 1s 18us/step - loss: 0.0891 - acc: 0.9732\n",
          "Epoch 10/20\n",
          "60000/60000 [==============================] - 1s 16us/step - loss: 0.0810 - acc: 0.9757\n",
          "Epoch 11/20\n",
          "60000/60000 [==============================] - 1s 16us/step - loss: 0.0745 - acc: 0.9776\n",
          "Epoch 12/20\n",
          "60000/60000 [==============================] - 1s 24us/step - loss: 0.0677 - acc: 0.9797\n",
          "Epoch 13/20\n",
          "60000/60000 [==============================] - 1s 17us/step - loss: 0.0623 - acc: 0.9813\n",
          "Epoch 14/20\n",
          "60000/60000 [==============================] - 1s 15us/step - loss: 0.0574 - acc: 0.9829\n",
          "Epoch 15/20\n",
          "60000/60000 [==============================] - 1s 22us/step - loss: 0.0537 - acc: 0.9841\n",
          "Epoch 16/20\n",
          "60000/60000 [==============================] - 1s 21us/step - loss: 0.0506 - acc: 0.9845\n",
          "Epoch 17/20\n",
          "60000/60000 [==============================] - 1s 22us/step - loss: 0.0466 - acc: 0.9860\n",
          "Epoch 18/20\n",
          "60000/60000 [==============================] - 1s 20us/step - loss: 0.0439 - acc: 0.9868\n",
          "Epoch 19/20\n",
          "60000/60000 [==============================] - 1s 17us/step - loss: 0.0410 - acc: 0.9877\n",
          "Epoch 20/20\n",
          "60000/60000 [==============================] - 1s 23us/step - loss: 0.0374 - acc: 0.9884\n"
         ]
        }
       ],
    
       "source": [
    
        "# Building the keras model\n",
        "from keras.models import Sequential\n",
        "from keras.layers import Dense\n",
    
    chadhat's avatar
    chadhat committed
        "\n",
    
    chadhat's avatar
    chadhat committed
        "def mnist_model():\n",
        "    model = Sequential()\n",
    
    chadhat's avatar
    chadhat committed
        "\n",
    
    chadhat's avatar
    chadhat committed
        "    model.add(Dense(64, input_shape=(28*28,), activation=\"relu\"))\n",
    
    chadhat's avatar
    chadhat committed
        "\n",
    
    chadhat's avatar
    chadhat committed
        "    model.add(Dense(64, activation=\"relu\"))\n",
    
    chadhat's avatar
    chadhat committed
        "\n",
    
    chadhat's avatar
    chadhat committed
        "    model.add(Dense(10, activation=\"softmax\"))\n",
    
    chadhat's avatar
    chadhat committed
        "\n",
    
    chadhat's avatar
    chadhat committed
        "    model.compile(loss=\"categorical_crossentropy\",\n",
        "                  optimizer=\"rmsprop\", metrics=[\"accuracy\"])\n",
        "    return model\n",
    
    chadhat's avatar
    chadhat committed
        "model = mnist_model()\n",
        "\n",
        "model_run = model.fit(X_train_prep, y_train_onehot, epochs=20,\n",
        "                      batch_size=512)"
    
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": 32,
       "metadata": {},
       "outputs": [
        {
         "name": "stdout",
         "output_type": "stream",
         "text": [
          "10000/10000 [==============================] - 1s 63us/step\n",
          "The [loss, accuracy] on test dataset are:  [0.15624154731309972, 0.95640000000000003]\n"
         ]
        }
       ],
    
    chadhat's avatar
    chadhat committed
       "source": [
    
    chadhat's avatar
    chadhat committed
        "print(\"The [loss, accuracy] on test dataset are: \" , model.evaluate(X_test_prep, y_test_onehot))"
       ]
      },
      {
       "cell_type": "markdown",
       "metadata": {},
       "source": [
        "### Optional exercise: Run the model again with a validation dataset, plot the accuracy as a function of the number of epochs, vary the number of epochs and observe what happens."
       ]
      },
      {
       "cell_type": "code",
       "execution_count": null,
       "metadata": {},
       "outputs": [],
       "source": [
        "# Code here"
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": 34,
       "metadata": {},
       "outputs": [
        {
         "name": "stdout",
         "output_type": "stream",
         "text": [
          "Train on 60000 samples, validate on 10000 samples\n",
          "Epoch 1/20\n",
          "60000/60000 [==============================] - 1s 22us/step - loss: 0.0092 - acc: 0.9976 - val_loss: 0.1240 - val_acc: 0.9700\n",
          "Epoch 2/20\n",
          "60000/60000 [==============================] - 1s 18us/step - loss: 0.0088 - acc: 0.9979 - val_loss: 0.1109 - val_acc: 0.9744\n",
          "Epoch 3/20\n",
          "60000/60000 [==============================] - 1s 19us/step - loss: 0.0079 - acc: 0.9981 - val_loss: 0.1234 - val_acc: 0.9727\n",
          "Epoch 4/20\n",
          "60000/60000 [==============================] - 1s 19us/step - loss: 0.0074 - acc: 0.9983 - val_loss: 0.1047 - val_acc: 0.9764\n",
          "Epoch 5/20\n",
          "60000/60000 [==============================] - 1s 20us/step - loss: 0.0074 - acc: 0.9981 - val_loss: 0.1147 - val_acc: 0.9748\n",
          "Epoch 6/20\n",
          "60000/60000 [==============================] - 1s 20us/step - loss: 0.0067 - acc: 0.9983 - val_loss: 0.1150 - val_acc: 0.9765\n",
          "Epoch 7/20\n",
          "60000/60000 [==============================] - 1s 18us/step - loss: 0.0060 - acc: 0.9986 - val_loss: 0.1161 - val_acc: 0.9753\n",
          "Epoch 8/20\n",
          "60000/60000 [==============================] - 1s 16us/step - loss: 0.0062 - acc: 0.9985 - val_loss: 0.1457 - val_acc: 0.9682\n",
          "Epoch 9/20\n",
          "60000/60000 [==============================] - 1s 16us/step - loss: 0.0056 - acc: 0.9986 - val_loss: 0.1162 - val_acc: 0.9758\n",
          "Epoch 10/20\n",
          "60000/60000 [==============================] - 1s 25us/step - loss: 0.0050 - acc: 0.9989 - val_loss: 0.1097 - val_acc: 0.9768\n",
          "Epoch 11/20\n",
          "60000/60000 [==============================] - 2s 25us/step - loss: 0.0054 - acc: 0.9986 - val_loss: 0.1148 - val_acc: 0.9757\n",
          "Epoch 12/20\n",
          "60000/60000 [==============================] - 1s 18us/step - loss: 0.0044 - acc: 0.9990 - val_loss: 0.1148 - val_acc: 0.9772\n",
          "Epoch 13/20\n",
          "60000/60000 [==============================] - 1s 20us/step - loss: 0.0046 - acc: 0.9990 - val_loss: 0.1199 - val_acc: 0.9746\n",
          "Epoch 14/20\n",
          "60000/60000 [==============================] - 1s 23us/step - loss: 0.0042 - acc: 0.9990 - val_loss: 0.1156 - val_acc: 0.9770\n",
          "Epoch 15/20\n",
          "60000/60000 [==============================] - 1s 22us/step - loss: 0.0035 - acc: 0.9992 - val_loss: 0.1206 - val_acc: 0.9757\n",
          "Epoch 16/20\n",
          "60000/60000 [==============================] - 1s 22us/step - loss: 0.0040 - acc: 0.9990 - val_loss: 0.1252 - val_acc: 0.9757\n",
          "Epoch 17/20\n",
          "60000/60000 [==============================] - 1s 24us/step - loss: 0.0038 - acc: 0.9993 - val_loss: 0.1305 - val_acc: 0.9741\n",
          "Epoch 18/20\n",
          "60000/60000 [==============================] - 1s 23us/step - loss: 0.0032 - acc: 0.9994 - val_loss: 0.1391 - val_acc: 0.9723\n",
          "Epoch 19/20\n",
          "60000/60000 [==============================] - 1s 20us/step - loss: 0.0033 - acc: 0.9993 - val_loss: 0.1244 - val_acc: 0.9759\n",
          "Epoch 20/20\n",
          "60000/60000 [==============================] - 1s 18us/step - loss: 0.0031 - acc: 0.9993 - val_loss: 0.1263 - val_acc: 0.9770\n",
          "The history has the following data:  dict_keys(['val_loss', 'val_acc', 'loss', 'acc'])\n"
         ]
        },
        {
         "data": {
          "text/plain": [
           "[<matplotlib.lines.Line2D at 0x7fe6681f74e0>]"
          ]
         },
         "execution_count": 34,
         "metadata": {},
         "output_type": "execute_result"
        },
        {
         "data": {
          "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAD8CAYAAAB3u9PLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmcFPWd//HXhxluVK6RUy5FIx7BOMFoYnQ9IhAVNcagRo0/Fdn1whwrahKNcY262Rj1Z8JqgsELJIlEV81PDSZxs6vioICgjFwqIOAggUEQh2E+vz++Nenqnm6mZ6aPGeb9fDzq0XXXt6qrv5+qTx1t7o6IiEiHYhdARERaBwUEEREBFBBERCSigCAiIoACgoiIRBQQREQEUEAQEZGIAoKIiAAKCCIiEiktdgGaom/fvj5s2LBiF0NEpE2ZP3/+Rncva2y8NhUQhg0bRkVFRbGLISLSppjZe9mMp5SRiIgAWQQEM5tuZh+a2eIMw83M7jGz5Wa2yMw+Fxs21swqo2FTY/17m9kLZrYs+uyVm9UREZHmyuYM4TfA2N0MHweMjJpJwC8BzKwEuC8aPgo418xGRdNMBea6+0hgbtQtIiJF1GhAcPeXgE27GWUC8JAHrwA9zWwAMAZY7u4r3b0GmBWNWz/NjKh9BnBGc1dARERyIxfXEAYBq2Pda6J+mfoD9HP3dVH7eqBfDsohIiItUPSLyh7+oSfjv/SY2SQzqzCziqqqqgKWTESkfcnFbadrgf1i3YOjfh0z9AfYYGYD3H1dlF76MNPM3f1+4H6A8vJy/b2biOTdp5/Cli2JZts26NwZunVr2HTsCGbFLnFu5CIgPAVcaWazgKOALVFFXwWMNLPhhEAwETgvNs1FwO3R55M5KIeIyD/U1cGGDbB6NaxbB5s3hyZe0W/Zkr7fjh3ZL6ekJASG7t3TB4xu3aBrV+iQg3zMAw+EAJQvjQYEM5sJHA/0NbM1wE2Eo3/cfRrwLDAeWA5sBy6OhtWa2ZXAc0AJMN3dl0SzvR2YbWaXAO8B5+RwnUQkj3buzK5Sjff75BPo1Qv69k1uysqSu3v2zK7irKuDqqpQ2a9ZEz5T29euhdra/G+PXbtg69bQ5Nu0aUUOCO5+biPDHbgiw7BnCQEjtf9HwIlZllGkaNzDUeZHH8HQodCjR2GWuXo1vPFGolm1KvTv0CE0Zk1vr6sL86ira3r7zp1QXR0q9+3b87fuJSXQp0/DoNGjB6xfn1zZ19Tktxw9e8I++4SmR4+QRtq+PbnZti0EhD1Fm3p1hUg+1NaGSmbFitAsX574XLky/OjrDRoEBx2U3Bx4YAgWJSVNX/auXVBZGSr9BQsSAWDT7m703oPt2gUffhiaXOjTB/bbL3xvvXsnKvh4E6/465tu3bK/LrBzZ8NAEQ8Y27eHMyTPwRXQfJ4dgAKCtBM7doSj7NQKf8UKePfd8KPOxtq1oXnxxeT+nTvDAQckB4n69t69E2V4883kI/9Fi0Jl0ZZ06AB77525Mk3Xv2vXkEKqqoKNGxNNand1dfbl6NUrVPaDB4fP1PZBg0LFnm8dOybWs61TQJAW27kzXLRLl8etb9+0KVSaXbtCly6Jz3j77j5LS0OFumNHqEDjn+n6pQ5rSZpjn33Ckeb772fOSX/6KSxZEppUffuG6Zcvzz69sM8+cMQRoRk9Gg45BDp1Sp/WyTb9U58+ak7KqaQkEQR69MjfXTWffhrSc6lBY+tW6NcvueLv3j0/ZWjPFBDagB07wg9l770Lf3vbjh0hh/7BB5kr+/XrQ4XTmJqawlx4a47+/WH//cNRfvxz//3DEb5ZCHyrVoUUT2UlvPNOon3Dhszzrq/YMhk4MFH51zfDhu05tzI2RefOYXsMHFjskrRPCggFVFMDf/97OALatCl8xtszfdanFDp2hH33DRfZ9t13901ZWebT5ZqakKNdvz5UZOk+69u3bCnc9smnDh1gyJCGlf0BB8CIEdldLO7YMaSCDjwQTjstedjmzSFAxINEZSUsW5b4/sxg5Mjkin/06PB9ibQG
CgjNtGtXqNzT5UHTdX/0UcuPjnfuTOSws9G9eyJAdO2aCAK5vmBp1vB0PrW9b98QiBpL8WRK+ezcmX16KVNKqjSPe3vPnjBmTGji6urCmdTGjSGQFOIuJZHmUkCIcQ9HxPWVbrxZvz65gt+0KTd3DWSjY8eQP47f7ZKNbdtCimPVquYvu7Q0VPb9+2e+eDdwYChfYwpxga+1qT8zGTKk2CURaVy7CQg7d4ZKPV1lH2/yeY91hw4hH927d7jIWP8Zb0/3WX8Rb/v2EIyqqhK35mVqqqoy36fdoUNIKfXvH5r6Cj/1s3//cCdHLp6wFJHWb48PCA8/DP/6ryEfnusj+viTl6lPXKb279Mn3KHRksq1W7dwv/vQoY2P6x5u4asPENu3h4q+X79QnubcMy8ie7Y9PiCUloYzg2x17x7uX05tBgxIVO5lZeHIPZ856ZYyS9wbPXJksUsjIm1BK67ScmNQ9A8M9Rc+01X28aYYt3aKiLQGe3xAGDMmPFDUv3/+H/sWEWnL9viA0KVLuBNGRER2T/ePiIgIoIAgIiIRBQQREQEUEEREJKKAICIigAKCiIhEFBBERARQQBARkYgCgoiIAAoIIiISySogmNlYM6s0s+VmNjXN8F5mNsfMFpnZPDM7NDbsGjNbbGZLzGxKrP/NZrbWzBZEzfjcrJKIiDRHowHBzEqA+4BxwCjgXDMblTLaDcACdz8cuBC4O5r2UOAyYAzwWeBUMzsgNt1d7j46ap5t8dqIiEizZXOGMAZY7u4r3b0GmAVMSBlnFPAigLsvBYaZWT/gYOBVd9/u7rXAX4GzclZ6ERHJmWwCwiBgdax7TdQvbiFRRW9mY4ChwGBgMXCsmfUxs27AeCD+7tGrojTTdDPrlW7hZjbJzCrMrKKqqiqrlRIRkabL1UXl24GeZrYAuAp4A9jl7m8DdwDPA/8PWADsiqb5JTACGA2sA/4j3Yzd/X53L3f38rKyshwVV0REUmXzfwhrST6qHxz1+wd3rwYuBjAzA1YBK6NhvwZ+HQ27jXCGgbtvqJ/ezB4Anm7uSoiISMtlc4bwGjDSzIabWSdgIvBUfAQz6xkNA7gUeCkKEpjZvtHnEEJa6bGoe0BsFmcS0ksiIlIkjZ4huHutmV0JPAeUANPdfYmZTY6GTyNcPJ5hZg4sAS6JzeL3ZtYH2Alc4e6bo/53mtlowIF3gctztE4iItIM5u7FLkPWysvLvaKiotjFEBFpU8xsvruXNzaenlQWERFAAUFERCIKCCIiAiggiIhIRAFBREQABQQREYkoIIiICKCAICIiEQUEEREBFBBERCSigCAiIoACgoiIRBQQREQEUEAQEZGIAoKIiAAKCCIiElFAEBERQAFBREQiCggiIgIoIIiISEQBQUREAAUEERGJZBUQzGysmVWa2XIzm5pmeC8zm2Nmi8xsnpkdGht2jZktNrMlZjYl1r+3mb1gZsuiz165WSUREWmORgOCmZUA9wHjgFHAuWY2KmW0G4AF7n44cCFwdzTtocBlwBjgs8CpZnZANM1UYK67jwTmRt0iIlIk2ZwhjAGWu/tKd68BZgETUsYZBbwI4O5LgWFm1g84GHjV3be7ey3wV+CsaJoJwIyofQZwRovWREREWiSbgDAIWB3rXhP1i1tIVNGb2RhgKDAYWAwca2Z9zKwbMB7YL5qmn7uvi9rXA/2atQYiIpITpTmaz+3A3Wa2AHgTeAPY5e5vm9kdwPPANmABsCt1Ynd3M/N0MzazScAkgCFDhuSouCIikiqbM4S1JI7qIRz5r42P4O7V7n6xu48mXEMoA1ZGw37t7ke6+5eBvwPvRJNtMLMBANHnh+kW7u73u3u5u5eXlZU1YdVERKQpsgkIrwEjzWy4mXUCJgJPxUcws57RMIBLgZfcvToatm/0OYSQVnosGu8p4KKo/SLgyZasiIiItEyjKSN3rzWzK4HngBJgursvMbPJ0fBphIvHM6K0zxLgktgsfm9mfYCdwBXu
vjnqfzsw28wuAd4DzsnVSomISNOZe9rUfatUXl7uFRUVxS6GiEibYmbz3b28sfH0pLKIiAAKCCIiElFAEBERQAFBREQiCggiIgIoIIiISEQBQUREAAUEERGJKCCIiAiggCAiIhEFBBERARQQREQkooAgIiKAAoKIiEQUEEREBFBAEBGRiAKCiIgACggiIhJRQBAREUABQUREIgoIIiICKCCIiEhEAUFERIAsA4KZjTWzSjNbbmZT0wzvZWZzzGyRmc0zs0Njw641syVmttjMZppZl6j/zWa21swWRM343K2WiIg0VaMBwcxKgPuAccAo4FwzG5Uy2g3AAnc/HLgQuDuadhBwNVDu7ocCJcDE2HR3ufvoqHm2xWsjIiLNls0ZwhhgubuvdPcaYBYwIWWcUcCLAO6+FBhmZv2iYaVAVzMrBboBH+Sk5CIiklPZBIRBwOpY95qoX9xC4CwAMxsDDAUGu/ta4KfA+8A6YIu7Px+b7qoozTTdzHo1cx1ERCQHcnVR+Xagp5ktAK4C3gB2RZX8BGA4MBDobmbfjKb5JTACGE0IFv+RbsZmNsnMKsysoqqqKkfFFRGRVNkEhLXAfrHuwVG/f3D3ane/2N1HE64hlAErgZOAVe5e5e47gSeAY6JpNrj7LnevAx4gpKYacPf73b3c3cvLysqauHoiIpKtbALCa8BIMxtuZp0IF4Wfio9gZj2jYQCXAi+5ezUhVfQFM+tmZgacCLwdTTMgNoszgcUtWxUREWmJ0sZGcPdaM7sSeI5wl9B0d19iZpOj4dOAg4EZZubAEuCSaNirZvY74HWglpBKuj+a9Z1mNhpw4F3g8lyumIiINI25e7HLkLXy8nKvqKgodjFERNoUM5vv7uWNjacnlUVEBFBAEBGRiAKCiIgACggiIhJRQBAREUABQUREIgoIIiICKCCIiEhEAUFERAAFBBERiSggiIgIoIAgIiIRBQQREQEUEEREJKKAICIigAKCiIhEFBBERARQQBARkYgCgoiIAAoIIiISUUAQERFAAUFERCIKCCIiAmQZEMxsrJlVmtlyM5uaZngvM5tjZovMbJ6ZHRobdq2ZLTGzxWY208y6RP17m9kLZrYs+uyVu9USEZGmajQgmFkJcB8wDhgFnGtmo1JGuwFY4O6HAxcCd0fTDgKuBsrd/VCgBJgYTTMVmOvuI4G5UbeIiBRJNmcIY4Dl7r7S3WuAWcCElHFGAS8CuPtSYJiZ9YuGlQJdzawU6AZ8EPWfAMyI2mcAZzR7LUREpMWyCQiDgNWx7jVRv7iFwFkAZjYGGAoMdve1wE+B94F1wBZ3fz6app+7r4va1wP9SMPMJplZhZlVVFVVZVFcERFpjlxdVL4d6GlmC4CrgDeAXdF1gQnAcGAg0N3Mvpk6sbs74Olm7O73u3u5u5eXlZXlqLgiIpKqNItx1gL7xboHR/3+wd2rgYsBzMyAVcBK4BRglbtXRcOeAI4BHgE2mNkAd19nZgOAD1u4LiIi0gLZnCG8Bow0s+Fm1olwUfip+Ahm1jMaBnAp8FIUJN4HvmBm3aJAcSLwdjTeU8BFUftFwJMtWxUREWmJRs8Q3L3WzK4EniPcJTTd3ZeY2eRo+DTgYGCGmTmwBLgkGvaqmf0OeB2oJaSS7o9mfTsw28wuAd4DzsnpmomISJNYSN+3DeXl5V5RUVHsYoiItClmNt/dyxsbT08qi4gIoIAgIiIRBQQREQEUEEREJKKAICIigAKCiIhEFBBERARQQBARkYgCgoiIAAoIIiISUUAQae1eeAFuvRXef7/YJZE9nAKCSGu1bBl89avwla/AD34AY8ZAZWXhy7FtG/zP/8AnnxR+2QLV1XD33fDoo3lflAKCSGuzbRvceCMceig8+2yi/4YNcMIJsHx54cpSWQkHHwxf+hKMGAH33gufflq45bdnK1bAlCkweHD4vOkmqKvL6yIVEEQy2bQpHJl99rPQuzd87Wvw0kuQrzcEu8Ps2fCZz8Bt
t0FNTehvBp07h/YPPghB4d1381OGuIUL4ctfhtXRP+iuXw9XXw0jR8IDD8DOnfkvQ3vjDi++CKefHrbz3XfD1q1h2IoV8Mwz+V6+t5nmyCOPdCmC2lr3jz4qdikKo67O/S9/cT//fPfOnd3DTzS5OeII99/8xn3Hjtwtd/Fi93/6p4bLOuoo99dec3/xRfcuXRL9hw93f//93C0/1csvu/fsmX7965sRI9xnzAj7h7TM9u3uDzzgfuih6bf1wQe7T5vm/vHHzZo9UOFZ1LFFr+Sb0rTbgPDqq+4PPlicSvn5592HDg27yimnuP/1r6HS3NNs2OB+553uBx64+0ow3uy7r/tNN7mvW9f85W7e7D5lintJScN5P/ig+65diXGfe869U6fEOCNHun/wQUvXvKE//9m9e/fEcvbZJwSkn/88lCt1Oxx0kPvMmcllleysWeN+ww3uffqk38fGjw/fewt/cwoIe4o//cm9Q4dEJfHYY4WpkKur3S+/PP1Oeswx7k8/3fYDw65dIeB9/evuHTumX9cjjwxHZq+95j5pUvJRen3TsaP7BRe4V1Q0bdm/+U3DCrakxP2aa9z//vf00z39dHJZDz44BLNceeaZ5HXs29f99dcTwz/+2P2OO9x79264HQ47zH3OnLa/XxTCyy+7T5zoXlracDt27+5+xRXuS5fmbHEKCHuCzZvd99uv4Q4zdqz7qlX5W+7cuYmzgt01hx8eAtTOnfkrSz6sXet+660h7ZJuvfbay33yZPf58xtOu3Gj+09+4j54cPppv/Ql99/+dvfbZP5896OPbjjtcce5v/lm4+WfMye5IjnssFCulpo9OznYDBzo/tZb6cfdssX9Rz9y33vv9EH02WcVGFLV1ITfy1FHpd93hg1z/+lPMx8MtIACwp7goosyV8bduoWdJ5eV8dat4cgkdVlnnRWOaC69NP2R9P77u99/f25z6rlWWxuOridMaJieqW+OPtp9+vTs8rQ1Ne6PP56+Ygf3IUNCCmrTpsQ0GzeGsy6z5HEHDXKfNatpFejs2YkzRwjXNeLLaqoHH0ye3/Dh7itWND7dRx+FlEc8xRTfnnPnNr9Me4rqavd/+7fwPafbV447zv2JJ/J6LUYBoa37wx+Sd5rp092vuqphZXLEEU1LVWTy17+Gi4TxeffuHXLD8Ypq9Wr3a68NASl1xx44MASprVtbXp5c+s//zHxE36uX+9VXZ3dknsmrr7qfd1760/9u3cLZxs9+1jDN0rGj+9Spzd9ejzySvD98/vPhrLKp7r03uVyf+UzIbTfFhx+6f+c76VNqxx8f9q+NG5vftLZ9Klvbt4ffaOo26dTJ/Vvfcn/jjYIUQwGhLfvww+Tc8nnnJYa98kpIEcR3rg4d3L/97eb9aLZtCznr1B329NN3f7G0qsr9hz8MFWrqtL17h4utuUhjtNRbb6UPBMcdFyrU7dtzt6y1a92///2Qd890ZlffjB3rXlnZ8mX++tfJ8z3mmHBEmq3bbkuefvToll2T+OAD9yuvTL74nYvGzP3MM/N7Z1U+pP62+vd3v+WW3F73yYICQltVVxdSNPGj7tRUQE1NyGOnHo0NHRouCmbrb39zP+CA5Hn07On+0EPZpy+qq8NZwYABDX/E3buHQNXUo81cuummRHn69HH/7ndzerEure3bQ0V9+OENt8nw4e5PPpnb/Povf9kw2G3btvtp6urcr78+ebqjj85d/vq999wvuyxzeq65TY8e7vfc0zZudX3uueSy33qr+6efFqUoOQ0IwFigElgOTE0zvBcwB1gEzAMOjfofBCyINdXAlGjYzcDa2LDxjZWjXQSEhx9O3on++MfM4y5b5n7iiQ1/NBMnuq9fn3m67dvD6X1q+mn8+OZX3p98ElIzqWmn+tPjyy4rzmn/qFGJcsyeXdhl19WF2zXPPDNcZ7nlltyekcTdfXfyNj/ppPCdpLNrV0g/xsc/4YT8fD/Ll4fUSP/+4cyxuU3qPjVm
jPvChbkvb65s3Jh8kHTaaUW9yJ6zgACUACuAEUAnYCEwKmWcfwduito/A8zNMJ/1wFBPBITvZlPI+maPDwirV4d7vut3ossvb3yaurrwcFDqfcw9e7r/6lcNd8KXXw73jcfH3XvvcI0iFzvszp3hTorUtBa4f+MbLZ9/UyxZklh2167NfqinzbjzzuTtPW5cwwv9tbWhgo6Pd9ppmYNHa/HSS+HaRrzcJSXu113X+NlQoaWe5e+7b8FTRKlyGRCOBp6LdV8PXJ8yzjPAsbHuFUC/lHG+AvxPrFsBIa6uzv0rX0nsRCNGNO2IrarK/cILG1bCxx0XUiSffBJ+PPE7SSAsMx952bo69//6r+S7cEpKCvvDuPnmxLLPPrtwyy2mH/84+fudMCGkGN1DuuLrX28YpOuHt3Y7doTvNPX6xIgR4XmS1uLBB5PL9/TTxS5RTgPC2cCvYt0XAP83ZZzbgLui9jFALXBkyjjTgStj3TcD70VppulAr8bKskcHhF/8IrEDmYUjouZ44YWGaZtOnRr269Ej3Cqa79PYujr3L34xsdx77snv8uLi6aLHHy/ccovt+99P/q7PPjtc6xk/Prn/JZe0jVx8qrffdj/22IYHPxdcEG7IKKYVK8Jvq75MkycXtzyRQgeEvYEHo2sBDwOvAaNjwzsBG+NnDUC/KI3UAfg3YHqG5U8CKoCKIUOGFGDTFcGyZcm3cX73uy2b37Zt4XbGTBf0TjjB/d13c1P2bMQveh51VGGW2d7SRXF1de7f+17yd56aUrzmmrb9qoldu8K7f1Lft9SnT0ihFiNfv3NnuMurviwHHthq9ruCpoxSxjfgXWDvWL8JwPO7mWYYsLixsuyRZwi1tclH0Icckrt87oIF4d70+nl37x7ORApdEWzcmHyP/rJl+V9me0wXxdXVpb+dGMIZxJ7yFPG6dSHtlbqOJ55YmP0sLp6uKy0NrztpJXIZEEqBlcDw2EXlQ1LG6Ql0itovAx5KGT4LuDil34BY+7XArMbKUpSAUFMTTvuOPz7cRpZrd9yRvBOle11CS9TWhpzmdddl9+Rpvpx2WmI9f/Sj/C/vkEMSy2tP6aK4ujr3f/7n5IryjjuKXar8eOaZ8HR4fF27dAnPWRTiGsm8eckHPbfemv9lNkHOAkKYF+OBd6KLxTdG/SYDkz1xFvEO4dbUJ+LXA4DuwEfAPinzfBh4M7qG8FQ8QGRqihIQUu/cmDSpaQ/+7M6iRckXyApRURbLrFmJ9TzwwPweobbndFGqXbvCkeuYMeGW5j3Z1q3huZfUGycOOyzcXZcvH3+c/JbcL36x1V2byWlAaC1NwQPC+vXhRWepp6NDh4a3kLbEp5+Gp0Lr51le3nbu9miObduSt+W8eflbVjxd9LWv5W850jpVVDR8XYRZeOVKPm6vnTw5sZy99nJfuTL3y2ihbAOC/jFtd77//cS/FXXtmuj/3ntw0klwxRXw8cfNm/ctt8CCBaG9c2d46CHo2LFl5W3NunWDs85KdOfz/2F/+9tE+znn5G850jodeSTMmwc//WnY7yBU13fdFf6XevHi3C3r6adh2rRE9z33wPDhuZt/oWUTNVpLU9AzhNdfT36S949/DC96S31qcvjw8A9bTfHKK8mntT/7WX7WobV5/vnEOvfrl5/XZsffXdTe00USjtZPOSX5N9u5c7j9uaVpy/Xr3cvKks9GW+nFepQyaoG6OvcvfznxRY8fnxi2bl148VtqGunqq7OrfLZtS843Hndc2779rylqa8MrDOrXPR8X6X/0o+QfqEhdnft99zV899e4cbt/xUtj8zz11MS8BgxoHS9zzEABoSVmz0580aWlDV+GVlcXLtCl3gN9wAHhhXG7c/XVifF79GiV+ca8uvbaxPpfcEHu5x+/u2jWrNzPX9quJUvcP/vZ5N9sWVnzniSeNi15Pvk4uMkhBYTm2r49+d/CpkzJPO7atQ2f/jQLL45L9xKzuXOTx33ggbytRqtV
UZEcEHP5Hpp4uqhLl7b7Dn3Jnx07wu8z9Qz/iiuyf/Hg0qUhHVk/7TXX5LfMOaCA0Fy33pr4ovv0afxfqOrqwn3+qX8leNBBybe6bd6cfJ/0V7/aavONeVVXl/xyvZkzczdvpYskW88/3/CV7aNGhYc5d6emJtwRWD/NIYfk7w22OZRtQNBdRnEffAA/+Umi+8c/hl69dj+NGXzrW+HOhVNOSfSvrIQvfhGmToUdO+Daa+H998Ow3r3hgQfCtO2NGZx/fqI7l3cbxe8u+vrXczdf2fOcfDIsWgQTJiT6vfVWuAvprrugri79dLfcAhUVob1jx7D/xu9AbOuyiRqtpcn7GUL8baGHHdb0u2Dq6kIaKPXZhWHDkrvbe257+fLEtigtDW9qbSmli6Q56urCSx5T/xL25JNDSjjub39LvjvwzjuLU+ZmQGcITfTqq+FZgHo//zmUljZtHmZw6aXw5ptwwgmJ/u++m2j/xjdC057tvz8cfXRor62F2bNbPs/42cH48dCjR8vnKXs+M7jsMnj99fD8Qr0XXoDDD4c//CF0V1fDBRckzhyOPx6+/e2CFzffFBAgxPspUxLdZ5yRXKE31dChYYf6xS+ge/dE//794b77mj/fPUmu00ZKF0lLHHQQ/O//hhRvfSr3o4/gzDPh8svhX/4FVq0K/ffZB2bMgJKS4pU3TyycTbQN5eXlXlGfv8ulRx+Fb34ztHfqFHKJ+++fm3mvXBl2svfeg3vvDTlKgaoqGDAAdu0K3StXNv8Jz6VL4eCDQ3uXLmHeOkOQ5vrLX8LZwJo16Yc/9hice25Bi9RSZjbf3csbG09nCNu2wXXXJbqnTMldMAAYMSKkRF59VcEgrqws+SL8Y481f15KF0kuHX88LFwIZ5/dcNh557W5YNAUCgh33glr14b2fv3gxhuLW572pP6sDOCRR0LqrjmULpJc6907HMg9+GAi7Tt8+B6f8m3fAeG990JAqHfbbbD33sUrT3tz+umJH9vSpfDGG02fx9Kl4SI+hHTRqafmrnzSvtXfUl5ZGdLKb7xM6l5rAAAJTklEQVQBPXsWu1R51b4DwnXXhWcEAD73ufDlS+F07x4u2tVrzsXl+NnBuHFKF0nuDRoUUkX77FPskuRd+w0If/sbPP54ovvnP4cO7XdzFE38bqOZMxMXmbOlV12L5Ez7rAHr6pJvMz3nHDj22OKVpz076STYd9/Qvm5duMMjW5WVSheJ5FD7DAgzZsD8+aG9S5fk6whSWKWlMHFiovuRR7KfVukikZxqfwGhuhquvz7R/b3vhQfJpHjiaaPf/x4++SS76eJPOOvuIpEWa38B4bbbYMOG0D5oUPIzCFIcn/88HHBAaN+6NfwtYWPi6aLOnZUuEsmB9hUQVqwIbzKsd/vtya+WkOJozhtQUx9G22uv3JdLpJ1pXwHhe9+DmprQftRR4VYyaR3iAeHZZ2HTpt2Pr4fRRHKu/QSEP/8Z5sxJdN99t24zbU1Gjky82mPnzuQKP1VlZXiXPShdJJJDWdWIZjbWzCrNbLmZTU0zvJeZzTGzRWY2z8wOjfofZGYLYk21mU2JhvU2sxfMbFn02cg/0bRAbW3ybabf/GY4Q5DWJdu0UerdRUoXieREowHBzEqA+4BxwCjgXDMblTLaDcACdz8cuBC4G8DdK919tLuPBo4EtgP1h+lTgbnuPhKYG3Xnx69+lTii7NYtXDuQ1ucb30i8Uvi//zu8WiQdPYwmkhfZnCGMAZa7+0p3rwFmARNSxhkFvAjg7kuBYWbWL2WcE4EV7l7/K58AzIjaZwBnNKP8jdu8GX7wg0T39deHu4uk9enXL/y1Yb2ZMxuO8847SheJ5Ek2AWEQsDrWvSbqF7cQOAvAzMYAQ4HBKeNMBOK/8H7uvi5qXw+kBpDcuOUW2LgxtA8dCt/5Tl4WIzkSTxulewOq0kUieZOrq6q3Az3NbAFwFfAG8I+X0phZJ+B0IO2Vwug/P9O++9jMJplZ
hZlVVFVVNb1kJ58c/g0JwhPJe9IfYu+JzjgjpPUAlixJnA3U08NoInmTTUBYC+wX6x4c9fsHd69294ujawUXAmXAytgo44DX3X1DrN8GMxsAEH1+mG7h7n6/u5e7e3lZWVkWxU0xblyoVB59VBVIW9CjB0yIZSTjF5dT00WnnVbYsons4bIJCK8BI81seHSkPxF4Kj6CmfWMhgFcCrzk7tWxUc4lOV1ENI+LovaLgCebWvisdeoUnjmo/69Uad3if5wzc2bij82VLhLJq0YDgrvXAlcCzwFvA7PdfYmZTTazydFoBwOLzayScDZwTf30ZtYdOBl4ImXWtwMnm9ky4KSoWySk+fr2De1r1sBLL4V2PYwmklel2Yzk7s8Cz6b0mxZrfxk4MMO024A+afp/RLjzSCRZx47hFtT6vyt85BEYODD8zy0oXSSSJ3pUV1qn+N1Gv/td8muxx45VukgkDxQQpHX6whdgxIjQvmVL8n9WKF0kkhcKCNI6pb4B9dNPw6fSRSJ5o4AgrVc8INQbOxb23rvwZRFpBxQQpPU66CA48sjkfkoXieSNAoK0bvGzBKWLRPJKAUFat/PPh17Rm9HPO0/pIpE8yuo5BJGi2XdfmDcvPIMwblyxSyOyR1NAkNbvgANCIyJ5pZSRiIgACggiIhJRQBAREUABQUREIgoIIiICKCCIiEhEAUFERACw8P/2bYOZVQHvFbscGfQFNha7ELuh8rWMytcyKl/LtaSMQ9290T+lb1MBoTUzswp3Ly92OTJR+VpG5WsZla/lClFGpYxERARQQBARkYgCQu7cX+wCNELlaxmVr2VUvpbLexl1DUFERACdIYiISEQBoQnMbD8z+7OZvWVmS8zsmjTjHG9mW8xsQdT8sMBlfNfM3oyWXZFmuJnZPWa23MwWmdnnCli2g2LbZYGZVZvZlJRxCrr9zGy6mX1oZotj/Xqb2Qtmtiz67JVh2rFmVhlty6kFLN+/m9nS6PubY2Y9M0y7230hj+W72czWxr7D8RmmLdb2ezxWtnfNbEGGaQux/dLWKUXbB91dTZYNMAD4XNS+F/AOMCplnOOBp4tYxneBvrsZPh74I2DAF4BXi1TOEmA94f7oom0/4MvA54DFsX53AlOj9qnAHRnKvwIYAXQCFqbuC3ks31eA0qj9jnTly2ZfyGP5bga+m8X3X5TtlzL8P4AfFnH7pa1TirUP6gyhCdx9nbu/HrVvBd4GBhW3VE02AXjIg1eAnmY2oAjlOBFY4e5FfdDQ3V8CNqX0ngDMiNpnAGekmXQMsNzdV7p7DTArmi7v5XP35929Nup8BRic6+VmK8P2y0bRtl89MzPgHGBmrpebrd3UKUXZBxUQmsnMhgFHAK+mGXxMdDr/RzM7pKAFAwf+ZGbzzWxSmuGDgNWx7jUUJ6hNJPMPsZjbD6Cfu6+L2tcD/dKM01q24/8hnPGl09i+kE9XRd/h9Azpjtaw/Y4FNrj7sgzDC7r9UuqUouyDCgjNYGY9gN8DU9y9OmXw68AQdz8cuBf4Q4GL9yV3Hw2MA64wsy8XePmNMrNOwOnAb9MMLvb2S+Lh3LxV3opnZjcCtcCjGUYp1r7wS0IaYzSwjpCWaY3OZfdnBwXbfrurUwq5DyogNJGZdSR8cY+6+xOpw9292t0/jtqfBTqaWd9Clc/d10afHwJzCKeVcWuB/WLdg6N+hTQOeN3dN6QOKPb2i2yoT6NFnx+mGaeo29HMvgWcCpwfVRgNZLEv5IW7b3D3Xe5eBzyQYbnF3n6lwFnA45nGKdT2y1CnFGUfVEBogijn+GvgbXf/WYZx+kfjYWZjCNv4owKVr7uZ7VXfTrj4uDhltKeAC6O7jb4AbImdmhZKxiOzYm6/mKeAi6L2i4An04zzGjDSzIZHZzwTo+nyzszGAv8KnO7u2zOMk82+kK/yxa9JnZlhuUXbfpGTgKXuvibdwEJtv93UKcXZB/N5BX1Pa4AvEU7dFgELomY8MBmYHI1z
JbCEcMX/FeCYApZvRLTchVEZboz6x8tnwH2EuxPeBMoLvA27Eyr4fWL9irb9CIFpHbCTkIO9BOgDzAWWAX8CekfjDgSejU07nnBXyIr6bV2g8i0n5I7r98FpqeXLtC8UqHwPR/vWIkIFNaA1bb+o/2/q97nYuMXYfpnqlKLsg3pSWUREAKWMREQkooAgIiKAAoKIiEQUEEREBFBAEBGRiAKCiIgACggiIhJRQBAREQD+PxeIb9x7ogtXAAAAAElFTkSuQmCC\n",
          "text/plain": [
           "<matplotlib.figure.Figure at 0x7fe66b03b0f0>"
          ]
         },
         "metadata": {
          "needs_background": "light"
         },
         "output_type": "display_data"
        }
       ],
    
    chadhat's avatar
    chadhat committed
       "source": [
        "# Solution:\n",
        "num_epochs = 20\n",
        "model_run = model.fit(X_train_prep, y_train_onehot, epochs=num_epochs,\n",
        "                      batch_size=512, validation_data=(X_test_prep, y_test_onehot))\n",
    
        "# Evaluating the model on test dataset\n",
    
    chadhat's avatar
    chadhat committed
        "#print(\"The [loss, accuracy] on test dataset are: \" , model.evaluate(X_test_prep, y_test_onehot))\n",
        "history_model = model_run.history\n",
        "print(\"The history has the following data: \", history_model.keys())\n",
        "\n",
        "# Plotting the training and validation accuracy during the training\n",
        "plt.plot(np.arange(1, num_epochs+1), history_model[\"acc\"], \"blue\")\n",
        "\n",
        "plt.plot(np.arange(1, num_epochs+1), history_model[\"val_acc\"], \"red\")"
       ]
      },
    
    chadhat's avatar
    chadhat committed
      {
       "cell_type": "markdown",
       "metadata": {},
       "source": [
        "### Adding regularization"
       ]
      },
    
    chadhat's avatar
    chadhat committed
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": null,
       "metadata": {},
       "outputs": [],
    
    chadhat's avatar
    chadhat committed
       "source": [
    
    chadhat's avatar
    chadhat committed
        "# Adding l2 regularization\n",
    
    chadhat's avatar
    chadhat committed
        "# Building the keras model\n",
        "from keras.models import Sequential\n",
        "from keras.layers import Dense\n",
        "from keras.regularizers import l2\n",
        "\n",
        "def mnist_model():\n",
        "    \n",
        "    model = Sequential()\n",
        "\n",
        "    model.add(Dense(64, input_shape=(28*28,), activation=\"relu\", \n",
        "                   kernel_regularizer=l2(0.01)))\n",
        "\n",
        "    model.add(Dense(64, activation=\"relu\", \n",
        "                   kernel_regularizer=l2(0.01)))\n",
        "\n",
        "    model.add(Dense(10, activation=\"softmax\"))\n",
        "\n",
        "    model.compile(loss=\"categorical_crossentropy\",\n",
        "                  optimizer=\"rmsprop\", metrics=[\"accuracy\"])\n",
        "    return model\n",
        "\n",
    
    chadhat's avatar
    chadhat committed
        "model = mnist_model()\n",
        "\n",
    
    chadhat's avatar
    chadhat committed
        "num_epochs = 50\n",
        "model_run = model.fit(X_train_prep, y_train_onehot, epochs=num_epochs,\n",
        "                      batch_size=512)"
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": null,
       "metadata": {},
       "outputs": [],
    
    chadhat's avatar
    chadhat committed
       "source": [
    
        "print(\"The [loss, accuracy] on test dataset are: \" , model.evaluate(X_test_prep, y_test_onehot))"
    
    chadhat's avatar
    chadhat committed
      {
       "cell_type": "markdown",
       "metadata": {},
       "source": [
        "### Another way to add regularization and to make the network more robust is to add something called \"Dropout\". When we add dropout to a layer, a specified percentage of the units in that layer are switched off. \n",
    
    chadhat's avatar
    chadhat committed
        "Dropout effectively makes the model simpler during training, which helps reduce overfitting.\n",
    
    chadhat's avatar
    chadhat committed
        "\n",
        "### Exercise: Add dropout instead of l2 regularization in the network above"
       ]
      },
      {
       "cell_type": "code",
       "execution_count": null,
       "metadata": {},
       "outputs": [],
       "source": [
        "# Adding dropout is easy in keras\n",
        "# We import a layer called Dropout and add as follows\n",
        "# model.add(Dropout(0.5)) to randomly drop 50% of the hidden units\n",
        "\n",
        "\n"
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": null,
       "metadata": {},
       "outputs": [],
    
    chadhat's avatar
    chadhat committed
       "source": [
        "# Solution\n",
        "# Adding Dropout\n",
        "# Building the keras model\n",
        "from keras.models import Sequential\n",
        "from keras.layers import Dense, Dropout\n",
        "\n",
        "def mnist_model():\n",
        "    \n",
        "    model = Sequential()\n",
        "\n",
        "    model.add(Dense(64, input_shape=(28*28,), activation=\"relu\"))\n",
        "              \n",
        "    model.add(Dropout(0.4))\n",
        "\n",
        "    model.add(Dense(64, activation=\"relu\"))\n",
        "\n",
        "    model.add(Dense(10, activation=\"softmax\"))\n",
        "\n",
        "    model.compile(loss=\"categorical_crossentropy\",\n",
        "                  optimizer=\"rmsprop\", metrics=[\"accuracy\"])\n",
        "              \n",
        "    return model\n",
        "\n",
        "model = mnist_model()\n",
        "\n",
        "num_epochs = 50\n",
        "model_run = model.fit(X_train_prep, y_train_onehot, epochs=num_epochs,\n",
        "                      batch_size=512)"
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": null,
       "metadata": {},
       "outputs": [],
    
    chadhat's avatar
    chadhat committed
       "source": [
        "print(\"The [loss, accuracy] on test dataset are: \" , model.evaluate(X_test_prep, y_test_onehot))"
       ]
      },
    
       "cell_type": "markdown",
    
    chadhat's avatar
    chadhat committed
       "metadata": {},
       "source": [
    
    chadhat's avatar
    chadhat committed
        "## Network Architecture\n",
        "\n",
        "The neural networks which we have seen till now are the simplest kind of neural networks.\n",
        "There exist more sophisticated network architectures especially designed for specific applications.\n",
        "Some of them are as follows:\n",
        "\n",
        "###  Convolutional Neural Networks (CNNs)\n",
        "\n",
    
    chadhat's avatar
    chadhat committed
        "These networks are used mostly for computer-vision tasks, such as image classification, object detection, and image segmentation. \n",
    
    chadhat's avatar
    chadhat committed
        "One of the early CNN architectures is shown below.\n",
        "\n",
        "<center>\n",
        "<figure>\n",
        "<img src=\"./images/neuralnets/CNN_lecun.png\" width=\"800\"/>\n",
        "<figcaption>source: LeCun et al., Gradient-based learning applied to document recognition (1998).</figcaption>\n",
        "</figure>\n",
        "</center>\n",
        "\n",
        "CNNs consist of new types of layers, such as convolution layers and pooling layers.\n",
        "\n",
        "###  Recurrent Neural Networks (RNNs)\n",
        "\n",
        "These are used for time-series data, speech recognition, translation etc.\n",
    
    chadhat's avatar
    chadhat committed
        "\n",
    
    chadhat's avatar
    chadhat committed
        "IMAGE HERE\n",
        "\n",
        "### Generative adversarial networks (GANs)\n",
        "\n",
        "GANs consist of 2 parts, a generative network and a discriminative network. The generative network produces data which is then fed to the discriminative network, which judges whether the new data belongs to a specified dataset. Then via feedback loops the generative network becomes better and better at creating images similar to the dataset the discriminative network is judging against. At the same time the discriminative network gets better and better at identifying **fake** instances which are not from the reference dataset. \n",
        "\n",
        "**TODO**: add an illustration of a GAN here."
       ]
      },
      {
       "cell_type": "markdown",
       "metadata": {},
       "source": [
    
    chadhat's avatar
    chadhat committed
        "## CNN example"
    
       ]
      },
      {
       "cell_type": "markdown",
       "metadata": {},
       "source": [
    
    chadhat's avatar
    chadhat committed
        "For this example we will work with a dataset called fashion-MNIST which is quite similar to the MNIST data above.\n",
        "> Fashion-MNIST is a dataset of Zalando's article images—consisting of a training set of 60,000 examples and a test set of 10,000 examples. Each example is a 28x28 grayscale image, associated with a label from 10 classes. We intend Fashion-MNIST to serve as a direct drop-in replacement for the original MNIST dataset for benchmarking machine learning algorithms. It shares the same image size and structure of training and testing splits.\n",
        "source: https://github.com/zalandoresearch/fashion-mnist\n",
    
    chadhat's avatar
    chadhat committed
        "The 10 classes of this dataset are:\n",
    
    chadhat's avatar
    chadhat committed
        "| Label| Item |\n",
        "| --- | --- |\n",
        "| 0 |\tT-shirt/top |\n",
        "| 1\t| Trouser |\n",
        "|2|\tPullover|\n",
        "|3|\tDress|\n",
        "|4|\tCoat|\n",
        "|5|\tSandal|\n",
        "|6|\tShirt|\n",
        "|7|\tSneaker|\n",
        "|8|\tBag|\n",
        "|9|\tAnkle boot|"
    
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": 37,
    
       "metadata": {},
       "outputs": [],
       "source": [
    
    chadhat's avatar
    chadhat committed
        "# Loading the dataset in keras\n",
        "# Later you can explore and play with other datasets which come with Keras\n",
        "from keras.datasets import fashion_mnist\n",
    
    chadhat's avatar
    chadhat committed
        "# Loading the train and test data\n",
    
    chadhat's avatar
    chadhat committed
        "(X_train, y_train), (X_test, y_test) = fashion_mnist.load_data()\n",
    
    chadhat's avatar
    chadhat committed
        "items =['T-shirt/top', 'Trouser', \n",
        "        'Pullover', 'Dress', \n",
        "        'Coat', 'Sandal', \n",
        "        'Shirt', 'Sneaker',\n",
        "        'Bag', 'Ankle boot']"
    
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": 38,
       "metadata": {},
       "outputs": [
        {
         "name": "stdout",
         "output_type": "stream",
         "text": [
          "This item is a:  T-shirt/top\n"
         ]
        },
        {
         "data": {
          "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAEUVJREFUeJzt3VuMVVWex/Hfn6K4qKggBRY0NhrwboaOpZi0GZ2M3dJmEsUYI0bDJGbwoacznXR0jPOgDz6YyXR3fJh0Qo9EkNbuiTaoER0vGWM6MWpBUFG8MFjaIpdCFFHuxX8eamNKrfNfxdnnnH2K9f0kFarO/+w6f07xY9c5a6+1zN0FID9jqm4AQDUIP5Apwg9kivADmSL8QKYIP5Apwg9kivADmSL8QKbGtvLBpk6d6rNnz27lQ2Zh9+7dNWudnZ3hsRMnTgzrZhbWDx06FNb3799fszYwMBAee+qpp4Z1fF9fX5927twZ/9AKpcJvZgskPSCpQ9J/ufv90f1nz56t3t7euh8vuhQ59Y/0ePbMM8/UrHV3d4fHXnDBBWF9zJj4l8Pt27eH9XfffbdmLfpPS5IWLlwY1kezI0eO1KylnvNIT0/PiO9b96OYWYek/5T0M0nnS1pkZufX+/0AtFaZ1/yXStrk7pvd/aCkP0q6tjFtAWi2MuGfKemvQ77+pLjtW8xsiZn1mllvf39/iYcD0EhNf7ff3Ze6e4+793R1dTX74QCMUJnwb5E0a8jXPyhuAzAKlAn/65LmmtmZZjZO0k2SnmxMWwCare6hPnc/bGb/LOl/NDjUt8zd325YZ8OIhvNSKxI1cyjw4MGDYX316tVhfcWKFWH96aefDuunnHJKzVpHR0d47K5du8J6WSeffHLdx15//fVhfc6cOWH91ltvrVm7+uqrw2Pnz58f1ssqM5zXKKXG+d19jaQ1DeoFQAtV/98PgEoQfiBThB/IFOEHMkX4gUwRfiBT1sode3p6erzMlN5meuWVV8L6TTfdVLOWeg5TcxrGjRsX1sePHx/WTzrppJq1VG979+4N66njx46NR4ujv1vq2otoLYCy9dRzeuGFF4b1ZcuWhfWzzz47rDdzSm9vb++ILmrhzA9kivADmSL8QKYIP5Apwg9kivADmWrp0t1lNXP13ttvvz2sf/nllzVrU6ZMCY+dMWNGWI+GfUYiWj77q6++Co9NLZ+dGnY6cOBAWI96Sy0bnpqOHE1lluKlv1PDqxs2bAjrixYtCutr164N6+0wpbf6DgBUgvADmSL8QKYIP5Apwg9kivADmSL8QKZG1Th/Gffcc09Y37ZtW1ifOfN7O5F94+uvvw6PLbuseGp57WjMOjW19LzzzgvrqesAUn/3jRs31qxt2rQpPHbq1KlhPbX9+OHDh+uqSendjTdv3hzWV65cGdZvueWWmrVWLUPPmR/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUyVWrrbzPok7ZE0IOmwu/dE969y6e7U3PHp06eH9WhueWpudmosPLV89jnnnBPWr7zyypq1008/PTz2ueeeC+tz584N69GceSleByGqSdKaNfEG0KnjUz/TMlLLhqds3bq1QZ1827Es3d2Ii3z+zt13NuD7AGghfu0HMlU2/C7pBTNba2ZLGtEQgNYo+2v/5e6+xcymSXrezN5195eH3qH4T2GJJJ1xxhklHw5Ao5Q687v7luLPHZJWSbp0mPssdfced+/p6uoq83AAGqju8JvZiWY26ejnkn4qKV7yFEDbKPNr/3RJq4rphWMlPeLuzzakKwBNV3f43X2zpL9pYC+lrF69OqxPmDChVD1aWz9am15Kzzu/+OKLw/oNN9wQ1h977LGatYsuuig8NrXufqr31L4Ap512Ws1aal76nXfeGdYffvjhsN7X11ezlro+IfUzTf172b17d1h/9tna58kFCxaExzYKQ31Apgg/kCnCD2SK8AOZIvxApgg/kKlSU3qPVTOn9F522WVh/b333gvrkydPDuuppZ4jqaml8+bNC+uTJk0K69EW4akhq/7+/rCemq68
b9++sB5toz1+/Pjw2JTUUOFDDz1U92Onft6p7cNTQ31nnnlmzdq6devCYyPHMqWXMz+QKcIPZIrwA5ki/ECmCD+QKcIPZIrwA5kaVVt0v/rqqzVrqXH8aCx8JKKpryeeeGJ4bGosPbWN9scffxzWV61aVbP2/vvvh8emxrNTY+nR9uBSfJ3AVVddFR6bqr/00kthPVoyPTVOn5rqnDp+2rRpYf2jjz6qWYu2NZfS26qPFGd+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyNarG+aOx0bPOOis89sMPPwzrZdY1OHjwYFifNWtWWP/888/D+sqVK4+5p6O6u7vDemq8emBgoFQ9uo7g8ccfD49NbWN9ySWXhPWoty+++CI8NnX9QmqdhNQW3tE6B88880x4LOP8AEoh/ECmCD+QKcIPZIrwA5ki/ECmCD+QqeS6/Wa2TNI/SNrh7hcWt02R9CdJsyX1SbrR3ePBajV33f6U1157Lazfd999Yf2pp56qWVu4cGF47BtvvBHW9+7dG9ZTc+qjMeOy3ztVL2Ps2Pgyky1btoT1OXPmhPVoW/U9e/aEx6bWUEjttXDHHXeE9Ztvvjms16vR6/Y/JOm7G4bfJelFd58r6cXiawCjSDL87v6ypF3fuflaScuLz5dLuq7BfQFosnpf809396PXXm6TFO9HBaDtlH7DzwffNKj5xoGZLTGzXjPrTa1lB6B16g3/djPrlqTizx217ujuS929x917urq66nw4AI1Wb/iflLS4+HyxpCca0w6AVkmG38welfSKpHPM7BMzu03S/ZJ+YmYfSLqq+BrAKJIc52+kKsf5m+mdd94J6xdccEFYP/fcc8N6am54JDVOH42FS1JnZ2dYT81rj9btL/tvL7W2frQeQGp9h9Q6COPHjw/rVWn0OD+A4xDhBzJF+IFMEX4gU4QfyBThBzI1qpbujqSGjUYwdbnuemp6aEpq+esyvZcdTiu7hXc0lJgaZkyZOHFi3ceecMIJYb1dh/IaiTM/kCnCD2SK8AOZIvxApgg/kCnCD2SK8AOZOm7G+csuQZ2amhpNbd23b1947OTJk8N6lcouzV3mOoLUY6e+d5nrBHbu3BnWp02bFtZT1z+kliVvB5z5gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IVPsPRg7RzDHlMpo9Vt7M3ssq01vq7x0t+z0SU6ZMqVnr6Ogo9b2b+TNv1c+bMz+QKcIPZIrwA5ki/ECmCD+QKcIPZIrwA5lKht/MlpnZDjPbMOS2e81si5mtLz6uaW6b3zxu3R/N5O6lPo5nzfx7V/m8Hg8/05Gc+R+StGCY23/r7vOKjzWNbQtAsyXD7+4vS9rVgl4AtFCZ1/y/MLM3i5cF7btOFYBh1Rv+30k6S9I8SVsl/brWHc1siZn1mllvf39/nQ8HoNHqCr+7b3f3AXc/Iun3ki4N7rvU3Xvcvaerq6vePgE0WF3hN7PuIV8ulLSh1n0BtKfklF4ze1TSlZKmmtknku6RdKWZzZPkkvok3d7EHgE0QTL87r5omJsfbEIvlapyznzZ9eub2XvZMesyx5ed7x8dPzAwUFdPjdIOazRwhR+QKcIPZIrwA5ki/ECmCD+QKcIPZGpULd3dTKNlGmZOyg6HRVt4l9ne+3jBmR/IFOEHMkX4gUwRfiBThB/IFOEHMkX4gUwxzl9ohymWuUldW5GadtvZ2RnWo7F8ruvgzA9ki/ADmSL8QKYIP5Apwg9kivADmSL8QKYY5y9UOe7LmHNzdHR01KxxXQdnfiBbhB/IFOEHMkX4gUwRfiBThB/IFOEHMpUc5zezWZJWSJouySUtdfcHzGyKpD9Jmi2pT9KN7v5581ptX6kx41Q9tYZ8qt7OY9Zlekv9vavcuvx4MJIz/2FJv3L38yVdJunnZna+pLskvejucyW9WHwNYJRIht/dt7r7uuLzPZI2Spop6VpJy4u7LZd0XbOaBNB4x/Sa38xmS/qRpFclTXf3
rUVpmwZfFgAYJUYcfjM7SdLjkn7p7l8Orfngi69hX4CZ2RIz6zWz3v7+/lLNAmicEYXfzDo1GPw/uPufi5u3m1l3Ue+WtGO4Y919qbv3uHtPV1dXI3oG0ADJ8NvgW6YPStro7r8ZUnpS0uLi88WSnmh8ewCaZSRTen8s6VZJb5nZ+uK2uyXdL+m/zew2SR9JurE5LY5+o3nKbjN7T33vMWO4DKWZkuF3979IqjVg+veNbQdAq/BfK5Apwg9kivADmSL8QKYIP5Apwg9kiqW7WyDnqaWj+RqH4x1nfiBThB/IFOEHMkX4gUwRfiBThB/IFOEHMsU4f6HMWHzZcfx2HgtvZm85X//QDjjzA5ki/ECmCD+QKcIPZIrwA5ki/ECmCD+QKcb5C2XGs8eOjZ/G0XwdQKr3VG9RPXVsR0dHWC8j9TPLAWd+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcylRzsNLNZklZImi7JJS119wfM7F5J/ySpv7jr3e6+plmNHs86OzvDemqsfdy4cY1s51uOHDlS6vhoLH9gYKDUY5cZq9+/f3/dxx4vRvLsHZb0K3dfZ2aTJK01s+eL2m/d/T+a1x6AZkmG3923StpafL7HzDZKmtnsxgA01zG95jez2ZJ+JOnV4qZfmNmbZrbMzCbXOGaJmfWaWW9/f/9wdwFQgRGH38xOkvS4pF+6+5eSfifpLEnzNPibwa+HO87dl7p7j7v3dHV1NaBlAI0wovCbWacGg/8Hd/+zJLn7dncfcPcjkn4v6dLmtQmg0ZLht8G3mh+UtNHdfzPk9u4hd1soaUPj2wPQLCN5t//Hkm6V9JaZrS9uu1vSIjObp8Hhvz5JtzelwxY5ePBgWI+G41LvZezatSusp6au5vpeyaRJk8L6mDHxuWv37t01a5999lldPR2VGqYcDVOGR/Ju/18kDTfQzJg+MIpxhR+QKcIPZIrwA5ki/ECmCD+QKcIPZKr9ByNbZMKECXUfe8UVV4T1Rx55JKzPmDEjrKeuQYim/KbGwlP1KserDx06FNYPHDgQ1j/99NOatfnz59fV01GjYRw/hTM/kCnCD2SK8AOZIvxApgg/kCnCD2SK8AOZslZu/2xm/ZI+GnLTVEk7W9bAsWnX3tq1L4ne6tXI3n7o7iNaL6+l4f/eg5v1untPZQ0E2rW3du1Lord6VdUbv/YDmSL8QKaqDv/Sih8/0q69tWtfEr3Vq5LeKn3ND6A6VZ/5AVSkkvCb2QIze8/MNpnZXVX0UIuZ9ZnZW2a23sx6K+5lmZntMLMNQ26bYmbPm9kHxZ/DbpNWUW/3mtmW4rlbb2bXVNTbLDP7XzN7x8zeNrN/KW6v9LkL+qrkeWv5r/1m1iHpfUk/kfSJpNclLXL3d1raSA1m1iepx90rHxM2s7+V9JWkFe5+YXHbv0va5e73F/9xTnb3f22T3u6V9FXVOzcXG8p0D91ZWtJ1kv5RFT53QV83qoLnrYoz/6WSNrn7Znc/KOmPkq6toI+25+4vS/rujh/XSlpefL5cg/94Wq5Gb23B3be6+7ri8z2Sju4sXelzF/RViSrCP1PSX4d8/Ynaa8tvl/SCma01syVVNzOM6cW26ZK0TdL0KpsZRnLn5lb6zs7SbfPc1bPjdaPxht/3Xe7u8yT9TNLPi19v25IPvmZrp+GaEe3c3CrD7Cz9jSqfu3p3vG60KsK/RdKsIV//oLitLbj7luLPHZJWqf12H95+dJPU4s8dFffzjXbauXm4naXVBs9dO+14XUX4X5c018zONLNxkm6S9GQFfXyPmZ1YvBEjMztR0k/VfrsPPylpcfH5YklPVNjLt7TLzs21dpZWxc9d2+147e4t/5B0jQbf8f8/Sf9WRQ81+jpL0hvFx9tV9ybpUQ3+GnhIg++N3CbpNEkvSvpA0guSprRRbw9LekvSmxoMWndFvV2uwV/p35S0vvi4purnLuirkueNK/yATPGGH5Apwg9kivADmSL8QKYIP5Ap
wg9kivADmSL8QKb+H3+fT5X+ci0YAAAAAElFTkSuQmCC\n",
          "text/plain": [
           "<matplotlib.figure.Figure at 0x7fe64931db38>"
          ]
         },
         "metadata": {
          "needs_background": "light"
         },
         "output_type": "display_data"
        }
       ],
    
    chadhat's avatar
    chadhat committed
       "source": [
        "# We can see that the training set consists of 60,000 images of size 28x28 pixels\n",
        "import matplotlib.pyplot as plt\n",
        "import numpy as np\n",
        "i=np.random.randint(0,X_train.shape[0])\n",
        "plt.imshow(X_train[i], cmap=\"gray_r\") ; \n",
        "print(\"This item is a: \" , items[y_train[i]])"
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": 39,
       "metadata": {},
       "outputs": [
        {
         "name": "stdout",
         "output_type": "stream",
         "text": [
          "(60000, 10)\n"
         ]
        }
       ],
    
    chadhat's avatar
    chadhat committed
       "source": [
        "# Also we need to reshape the input data such that each sample is a 4D matrix of dimension\n",
        "# (num_samples, width, height, channels). Even though these images are grayscale, we need to add\n",
        "# a channel dimension, as this is expected by the Conv2D layer\n",
        "X_train_prep = X_train.reshape(X_train.shape[0],28,28,1)/255.\n",
        "X_test_prep = X_test.reshape(X_test.shape[0],28,28,1)/255.\n",
        "\n",
        "from keras.utils.np_utils import to_categorical\n",
        "\n",
        "y_train_onehot = to_categorical(y_train, num_classes=10)\n",
        "y_test_onehot = to_categorical(y_test, num_classes=10)\n",
        "\n",
        "print(y_train_onehot.shape)"
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": null,
       "metadata": {},
       "outputs": [],
    
       "source": [
    
    chadhat's avatar
    chadhat committed
        "# Creating a CNN similar to the one shown in the figure from LeCun paper\n",
        "# In the original implementation Average pooling was used. However, we will use maxpooling as this \n",
        "# is what is used in the more recent architectures and is found to be a better choice\n",
        "# Convolution -> Pooling -> Convolution -> Pooling -> Flatten -> Dense -> Dense -> Output layer\n",
    
        "from keras.models import Sequential\n",
    
    chadhat's avatar
    chadhat committed
        "from keras.layers import Dense, Conv2D, MaxPool2D, Flatten, Dropout, BatchNormalization\n",
    
    chadhat's avatar
    chadhat committed
        "def simple_CNN():\n",
        "    \n",
        "    model = Sequential()\n",
        "    \n",
        "    model.add(Conv2D(6, (3,3), input_shape=(28,28,1), activation='relu'))\n",
        "    \n",
        "    model.add(MaxPool2D((2,2)))\n",
        "    \n",
        "    model.add(Conv2D(16, (3,3), activation='relu'))\n",
        "    \n",
        "    model.add(MaxPool2D((2,2)))\n",
        "    \n",
        "    model.add(Flatten())\n",
        "    \n",
        "    model.add(Dense(120, activation='relu'))\n",
        "    \n",
        "    model.add(Dense(84, activation='relu'))\n",
        "    \n",
        "    model.add(Dense(10, activation='softmax'))\n",
        "    \n",
        "    model.compile(loss=\"categorical_crossentropy\", optimizer=\"rmsprop\", metrics=[\"accuracy\"])\n",
        "    \n",
        "    return model\n",
    
    chadhat's avatar
    chadhat committed
        "model = simple_CNN()\n",
        "model.summary()"
    
       ]
      },
      {
       "cell_type": "code",
    
    chadhat's avatar
    chadhat committed
       "execution_count": null,
       "metadata": {},
       "outputs": [],
    
       "source": [
    
    chadhat's avatar
    chadhat committed
        "num_epochs = 10\n",
        "model_run = model.fit(X_train_prep, y_train_onehot, epochs=num_epochs, \n",
        "                      batch_size=64, validation_data=(X_test_prep, y_test_onehot))"
    
       ]
      },
      {
       "cell_type": "markdown",
       "metadata": {},
       "source": [
    
    chadhat's avatar
    chadhat committed
        "### Exercise: Use the above model or improve it (change the number of filters, add more layers, etc.) on the MNIST example and see if you can get a better accuracy than what we achieved with a vanilla neural network"
    
       ]
      },
      {
       "cell_type": "markdown",
       "metadata": {},
       "source": [
    
    chadhat's avatar
    chadhat committed
        "### Exercise: Load and play with the CIFAR10 dataset also included with Keras and build+train a simple CNN using it"
    
    chadhat's avatar
    chadhat committed
      }
     ],
     "metadata": {
      "kernelspec": {
       "display_name": "Python 3",
       "language": "python",
       "name": "python3"
      },
      "language_info": {
       "codemirror_mode": {
        "name": "ipython",
        "version": 3
       },
       "file_extension": ".py",
       "mimetype": "text/x-python",
       "name": "python",
       "nbconvert_exporter": "python",
       "pygments_lexer": "ipython3",
    
    chadhat's avatar
    chadhat committed
       "version": "3.6.0"
    
      },
      "latex_envs": {
       "LaTeX_envs_menu_present": true,
       "autoclose": false,
       "autocomplete": true,
       "bibliofile": "biblio.bib",
       "cite_by": "apalike",
       "current_citInitial": 1,
       "eqLabelWithNumbers": true,
       "eqNumInitial": 1,
       "hotkeys": {
        "equation": "Ctrl-E",
        "itemize": "Ctrl-I"
       },
       "labels_anchors": false,
       "latex_user_defs": false,
       "report_style_numbering": false,
       "user_envs_cfg": false
    
    chadhat's avatar
    chadhat committed
      }
     },
     "nbformat": 4,
     "nbformat_minor": 2
    }