Skip to content

Instantly share code, notes, and snippets.

@veb-101
Last active May 16, 2020 17:47
Show Gist options
  • Save veb-101/7b291af64dea2cca8ce6aca0d4c53f56 to your computer and use it in GitHub Desktop.
Scrambled_image.ipynb
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Scrambled_image.ipynb",
"provenance": [],
"collapsed_sections": [],
"authorship_tag": "ABX9TyOX2s7wkeuuQ3Xrz3UowHdp",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/veb-101/7b291af64dea2cca8ce6aca0d4c53f56/scrambled_image.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "ynMiRwZJSTyp",
"colab_type": "code",
"colab": {}
},
"source": [
"import tensorflow as tf\n",
"from tensorflow.keras.datasets import mnist\n",
"from tensorflow.keras.models import Sequential\n",
"from tensorflow.keras.layers import Dense, Dropout, BatchNormalization\n",
"import pandas as pd\n",
"import numpy as np\n",
"from sklearn.model_selection import train_test_split"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "GIa6NhfxpxLW",
"colab_type": "code",
"outputId": "6846f65c-4ddb-4b7e-fa90-82187b9c70dd",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 312
}
},
"source": [
"# Load MNIST and flatten each 28x28 image into a 784-long float vector in [0, 1].\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
"# Derive the sample count from the data instead of hard-coding 60000/10000,\n",
"# so the cell keeps working if the dataset split ever changes.\n",
"x_train = x_train.reshape(x_train.shape[0], -1)\n",
"x_test = x_test.reshape(x_test.shape[0], -1)\n",
"\n",
"x_train = x_train.astype('float32')\n",
"x_test = x_test.astype('float32')\n",
"\n",
"# Scale raw pixel intensities (0-255) down to [0, 1].\n",
"x_train /= 255\n",
"x_test /= 255\n",
"\n",
"print(x_train.shape, 'train samples')\n",
"print(x_test.shape, 'test samples')\n",
"\n",
"# Label the pixel columns 1..784 so a column permutation can scramble pixels later.\n",
"cols = list(range(1, 785))\n",
"\n",
"x_train = pd.DataFrame(x_train, columns=cols)\n",
"x_test = pd.DataFrame(x_test, columns=cols)\n",
"\n",
"print(x_train.shape, 'train samples')\n",
"print(x_test.shape, 'test samples')\n",
"x_train.head()"
],
"execution_count": 2,
"outputs": [
{
"output_type": "stream",
"text": [
"(60000, 784) train samples\n",
"(10000, 784) test samples\n",
"(60000, 784) train samples\n",
"(10000, 784) test samples\n"
],
"name": "stdout"
},
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>1</th>\n",
" <th>2</th>\n",
" <th>3</th>\n",
" <th>4</th>\n",
" <th>5</th>\n",
" <th>6</th>\n",
" <th>7</th>\n",
" <th>8</th>\n",
" <th>9</th>\n",
" <th>10</th>\n",
" <th>11</th>\n",
" <th>12</th>\n",
" <th>13</th>\n",
" <th>14</th>\n",
" <th>15</th>\n",
" <th>16</th>\n",
" <th>17</th>\n",
" <th>18</th>\n",
" <th>19</th>\n",
" <th>20</th>\n",
" <th>21</th>\n",
" <th>22</th>\n",
" <th>23</th>\n",
" <th>24</th>\n",
" <th>25</th>\n",
" <th>26</th>\n",
" <th>27</th>\n",
" <th>28</th>\n",
" <th>29</th>\n",
" <th>30</th>\n",
" <th>31</th>\n",
" <th>32</th>\n",
" <th>33</th>\n",
" <th>34</th>\n",
" <th>35</th>\n",
" <th>36</th>\n",
" <th>37</th>\n",
" <th>38</th>\n",
" <th>39</th>\n",
" <th>40</th>\n",
" <th>...</th>\n",
" <th>745</th>\n",
" <th>746</th>\n",
" <th>747</th>\n",
" <th>748</th>\n",
" <th>749</th>\n",
" <th>750</th>\n",
" <th>751</th>\n",
" <th>752</th>\n",
" <th>753</th>\n",
" <th>754</th>\n",
" <th>755</th>\n",
" <th>756</th>\n",
" <th>757</th>\n",
" <th>758</th>\n",
" <th>759</th>\n",
" <th>760</th>\n",
" <th>761</th>\n",
" <th>762</th>\n",
" <th>763</th>\n",
" <th>764</th>\n",
" <th>765</th>\n",
" <th>766</th>\n",
" <th>767</th>\n",
" <th>768</th>\n",
" <th>769</th>\n",
" <th>770</th>\n",
" <th>771</th>\n",
" <th>772</th>\n",
" <th>773</th>\n",
" <th>774</th>\n",
" <th>775</th>\n",
" <th>776</th>\n",
" <th>777</th>\n",
" <th>778</th>\n",
" <th>779</th>\n",
" <th>780</th>\n",
" <th>781</th>\n",
" <th>782</th>\n",
" <th>783</th>\n",
" <th>784</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.576471</td>\n",
" <td>0.988235</td>\n",
" <td>0.164706</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>5 rows × 784 columns</p>\n",
"</div>"
],
"text/plain": [
" 1 2 3 4 5 6 7 ... 778 779 780 781 782 783 784\n",
"0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"\n",
"[5 rows x 784 columns]"
]
},
"metadata": {
"tags": []
},
"execution_count": 2
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "gLNfd58Xd_Ss",
"colab_type": "code",
"outputId": "e7b2248e-7842-4180-d1df-dfd68d49c726",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 245
}
},
"source": [
"# Shuffle the rows. Fixing random_state makes the row order reproducible\n",
"# under Restart & Run All (the original unseeded sample() was not).\n",
"x_train = x_train.sample(frac=1, random_state=42)\n",
"x_test = x_test.sample(frac=1, random_state=42)\n",
"\n",
"x_train.head()"
],
"execution_count": 3,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>1</th>\n",
" <th>2</th>\n",
" <th>3</th>\n",
" <th>4</th>\n",
" <th>5</th>\n",
" <th>6</th>\n",
" <th>7</th>\n",
" <th>8</th>\n",
" <th>9</th>\n",
" <th>10</th>\n",
" <th>11</th>\n",
" <th>12</th>\n",
" <th>13</th>\n",
" <th>14</th>\n",
" <th>15</th>\n",
" <th>16</th>\n",
" <th>17</th>\n",
" <th>18</th>\n",
" <th>19</th>\n",
" <th>20</th>\n",
" <th>21</th>\n",
" <th>22</th>\n",
" <th>23</th>\n",
" <th>24</th>\n",
" <th>25</th>\n",
" <th>26</th>\n",
" <th>27</th>\n",
" <th>28</th>\n",
" <th>29</th>\n",
" <th>30</th>\n",
" <th>31</th>\n",
" <th>32</th>\n",
" <th>33</th>\n",
" <th>34</th>\n",
" <th>35</th>\n",
" <th>36</th>\n",
" <th>37</th>\n",
" <th>38</th>\n",
" <th>39</th>\n",
" <th>40</th>\n",
" <th>...</th>\n",
" <th>745</th>\n",
" <th>746</th>\n",
" <th>747</th>\n",
" <th>748</th>\n",
" <th>749</th>\n",
" <th>750</th>\n",
" <th>751</th>\n",
" <th>752</th>\n",
" <th>753</th>\n",
" <th>754</th>\n",
" <th>755</th>\n",
" <th>756</th>\n",
" <th>757</th>\n",
" <th>758</th>\n",
" <th>759</th>\n",
" <th>760</th>\n",
" <th>761</th>\n",
" <th>762</th>\n",
" <th>763</th>\n",
" <th>764</th>\n",
" <th>765</th>\n",
" <th>766</th>\n",
" <th>767</th>\n",
" <th>768</th>\n",
" <th>769</th>\n",
" <th>770</th>\n",
" <th>771</th>\n",
" <th>772</th>\n",
" <th>773</th>\n",
" <th>774</th>\n",
" <th>775</th>\n",
" <th>776</th>\n",
" <th>777</th>\n",
" <th>778</th>\n",
" <th>779</th>\n",
" <th>780</th>\n",
" <th>781</th>\n",
" <th>782</th>\n",
" <th>783</th>\n",
" <th>784</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>9264</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1473</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>20096</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>50849</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>10858</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>...</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>5 rows × 784 columns</p>\n",
"</div>"
],
"text/plain": [
" 1 2 3 4 5 6 7 ... 778 779 780 781 782 783 784\n",
"9264 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"1473 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"20096 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"50849 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"10858 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n",
"\n",
"[5 rows x 784 columns]"
]
},
"metadata": {
"tags": []
},
"execution_count": 3
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "hWyfMbqFwU9d",
"colab_type": "code",
"outputId": "9ac9ce3a-eba0-4e49-8a43-4463c8360d04",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 84
}
},
"source": [
"# Scramble pixels: apply one fixed random permutation of the 784 columns to\n",
"# every image. Seed the RNG first so the permutation - the central artifact\n",
"# of this notebook - is reproducible across runs.\n",
"np.random.seed(42)\n",
"\n",
"new_cols = cols[:]\n",
"np.random.shuffle(new_cols)\n",
"\n",
"# Reindexing by the permuted column labels scrambles the pixels identically\n",
"# for train and test.\n",
"S_x_train = x_train[new_cols]\n",
"S_x_test = x_test[new_cols]\n",
"\n",
"print(x_train.shape)\n",
"print(S_x_train.shape)\n",
"print(x_test.shape)\n",
"print(S_x_test.shape)"
],
"execution_count": 4,
"outputs": [
{
"output_type": "stream",
"text": [
"(60000, 784)\n",
"(60000, 784)\n",
"(10000, 784)\n",
"(10000, 784)\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "Zj4Yd_JzhAzw",
"colab_type": "code",
"outputId": "bc041a89-f36f-47f1-8b9f-0b376f8e9256",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 33
}
},
"source": [
"# Hold out 10% of the pairs (original, scrambled) for validation; splitting\n",
"# both frames in one call keeps their rows aligned. random_state makes the\n",
"# split reproducible.\n",
"x_train, x_val, S_x_train, S_x_val = train_test_split(x_train, S_x_train, test_size=0.1, random_state=42)\n",
"\n",
"len(x_train), len(x_val), len(S_x_train), len(S_x_val)"
],
"execution_count": 5,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"(54000, 6000, 54000, 6000)"
]
},
"metadata": {
"tags": []
},
"execution_count": 5
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "8nVvyMEy0toy",
"colab_type": "code",
"outputId": "5d5752a8-7eba-498d-cad0-b9d9e282f9c1",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 250
}
},
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"# Show one randomly chosen digit next to its pixel-scrambled counterpart.\n",
"img_num = np.random.choice(len(x_train))\n",
"\n",
"fig, (ax_orig, ax_scram) = plt.subplots(1, 2, figsize=(8, 8))\n",
"\n",
"ax_orig.imshow(x_train.iloc[img_num].values.reshape(28, 28), cmap=\"gray\")\n",
"ax_orig.axis(\"off\")\n",
"ax_orig.set_title(\"Original image\")\n",
"\n",
"ax_scram.imshow(S_x_train.iloc[img_num].values.reshape(28, 28), cmap=\"gray\")\n",
"ax_scram.axis(\"off\")\n",
"ax_scram.set_title(\"Scrambled image\")\n",
"\n",
"plt.show()"
],
"execution_count": 6,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAcwAAADpCAYAAACpzQe3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAATIElEQVR4nO3df5BV5X3H8c9XlB8rv4yxDL8ER4JRQHcKJQRQmcHSKAZ/DIUANbEZhBrF6mhMG0cLTckkmA6psVWE6dipv9gy2ulIE7Fa2hlxLcN0VcBgsUFRkCACAouIu0//OId6RZ7nPnvOnnvv7r5fM8xc7vee5zx3uc9+7rn3fDnmnBMAAAg7rdoTAACgIyAwAQCIQGACABCBwAQAIAKBCQBABAITAIAIBGYOZvZDM1vV3o+NGMuZ2QhP7Zdm9p322A+AOGb2qJn9VaDuXbMRY7Pea8Tp1Z5ArTCzGyXdKel8SR9JekbSnzvnDvi2cc79OHb8tjw2D+fclZXYD1ANZjZZ0jJJoyS1SHpD0u3OuY1VnViVsN4riyNMSWZ2p6SfSvq+pH6SJkgaJul5M+vu2YY3G0AFmVlfSc9K+oWkL0kaLGmJpGNtHIe1i0y6fGCmi3CJpEXOuV85544753ZImiVpuKQ/Sh+32MzWmNljZvaRpBvT+x4rGevbZva2me0zs3vNbIeZXVGy/WPp7eHpxyzfMbN3zOwDM7unZJzxZvaymR0ws91m9qAvuE/xfNab2fz09o1m9pKZLU/H+l8zm5jev9PMflv6cY6ZTTez/zazj9L64pPGDj2/08zsz8zsrbTeYGZfavM/COA3UpKcc08651qcc0edc+ucc6+deICZ3WRmb5jZITPbama/m96/w8x+YGavSTpiZqeXvF5PPPa6knHatHZSXzaz59Px/sPMhp3qSZhZDzP7Wbr295jZw2bWq6T+/XTd7zKz74Z+IKz3yurygSlpoqSekp4uvdM5d1jSv0r6/ZK7r5G0RlJ/SY+XPt7MLpL0d5LmSRqo5Eh1cJl9T5Z0gaSpku4zswvT+1sk3SHpy5K+nta/18bndcLXJL0m6WxJT0h6StLvSRqh5M3Ag2bWO33sEUnfTp/fdEk3m9m1kc9vkaRrJV0uaZCk/ZL+NuOcgVN5U1KLmf2DmV1pZmeVFs3sDyUtVvIa7itphqR9JQ+Zo+R13d8596mktyRdquS1vETSY2Y2sOTxbVk7UrI2fqRk3TbppN8RJX6iJPzr07EGS7ovfQ7fkHSXkt87X5F0RcTPpRTrvUjOuS79R8mL6H1P7SeSnk9vL5b0nyfVF0t6LL19n6QnS2p1kj6RdMUpHjtckpM0pOTx/yXpW5553C7pmZK/O0kjPI9dL2l+evtGSf9TUhuTbjug5L59kuo9Y/1c0vLI5/eGpKkl9YGSjks6vdr/xvzpPH8kXSjpUUnvSvpU0r+ceD1Lek7Sn3q22yHpu2XGbpJ0TXq7TWsnndNTJbXeSt74Dk3/7pSElikJqvNLHvt1Sb9Jb/+9pJ+U1Eay3mvnD0eY0gdKPko51fcaA9P6CTsD4wwqrTvnmvX5d7en8n7J7WYli0xmNtLMnjWz9y35+PfHSt61ZrGn5PbRdG4n33div18zs383s71mdlDSn5Tst9zzGybpmfSjoANKFlSLpAEZ5w18gXPuDefcjc65IZJGK3ld/jwtD1Vy1OjzufWbfuTYVPKaHa3Pr7PotXPy+C75hOrDdH6lzlESPptK9vur9H7ppHUm6e3A8zkV1nuBCEzpZSUnDVxfemf6scWVkl4ouTt0aZfdkoaUbN9LycciWTwk6deSvuKc6yvph0remRbtCSXv2Ic65/pJerhkv+We305JVzrn+pf86emce68C80YX5Jz7tZIju9HpXTuVnOXu3eTEjfT7xZWSbpV0tnOuv6TNyrfOhpaM31vJiUm7TnrMB0pCa1TJOunnnDsRvL
tLx5F0bo75lMN6b6MuH5jOuYNKvr/4hZl9w8zOMLPhkhqUfOzzj5FDrZH0zfRL9u5KPoLNuvj6KGltOWxmX5V0c8Zxsuz3Q+fcx2Y2XtLcklq55/ewpKUnTnQws3PM7JoKzRtdgJl91czuNLMh6d+HKvlesjF9yCpJd5nZWEuM8J14I+lMJQG6Nx3rj/VZ8GZ1lZlNTtfHjyQ1Ouc+d1TrnGtVEtTLzex30n0PNrM/SB/SoOSEwovMrE7SX+ScUwjrvY26fGBKknNumZKjuJ8pCapXlLyDmuqcizpl3Tm3RckX4U8peXd2WNJv1cZT3lN3KXnxHlKyuFZnGCOL70n6SzM7pOQ7jIYThYjn9zdK3q2uS7dvVHICAtBeDil5Tb1iZkeUvMY2K+mflnPunyQtVXLkdEjSPys5yvsC59xWSX+t5BOmPUq+73sp5/yeUBJwH0oaq/QM+1P4gaTtkhrTr1z+TcnJf3LO/VLJR8wvpo95MeecQljvbWTpF7ZoZ+lHMgeUfKz6m2rPp7119ucH4DOs9wRHmO3IzL5pZnVmdqaSo9XXlZyd1yl09ucH4DOs9y8iMNvXNUq+5N+lpIfqW65zHcJ39ucH4DOs95PwkSwAABE4wgQAIAKBCQBAhOD/2m9mfF4LRHDOVeI/lsilGuv5nHPO8db27t1bwZnkN2XKFG9t/fr1FZtHNX366afe2umn++Okrq4uOG5zc3Om+fTq1ctbO3r0aKYxJf965ggTAIAIBCYAABEITAAAIhCYAABEIDABAIhAYAIAECH4P/3QVgLE6QhtJa2trd713K1bt0pORVL2FoVa1NLS4q2FfrajRo0Kjrtly5bMc+pIHnnkEW9twYIFFZxJgrYSAAByIDABAIhAYAIAEIHABAAgAoEJAEAEAhMAgAi0lQDtoCO0ldTaeq6vr/fWmpqaKjiTYmVtOcnjtttu89YeeOCBzOPWWitQUfOhrQQAgBwITAAAIhCYAABEIDABAIhAYAIAEIHABAAgAoEJAEAE+jCBdtDR+zA7U09krfUKzpkzx1t78sknKziTON27d/fWPvnkE2/tjDPO8NaOHz+ea06VRh8mAAA5EJgAAEQgMAEAiEBgAgAQgcAEACACgQkAQATaSoB20BHaSlpaWrzruRrtFrVmw4YNwfrEiRO9tfnz53trq1atyjynjqQalzErCm0lAADkQGACABCBwAQAIAKBCQBABAITAIAIBCYAABFoKwHaQUdoK8m6nidMmBCsNzY2ZppPNdTalUzKaW5u9tbq6uoqOJNER/v5ZUVbCQAAORCYAABEIDABAIhAYAIAEIHABAAgAoEJAEAE2kqqZObMmd7apEmTvLXrr78+OO6QIUO8tdNO878/am1tDY5baaHXpSTNnTvXW2toaGjv6ZTVmdtKQq0EUudqJ0DH8vrrr3trY8aMyTwubSUAAORAYAIAEIHABAAgAoEJAEAEAhMAgAgEJgAAEQhMAAAi0IdZoGXLlnlrt956q7fWvXv3IqYjM3+rYLm+xyLs2bPHW9u0aVNw2xkzZrT3dHLpzH2Y5axfv95bmzJlirc2dOhQb23nzp05ZpTNypUrg/WbbrqpQjOprpdeeslb27x5s7c2depUb23EiBG55lRp9GECAJADgQkAQAQCEwCACAQmAAARCEwAACIQmAAARKCtpEAbNmzw1saPH1/IPrds2eKtHT582FsLnS5elI0bN3prq1atquBM8uvKbSVZhdqK3nzzzeC2l156aXtPBzUqdHm5oi4tR1sJAAA5EJgAAEQgMAEAiEBgAgAQgcAEACACgQkAQIRizslFLlu3bvXWJkyYENw2dAp2qIXo+PHj5SeGDq1MC1kFZ5IYMGBAIeMeOXLEWzvzzDO9tZ49ewbH/fjjjzPPyeeee+4J1pcuXdru+8yjGi0eRY2bBUeYAABEIDABAIhAYAIAEIHABAAgAoEJAEAEAhMAgAi1c75uB9SnT59gvXfv3pnGbW1t9d
aam5szjQlkbR05ePBgsN6vX79M4+YRuppJqHUkpIi2kXJqrW2knFpq8Sjn7bffDta3bdvW5jE5wgQAIAKBCQBABAITAIAIBCYAABEITAAAIhCYAABEIDABAIjQcZpqatCoUaOC9YsuuqhCMwGKM378+GpP4QtGjhzprRV1CapqXNpq9uzZ3trq1asL2WdI6PKCjY2N3tqgQYOC4+7atSvznHyGDRuWeVvfpfA4wgQAIAKBCQBABAITAIAIBCYAABEITAAAIhCYAABEoK2kBm3cuLHaUwD+X5bLIBUta4tHntaQIlpH6uvrg/WsrSMPPfSQt3bzzTdnGlOSRo8e7a0dPXrUW3v11Vcz77OWcIQJAEAEAhMAgAgEJgAAEQhMAAAiEJgAAEQgMAEAiEBbSQ2qq6ur9hSAaKFWjS1btnhrl1xySRHTCSrqqiIhLS0t3lpRaz3UOnL22WcHt923b5+3tmrVqsxzqrTt27cH6yNGjGjzmBxhAgAQgcAEACACgQkAQAQCEwCACAQmAAARCEwAACLQVlKDrr76am9txYoVwW3vuOMOb625uTnznACfarRq7Nq1q+L7nDBhgrfW2NiYacxjx45lnU5mobaRPPJcCSarUOtIlraRcjjCBAAgAoEJAEAEAhMAgAgEJgAAEQhMAAAiEJgAAEQgMAEAiGDOOX/RzF+EevToEaxv2LDBWyvq0kbvvvuut3bttdd6a01NTUVMp8twzlm151AO6zks1EcoVafftAihflIpe09pUUK/m+rr6wvZp289c4QJAEAEAhMAgAgEJgAAEQhMAAAiEJgAAEQgMAEAiEBbSYFmz57trd1///3eWr9+/by13r17Z57PoUOHvLWrrrrKWwu1xyBBW8mp7d6921sbOHBg5nGrcSmpkLFjx3prjz76qLc2ZsyYAmaTT0tLi7fWrVu3Cs4knzxtQrSVAACQA4EJAEAEAhMAgAgEJgAAEQhMAAAiEJgAAESgraRAF1xwgbc2btw4b23z5s3e2uWXXx7c59KlS721uro6b+2FF17w1qZNmxbcJzp+W8njjz/u3W7evHmZ9zlx4kRvrah2pawtJ8uXLw+Ou2jRokzjdjR9+vTx1kKvkxkzZnhrBw8eDO4z1Eq3Y8cOb2348OHBcbOirQQAgBwITAAAIhCYAABEIDABAIhAYAIAEIHABAAgAm0lncz27du9tfPOO89b27p1q7d22WWXBfe5f//+8hPr5Dp6WwmKM2vWLG+toaEhuO2KFSu8tYULF2aeU0cyffp0b23t2rWF7JO2EgAAciAwAQCIQGACABCBwAQAIAKBCQBABAITAIAIBCYAABHow+xksvZhvvXWW97apEmTgvvcu3dv+Yl1cvRhdn5ZLxtWDXnmWsTzDI0pSX379vXWmpubM+0zD/owAQDIgcAEACACgQkAQAQCEwCACAQmAAARCEwAACLU1rnQqJodO3Z4a7SNoNZUo8Uj67jLli3z1u6+++6s0wnK8zPIum1LS4u31q1bt+C2HaVlhyNMAAAiEJgAAEQgMAEAiEBgAgAQgcAEACACgQkAQITaOV+3RvXs2dNbu+6664Lbrlu3zlsbOnSotxZq8bjllluC+xw4cGCw7rNt27ZM26FzmDlzpre2Zs2azONu2rTJWxs7dmzmcWup1aCcUOtIuat4FHF1kKJ+duVaR0Kq0coyb968Nu+PI0wAACIQmAAARCAwAQCIQGACABCBwAQAIAKBCQBABHPO+Ytm/mInUl9f762tXbvWWwud0ixJkyZN8tbOP/98b23x4sXe2uTJk4P7DHn66ae9tRtuuMFbO3bsWOZ9dhXOOav2HMppbW31rufVq1d7t5s7d24h88njrLPO8tb2799fwZmgPYR+l77yyive2sSJE4uYjnc9c4QJAEAEAhMAgAgEJgAAEQhMAAAiEJgAAEQgMAEAiEBgAgAQoeNcI6dA48aN89YGDBjgre3duzc47r333uutzZkzx1vr1atXcN
yQffv2eWtLlizx1ui17PzyXH4ppKmpyVsL9TjnkbXXshqXvepMGhoavLVZs2ZlHreo12Z74wgTAIAIBCYAABEITAAAIhCYAABEIDABAIhAYAIAEIHLe0kaNGiQt/bcc895axdeeGER0wl6+eWXg/Xbb7/dW9u0aVN7TwepjnB5r460nvv37x+sHzhwwFtbsWKFt7Zw4cLMc+osQq01Uri9JvRzL/dvllXo33P+/PneWp5WFS7vBQBADgQmAAARCEwAACIQmAAARCAwAQCIQGACABCBtpIypk+f7q3NmzcvuG3of+9fuXKlt/bss896ay+++GJwn0ePHg3WUYyu3FbS0tLirTU3N3trffr0KWI6wbaJrVu3emsXX3xxEdOpOe+9916wPnjw4ArNJE7od/DatWsL2SdtJQAA5EBgAgAQgcAEACACgQkAQAQCEwCACAQmAAARaCsB2kFXbiuphkOHDnlrRbWrhNpnQlfGCLVx1FoLR1cSugrKggULaCsBACArAhMAgAgEJgAAEQhMAAAiEJgAAEQgMAEAiEBgAgAQgT5MoB10hD7MBx980LueFy1aVMmpdDrvvPOOt3buuedWcCaJdevWeWvTpk2r4Ezy69Gjh7e2ceNGby3P5dq4vBcAADkQmAAARCAwAQCIQGACABCBwAQAIAKBCQBAhGBbCQAASHCECQBABAITAIAIBCYAABEITAAAIhCYAABEIDABAIjwf7VYr+sR1Ga/AAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 576x576 with 2 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "9aVxKVn2vjms",
"colab_type": "code",
"colab": {}
},
"source": [
"def get_model():\n",
" model = Sequential()\n",
" model.add(Dense(512, activation='relu', input_shape=(784,)))\n",
" model.add(Dense(400, activation='relu'))\n",
" model.add(Dense(512, activation='relu'))\n",
" model.add(Dense(300, activation='relu'))\n",
" model.add(Dense(400, activation='relu'))\n",
" model.add(BatchNormalization())\n",
" model.add(Dense(400, activation='relu'))\n",
" model.add(BatchNormalization())\n",
" model.add(Dense(512, activation='relu'))\n",
" model.add(Dense(784, activation='sigmoid'))\n",
" \n",
" return model"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "HDo1KJ6bz4td",
"colab_type": "code",
"outputId": "b3d7c27d-6b62-42ae-c171-30b54c0e6afb",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 485
}
},
"source": [
"try:\n",
" model = tf.keras.models.load_model(\"model.h5\")\n",
"except Exception:\n",
" model = get_model()\n",
"\n",
"initial_learning_rate = 1e-4\n",
"lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(\n",
" initial_learning_rate,\n",
" decay_steps=100,\n",
" decay_rate=0.96,\n",
" staircase=True)\n",
"\n",
"optimizer = tf.keras.optimizers.Adam(learning_rate=lr_schedule)\n",
"model.compile(loss=\"mse\", optimizer=optimizer)\n",
"model.summary()"
],
"execution_count": 8,
"outputs": [
{
"output_type": "stream",
"text": [
"Model: \"sequential\"\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"dense (Dense) (None, 512) 401920 \n",
"_________________________________________________________________\n",
"dense_1 (Dense) (None, 400) 205200 \n",
"_________________________________________________________________\n",
"dense_2 (Dense) (None, 512) 205312 \n",
"_________________________________________________________________\n",
"dense_3 (Dense) (None, 300) 153900 \n",
"_________________________________________________________________\n",
"dense_4 (Dense) (None, 400) 120400 \n",
"_________________________________________________________________\n",
"batch_normalization (BatchNo (None, 400) 1600 \n",
"_________________________________________________________________\n",
"dense_5 (Dense) (None, 400) 160400 \n",
"_________________________________________________________________\n",
"batch_normalization_1 (Batch (None, 400) 1600 \n",
"_________________________________________________________________\n",
"dense_6 (Dense) (None, 512) 205312 \n",
"_________________________________________________________________\n",
"dense_7 (Dense) (None, 784) 402192 \n",
"=================================================================\n",
"Total params: 1,857,836\n",
"Trainable params: 1,856,236\n",
"Non-trainable params: 1,600\n",
"_________________________________________________________________\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "db4kHlPrWgMj",
"colab_type": "code",
"outputId": "30fe21fe-6674-46ca-f9a2-fc8363320641",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
}
},
"source": [
"history = model.fit(S_x_train, x_train,\n",
" batch_size=1024,\n",
" epochs=100,\n",
" validation_data=(S_x_val, x_val))"
],
"execution_count": 9,
"outputs": [
{
"output_type": "stream",
"text": [
"Epoch 1/100\n",
"53/53 [==============================] - 2s 29ms/step - loss: 0.1074 - val_loss: 0.0947\n",
"Epoch 2/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0493 - val_loss: 0.0735\n",
"Epoch 3/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0377 - val_loss: 0.0575\n",
"Epoch 4/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0324 - val_loss: 0.0457\n",
"Epoch 5/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0288 - val_loss: 0.0362\n",
"Epoch 6/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0259 - val_loss: 0.0296\n",
"Epoch 7/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0236 - val_loss: 0.0251\n",
"Epoch 8/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0218 - val_loss: 0.0225\n",
"Epoch 9/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0202 - val_loss: 0.0207\n",
"Epoch 10/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0189 - val_loss: 0.0197\n",
"Epoch 11/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0178 - val_loss: 0.0184\n",
"Epoch 12/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0169 - val_loss: 0.0175\n",
"Epoch 13/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0160 - val_loss: 0.0166\n",
"Epoch 14/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0153 - val_loss: 0.0158\n",
"Epoch 15/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0146 - val_loss: 0.0154\n",
"Epoch 16/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0140 - val_loss: 0.0145\n",
"Epoch 17/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0135 - val_loss: 0.0141\n",
"Epoch 18/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0130 - val_loss: 0.0137\n",
"Epoch 19/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0125 - val_loss: 0.0132\n",
"Epoch 20/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0121 - val_loss: 0.0126\n",
"Epoch 21/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0118 - val_loss: 0.0124\n",
"Epoch 22/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0115 - val_loss: 0.0121\n",
"Epoch 23/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0112 - val_loss: 0.0116\n",
"Epoch 24/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0109 - val_loss: 0.0113\n",
"Epoch 25/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0107 - val_loss: 0.0112\n",
"Epoch 26/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0104 - val_loss: 0.0109\n",
"Epoch 27/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0102 - val_loss: 0.0107\n",
"Epoch 28/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0100 - val_loss: 0.0104\n",
"Epoch 29/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0098 - val_loss: 0.0100\n",
"Epoch 30/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0096 - val_loss: 0.0103\n",
"Epoch 31/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0095 - val_loss: 0.0097\n",
"Epoch 32/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0093 - val_loss: 0.0095\n",
"Epoch 33/100\n",
"53/53 [==============================] - 1s 23ms/step - loss: 0.0092 - val_loss: 0.0094\n",
"Epoch 34/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0090 - val_loss: 0.0094\n",
"Epoch 35/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0089 - val_loss: 0.0092\n",
"Epoch 36/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0088 - val_loss: 0.0094\n",
"Epoch 37/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0087 - val_loss: 0.0091\n",
"Epoch 38/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0086 - val_loss: 0.0088\n",
"Epoch 39/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0085 - val_loss: 0.0088\n",
"Epoch 40/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0084 - val_loss: 0.0088\n",
"Epoch 41/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0083 - val_loss: 0.0086\n",
"Epoch 42/100\n",
"53/53 [==============================] - 1s 24ms/step - loss: 0.0082 - val_loss: 0.0086\n",
"Epoch 43/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0081 - val_loss: 0.0085\n",
"Epoch 44/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0081 - val_loss: 0.0085\n",
"Epoch 45/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0080 - val_loss: 0.0083\n",
"Epoch 46/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0079 - val_loss: 0.0081\n",
"Epoch 47/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0078 - val_loss: 0.0081\n",
"Epoch 48/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0078 - val_loss: 0.0081\n",
"Epoch 49/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0077 - val_loss: 0.0080\n",
"Epoch 50/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0077 - val_loss: 0.0080\n",
"Epoch 51/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0076 - val_loss: 0.0078\n",
"Epoch 52/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0076 - val_loss: 0.0080\n",
"Epoch 53/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0075 - val_loss: 0.0078\n",
"Epoch 54/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0074 - val_loss: 0.0076\n",
"Epoch 55/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0074 - val_loss: 0.0077\n",
"Epoch 56/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0074 - val_loss: 0.0077\n",
"Epoch 57/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0073 - val_loss: 0.0078\n",
"Epoch 58/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0073 - val_loss: 0.0074\n",
"Epoch 59/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0072 - val_loss: 0.0075\n",
"Epoch 60/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0072 - val_loss: 0.0073\n",
"Epoch 61/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0072 - val_loss: 0.0073\n",
"Epoch 62/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0071 - val_loss: 0.0073\n",
"Epoch 63/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0071 - val_loss: 0.0074\n",
"Epoch 64/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0071 - val_loss: 0.0074\n",
"Epoch 65/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0070 - val_loss: 0.0071\n",
"Epoch 66/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0070 - val_loss: 0.0072\n",
"Epoch 67/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0070 - val_loss: 0.0072\n",
"Epoch 68/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0069 - val_loss: 0.0072\n",
"Epoch 69/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0069 - val_loss: 0.0072\n",
"Epoch 70/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0069 - val_loss: 0.0070\n",
"Epoch 71/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0068 - val_loss: 0.0070\n",
"Epoch 72/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0069 - val_loss: 0.0072\n",
"Epoch 73/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0068 - val_loss: 0.0071\n",
"Epoch 74/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0068 - val_loss: 0.0069\n",
"Epoch 75/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0068 - val_loss: 0.0069\n",
"Epoch 76/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0068 - val_loss: 0.0069\n",
"Epoch 77/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0067 - val_loss: 0.0068\n",
"Epoch 78/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0067 - val_loss: 0.0068\n",
"Epoch 79/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0067 - val_loss: 0.0068\n",
"Epoch 80/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0067 - val_loss: 0.0068\n",
"Epoch 81/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0067 - val_loss: 0.0068\n",
"Epoch 82/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0066 - val_loss: 0.0068\n",
"Epoch 83/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0066 - val_loss: 0.0067\n",
"Epoch 84/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0066 - val_loss: 0.0067\n",
"Epoch 85/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0066 - val_loss: 0.0066\n",
"Epoch 86/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0066 - val_loss: 0.0066\n",
"Epoch 87/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0065 - val_loss: 0.0067\n",
"Epoch 88/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0065 - val_loss: 0.0066\n",
"Epoch 89/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0065 - val_loss: 0.0065\n",
"Epoch 90/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0065 - val_loss: 0.0066\n",
"Epoch 91/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0065 - val_loss: 0.0066\n",
"Epoch 92/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0065 - val_loss: 0.0066\n",
"Epoch 93/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0065 - val_loss: 0.0064\n",
"Epoch 94/100\n",
"53/53 [==============================] - 1s 26ms/step - loss: 0.0065 - val_loss: 0.0065\n",
"Epoch 95/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0064 - val_loss: 0.0065\n",
"Epoch 96/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0064 - val_loss: 0.0065\n",
"Epoch 97/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0064 - val_loss: 0.0064\n",
"Epoch 98/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0064 - val_loss: 0.0065\n",
"Epoch 99/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0064 - val_loss: 0.0064\n",
"Epoch 100/100\n",
"53/53 [==============================] - 1s 25ms/step - loss: 0.0064 - val_loss: 0.0065\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "VC_XKx3F0LQs",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 211
},
"outputId": "ab449f2d-7c83-4852-d4a3-3cf6eeda0e9d"
},
"source": [
"# testing on test images\n",
"\n",
"\n",
"plt.figure(figsize=(10, 10))\n",
"\n",
"img_num = np.random.choice(len(x_test))\n",
"\n",
"prediction = model.predict(S_x_test.iloc[img_num].values.reshape(1, -1))\n",
"\n",
"plt.subplot(1, 3, 1)\n",
"og_image = x_test.iloc[img_num].values.reshape(28, 28)\n",
"plt.imshow(og_image * 255.0, cmap=\"gray\")\n",
"plt.title(\"Original Image\")\n",
"plt.axis(\"off\")\n",
"\n",
"plt.subplot(1, 3, 2)\n",
"S_image = S_x_test.iloc[img_num].values.reshape(28, 28)\n",
"plt.imshow(S_image * 255.0, cmap=\"gray\")\n",
"plt.title(\"Scrambled Image\")\n",
"plt.axis(\"off\")\n",
"\n",
"\n",
"plt.subplot(1, 3, 3)\n",
"pred_img = prediction.reshape(28, 28)\n",
"plt.imshow(pred_img * 255.0, cmap=\"gray\")\n",
"plt.title(\"Unscrambled Image\")\n",
"plt.axis(\"off\")\n",
"\n",
"plt.show()"
],
"execution_count": 10,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjwAAADCCAYAAABNCBjaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAeZUlEQVR4nO3deZhV1Znv8d9LGGRQEGRQZlExTkE04BRjjNrRmKAh6dabtEKij5HbJubRdogdE2PU1scEp0YNJnodonG42N6QOOR6tdMGDB1BUNSAgszzPChirfvH3tUeqt5V1C7O4VSt+n6eh4fit3etvU6dvc55a5+9WBZCEAAAQMraVLsDAAAAlUbBAwAAkkfBAwAAkkfBAwAAkkfBAwAAkkfBAwAAktfqCh4z+6GZ3VfufRvRVjCzA8rRFtBcmNkDZvazBrY3+bxnzCBFZvYTM3u4ge3zzeyUJrbd5O9tDVp0wWNmY8xslpltMbNlZna3mXVr6HtCCDeGEC5oTPtF9t0VZvaSmVX8OGi5zOwEM/uzma03szVm9oqZfbba/aoWxkzr4BW9OysY4NvZLyetQYsteMzsMkk3S/pnSV0lHSNpoKQXzKx95Hva7r4eAuVhZntJ+p2kOyV1l9RX0nWSPizYDuc/UIBlWuz7JHbUIp/I/A3gOkmXhBCeDSF8FEKYL+nvJQ2S9K18v5+Y2ZNm9rCZbZA0pu5vB2Z2npm9b2arzexHpZcES/c1s0H5bxvnm9kCM1tlZteUtDPCzKaY2TozW2pmd8UKr508tpPMbJGZXWFmK/K2zjKzM8zsb/lv9z9s7HHN7DQzeye/MjDBzF4u/c3YzL5tZm+Z2Voze87MBhbtMyruIEkKITwaQvg4hLA1hPB8CGFm7Q5mdmH+PG40s9lmNjzP55vZlWY2U9JmM2trZleZ2bsl+55d0s6Y/OrR+Pyces/Mjsvzhfk5eX6d/u1jZi/k7b0cO4fMrIOZ3ZqPn+Vmdo+ZdSzZ/s/5ObzEzL7d2B8OY6b1KnnuLyt57seWbD8jP8c3mtliM7u8ZNsoM5thZhvy8fClPH/JzG4ws1ckbZG0v5mNLRlf75nZRU4fGnX+5fYws9/m7b1mZp+JPL42JeN1tZk9bmbdS7b/o33y/nWN10ak3dr3s7H5uF5rZt81s8+a2cx8bNxVsv8QM3sxP84qM3vESj5NMbPhZjY9fzxP5I/tZyXbz8x/1ussu1J9RGP7WlYhhBb3R9KXJG2X1NbZ9r8kPZp//RNJH0k6S1lx1zHPHs63HyJpk6QTJLWXdGu+/ykl31+77yBJQdLEvJ3PKPsN+9P59qOUXWVqm+/7lqRLS/oVJB0QeTwvSbog//qk/LFdK6mdpAslrZT0G0l7SjpU0lZJg3d2XEn7SNog6Wv59u/nj6/2WKMkzZX06Xz7v0j6c7WfX/7UOz/2krQ6P7dPl7R3ne3fkLRY0mclmaQDJA3Mt82XNENSf0kdS/bfLx8T/yBps6R9821j8vNvrKRPSfqZpAWS/k1SB0mnSdooqUu+/wP5v0/Mt98u6T+9817SeEnPKLtKtaek/yPppnzblyQtl3SYpM75+c6YaeV/vHNAO74u1z73P82f+zOUFSl759uXSvpc/vXekobnX4+QtF7Sqfk46Cvp4JJza0F+3rTN2/2ypCHKxtfn82MMr9OHxp5/P8nPqa/n+18uaZ6kdiVjtvY96PuSpkrqp2x83atP3t9q379qx94v8n6cEvlZPiDpZ/nXg/Kf7T2S9lA2rj+Q9LSkXvnPY4Wkz+f7H5D/rDpI6inpPyTdlm9rL+n9vK/t8rGzreRYR+ZtjVT2mnJ+/hg77PbzqdondBMHwbckLYts+1dJL5ScWP/RwGC5tvbkyf/dKX+iGip4+pXs/xdJ50T6camkSQ0N3JJtL2nHF++tkj6V/3vP/HtHluz/V0ln7ey4ks6TNKVkm0laWHKsP0
j6Tsn2NsoG8sBqP8f8qfe8fjp/wVqUv6g9I6l3vu05Sd+PfN98Sd/eSdszJI3Kvx4jaU7JtsPz8693SbZa0rD86wckPVayrYukjyX1z/8dlL1YmrLCakjJvsdKmpd//WtJ/1qy7SDGDH+8c0D1C56tKvnlV9mb6zH51wskXSRprzpt3CtpfAPn1k930q+na8dc0fMv7//UOudQaWE2X5+8B70l6Ysl++6rrFhqq+z9q3TsdVbJ+5fT5wdUv+DpW7J9taR/KPn3Uyr5pb1OW2dJmp5/faKyX7isZPt/lhzrbknX1/n+d5QXU7vzT4v8SEvSKmWX0b17EvbNt9da2EA7+5VuDyFsUfakN2RZyddblL3Ay8wOMrPfWXbz9AZJNyr7bbEpVocQPs6/3pr/vbxk+9ZGHrfu4wvK3jBrDZR0e36ZcZ2kNcpe4Ps2sd+okBDCWyGEMSGEfsquguwn6bZ8c39J7zbw7TuMAcs+xp1R8rwfph3P1brnmkII7vlXt/0QwiZl59F+dfrQU9kvFH8tOe6zeS7VOVeV/cZYBGMmTR8ru2pQqp2yN/1aq0MI20v+/d+vy5JGK7vq837+0eSxeV50zJxuZlPzj6fW5W2WjplGn3912w8h1Cg7x+qOGSk73yaVnG9vKfuZ9Fb9c3Wzdv7+VVfdPsbGTG8zeyz/WHCDpIe145hZnI+Veo8vfwyX1T6G/HH0jzzeimqpBc8UZR8nfa00NLMuyi75/9+SuPRJqGupskuFtd/fUVKPJvbpbklvSzowhLCXpB8qeyGstIaOW/fxWem/lZ2UF4UQupX86RhC+PNu6DeaKITwtrLf1g7Lo4XKLrdHv6X2i/x+k4mS/klSjxBCN0lvaNfO1f4l7XdR9pHVkjr7rFL2AnpoybnWNYRQ+yawtLQdSQN2oT87w5hpORYouxpRarAaWRCHEKaFEEYp+5jmaUmP55uKjJkOyq523KrsSmc3Sb9X+cZMG2XnWN0xU9vP0+ucb3uEEBarzpgxs05q+vvXztyo7GdyeD5mvqUdx0zffKzUKh3LCyXdUOcxdAohPFqhvka1yIInhLBe2U3Ld5rZl8ysnZkNUnYyL5L0UCObelLSVyy7KbO9skuNTT2J91T22f8mMztY0sVNbKecx50s6fD8Brq2kv6npD4l2++RdLWZHSpJZtbVzL6xm/qNRjKzgy27KbNf/u/+ks5V9tm+JN0n6XIzO8oyB1j8RtrOyl64VuZtjdUnhVNTnWHZtPn2kq5Xdrl+h9+Q899iJ0oab2a98mP3NbO/y3d5XNmkgkPyF+4f72KfGsKYaTl+K+lfzKyfZTfwniLpK8peuxtkZu3N7Jtm1jWE8JGy57wm3/wrSWPN7It5u33zc8HTXtm9KyslbTez05Xd87IrjjKzr+Xn2KXKfoGf6ux3j6QbasezmfU0s1H5ticlnVky9n6qyr2n76nsfqH1ZtZX2ezoWlOUXXX6J8smRYxSdo9UrYmSvmtmI/PXp85m9mUz27NCfY1qkQWPJIUQblH2m9mtyk7kV5VVkl8MITRqum4I4U1Jl0h6TFmVuknZ57+FpvvmLpf0P5TdwDlR2UDdHaLHDSGsUnaD6i3KLnUeIum/lD++EMIkZVP7H8svU76h7AoZmpeNym74e9XMNit7YXxD0mWSFEJ4QtINym6S3KjsN9nuXkMhhNmSfq7sRWq5snt0XtnF/v1GWYGyRtkNwd+K7Helsht+p+bn2x8lDc379QdlH9G9mO/z4i72qSGMmZbjp5L+rOyekLXKnpdvhhDeaOT3/6Ok+flz9V1J35SkEMJflN2YP17ZzcsvK/vopZ4QwkZJ31NWlK9Vdu4808THU+vflU0YWJv38Wt5UVbX7fmxnjezjcrG/si8X28qK8h/o+z9a612/Pi1nK6TNFzZz2qypP9duyGEsE3Zpy3fkbRO2fj/nT4ZM/+l7Ebuu/I+zlV2r+BuZzt+7Na65Zfj1ym71D2v2v0pt/zS6S
JlLxj/r9r9AZo7xgxQnJm9KumeEML91e5LqRZ7hadczOwrZtbJzDoru1o0S9ld8kkws78zs27559C19yp4l04BiDEDFGVmnzezPvlHWudLOkLZpIRmpdUXPMr+X40l+Z8DlU0zT+my17HKZiOsUvbZ91khhK0NfwvQqjFmgGKGSnpd2Sckl0n6eghhaXW7VB8faQEAgORxhQcAACSPggcAACSvwdWTzYzPu9CshBB2x3/mGFXpMfHxxx/Xyz71qU9V8pBRt912m5tfcsklbh7rZ01NTb2sTZvq/K41Z84cN99///3dPPaYvOcptn+RfZsi9TEBFBUbE1zhAQAAyaPgAQAAyaPgAQAAyaPgAQAAyWvw/+HhZjQ0N9W+QbOmpsYdE+W6AfWUU06plz333HPuvhMmTHDzcePGuXmlb36u9M25nh/84AduPn78+IodsyHVuOm82mOC9wk0N9y0DAAAWi0KHgAAkDwKHgAAkDwKHgAAkDwKHgAAkLwGl5YAsKNXX33Vzcs1Q+mPf/xjo/eNLfEQ68vEiRPd/MILL2z0MRsSe6xnn312vWzSpEllOeb27dsL7R/72cQUff6K7H/99de7+dVXX+3mbdvycg3sCq7wAACA5FHwAACA5FHwAACA5FHwAACA5FHwAACA5LGWFlqU5rpuULlmaRWZRVSutquxxlbRY3bu3NnNN2/eXKid008/3c3/8Ic/uHmsn0Vne1VScx0TQLWwlhYAAGi1KHgAAEDyKHgAAEDyKHgAAEDyKHgAAEDyWJwFKCA2O2fo0KFlaafI7KVly5a5eZ8+fQq1Xa71pTZu3Fhof8/w4cPd/LXXXmt0Gw2JzcaKia2ddvTRR5ejO4WMHj16tx8TldGjR496We/evd19Fy9e7Obr168va59aA67wAACA5FHwAACA5FHwAACA5FHwAACA5HHTchNdfPHFbn7kkUfWyy644AJ3XzP/f4RvaLkPz9SpU9386aefdvOHHnrIzZcuXVrouK3RL37xCzefO3eum1dyOYfYzckx3bt3L7R/0T6OGTOm0P6eadOmufmtt97q5nfddZebL1y40M1jS1Rs2LDBzSu97IZnypQpbn7sscfu5p6gsfr37+/mJ598spsPHDiw0W3E8vnz5zeuc7m3337bzZcsWeLmkydPdvOiy7k0J1zhAQAAyaPgAQAAyaPgAQAAyaPgAQAAyaPgAQAAybOGZgSZWbHpQi1Y7L+0HzdunJuff/75bt6mTfOvIadPn+7mN9xwg5tPmjSpkt0pJITgT23bTWJjolzLM5TDHnvs4eYffPBBoXZis7rWrFnj5pWckVbJtsvJW+4jNpuuXI+puY6JFJ111llufuqpp7r5gAED3Nx7n9h///3dfZcvX97oNiSpQ4cObv7++++7+erVq9089noRm+VbruVfyiE2Jpr/uzMAAMAuouABAADJo+ABAADJo+ABAADJo+ABAADJS3aWVmzNnNNPP93NJ0yY4OY9evQoW5+au9mzZ7v54Ycfvpt7EpfKjJQiM3SqNUPppptucvOrr766osetpNg6VSNGjChL+9WYNZbKmKiG2Ay6UaNGufmhhx7q5j179nTzffbZp9F96dq1q5tv3brVzWOzqGJrNMZmb8VmKL/77rtu/sgjj7j5b3/7WzdfsGCBm1cSs7QAAECrRcEDAACSR8EDAACSR8EDAACSR8EDAACS17baHdhVXbp0cfPbb7/dzceMGVPB3hSzbds2N58xY4abl2smScygQYPc/KKLLnLze++9t4K9SUM5Zlidc8455eqOa/HixW7et29fN581a5abl2M239e//nU3f/LJJ3e5bUk69thj3Tw2E6Zjx467fMyWsg5Ya3TCCSe4+bnnnuvme++9t5u/+eabbu6trSZJNTU19bJFixa5+65fv97NO3XqVGj/2Izj2OywXr16ufnJJ5/s5tOmTXPzaszSiuEKDwAASB4FDwAASB4FDwAASB4FDwAASB4FDw
AASF6LX0vrzjvvdPNx48bt5p5kvDvtY+t0zZw5082ff/55N//qV7/q5sccc4ybX3rppW7etm2xyXkrV65089g6NJVU7XWDampq3DFRyRk35ZrlU7SdSh+3iNgxb775Zje/8sorC7XfkmdSVXtMtIT3iWHDhrl57Dw59dRT3Xz58uVuvnDhQjePzd7yZi69/fbb7r4bN25089jaWN26dXPz2PqSRx55pJufcsopbr527Vo3f+qpp9x84sSJ9bLYOmDlwlpaAACg1aLgAQAAyaPgAQAAyaPgAQAAyaPgAQAAyWvxa2mVY/2eprj66qvd/Fe/+lW9bPXq1WU55qRJkwrlo0ePdvPBgweXpT+tUaVnRjWXYzZl/1/+8pdlacdz3XXXuXnR2Vgx1Zhl9+CDD7r5eeedV7G+tFZHHHGEm8fWi4vNgFqzZo2bv/76624eWxfxhRdeqJetWrXK3beo2BpbBx98sJv37t3bzdu08a+HDBkyxM333XdfNy86K7iSuMIDAACSR8EDAACSR8EDAACSR8EDAACSR8EDAACS13xun25hHnnkETcv14wspKEaazF95jOfcfPf//73bn7GGWcUar+S604NHTrUzX/84x/vcttS8XW9yrHOWNGfy9ixYwvt39B6iK1NbM2sPffc081jM5S6dOni5ps2bXLz2HqDixcvLtROOWzZssXN33jjDTePzXResWKFm8dmgW3dutXNm9N6dFzhAQAAyaPgAQAAyaPgAQAAyaPgAQAAyaPgAQAAyWOWVhO9++67bj5+/Ph62d133+3uu2DBgrL2CZCk+fPnu3lsNlbRWVflmHVRyZleDSk662rAgAGF2ini7LPPdvMHHnjAzbt27brLx0zdBx984OadO3d2823btrn5O++84+Yffvihm48YMcLNt2/f3uj9H374YXff2AywojMO99prLzfv3r27m7dr165QO7HZguvXr29E73YPrvAAAIDkUfAAAIDkUfAAAIDkUfAAAIDkUfAAAIDktfhZWjNmzHDzz33ucxU9buwO9iuuuKJe9p3vfMfd9/7773fz6dOnu/ljjz3m5gcccICbx9aDKerGG28sSzv4RGyGhTdrpGPHjoXajs2KiM0sqvTsn1deeaVe9vOf/7xQG8uWLXPzPn36uHk1Zp4VNWnSJDePjXPsXGztqo8++sjNN2zY4OY1NTVu3q9fPzcfNGiQmw8cONDNly9fXi87+uij3X2nTZtWKJ86daqbm5mbb9y40c1ja3L97W9/c/M//elPbt6ccIUHAAAkj4IHAAAkj4IHAAAkj4IHAAAkz2L/HbQkmVl8YzMR+y/DTzrpJDd/5plnKtib8og9J96yFVL8BupLLrmk0HHfe+89Nx85cqSbr1mzplD75RBC8O+8202KjolqLKEwa9YsN7/qqqvcfPLkyYXaL/qYvP2rcZOwFP/ZHH744bu5J/GlJWI3M8e0tDFRDSeeeKKbDxkyxM1PPvlkN+/Vq5eb77///m6+du1aN4+9b3m2bt3q5gsXLnTzBx980M1j7ysnnHCCmw8ePNjNZ8+e7eY/+tGP3LwaYmOCKzwAACB5FDwAACB5FDwAACB5FDwAACB5FDwAACB5LX6WVkynTp3c/LjjjnPz0047zc3POeccN+/bt2/TOtaMxe6+r8YMlpiWNiNlxYoVbh6b7eGpxkyvah63HJpT33v06OHmq1evLkv7LW1MNCexpUn2228/Nz/qqKPcfNiwYW4+YMCARrcfW4Zi3bp1bh6b6bV06VI3nzJlipvHzs/Ye1yll20qB2ZpAQCAVouCBwAAJI+CBwAAJI+CBwAAJI+CBwAAJC/ZWVrlMnToUDd/8skn3dybfbPPPvuUtU+VMnXqVDc//vjjd3NP4qo9I2Xbtm3umGjbtq27f9FZQeVYd6o5zVCSqrOWVk1NjZvHXu9i/bniiivc/JZbbnHzajx/1R4TKb5PtG/f3s0PPvhgN+/WrZubjxo1ys0POuigRv
cldi4fcsghbh6b1RVbeys2C3f+/PluPmHCBDcvugZcJTFLCwAAtFoUPAAAIHkUPAAAIHkUPAAAIHkUPAAAIHn+1BL8t3feecfNY3e2e2unxO7UP/fcc9386KOPdvNKz2y58cYbK9p+Cjp06ODmsZk15RBbz+2jjz5y89h5UnT2T2z/2PpACxYsKNR+JbVp4/8uV/R5eumll9x8+fLljW6j6DFbwlplqdu2bZubz5w508179uzp5rEZVlu2bKmXde3a1d33y1/+spv369fPzVetWuXmMcuWLXPzJUuWuHlsnLcEXOEBAADJo+ABAADJo+ABAADJo+ABAADJo+ABAADJY5ZWmXl3sN95553uvosWLXLzyy+/3M2POeaYpnesxK9//Ws3nzt3blnab41ef/11N4/9TGPP/bhx4+pljz32WNM7VqLo7J958+a5+eLFi8vSvqfS64CVawZb7969y9IfpGHlypWFck/RGaCxWZojRoxw89iajps3b3bzWbNmufn06dPdvCXgCg8AAEgeBQ8AAEgeBQ8AAEgeBQ8AAEgeBQ8AAEges7TKrH///vWyCy64wN33qquucvO2bSv7tEycONHNY+uGYeeGDx9eaP9rr73Wze+9995Gt1HpGU0vv/xyWY7r7R/bt2jfP/zwQzePjaGi648VdeWVV9bLbr75Znff0aNHu/ndd9/t5r169Wp6x1q52FpX3bt3d/OFCxe6ubcGlhRfuy22ltahhx5aLzMzd9+RI0e6eewx7bvvvm7+5ptvuvmKFSvc/NFHH3Xz2GNqCbjCAwAAkkfBAwAAkkfBAwAAkkfBAwAAkkfBAwAAkmchhPhGs/jGZiJ2l/0XvvCFih73mmuucXNvjZ0+ffpUtC/vvfeemz/00ENufsstt7j5Bx98ULY+VUoIwZ/KsJuceeaZ7piYPHmyu385ZlIVbeOKK65w85tuummX+yJJL730kpufdNJJbl5kllZRsZ/NIYcc4uZFZyJWckZauVR7TDSn94nYelSx1+DYTKeuXbu6ebdu3dw8NlvwwAMPdHPvfSLW98GDB7t5jx493Dx2vi1dutTNn332WTd/4okn3HzdunVu3pzExgRXeAAAQPIoeAAAQPIoeAAAQPIoeAAAQPIoeAAAQPKa3Syt733ve25+7rnnunmXLl3cPDZLoyVbsGCBm99xxx1uPn78+Ep2pyqa64yU2BpN7dq1K9S+9xwPGDCgUBvlct9997n52LFj3bySM89ili1b5uax2TfNaUZauX4GzXVMlIs3G+nEE0909x02bJibb9q0yc333ntvN4/NxvLWwJLi52FsXav169fXy2KzrpYsWeLmsfNk1apVbv7aa6+5+V/+8hc3/+tf/+rmsXXqtm/f7ubVwCwtAADQalHwAACA5FHwAACA5FHwAACA5FHwAACA5FV1ltbo0aPrZQ8//LC7b/v27SvZlaqI3dV+6623uvmDDz7o5kXXB2rJqj0jpaamxh0TlVwv6eKLL3bzu+66q1A75Zot9Morr7j58ccfX6g/1VBTU+PmbdqU53e/2M/Ss3LlSjcvuvZetcdENWbzPv74427eq1cvN+/YsaObb9682c1jr82xMRFb72rhwoVuPnv27HrZfvvt5+67ePFiN58zZ46bz5gxw83nzZvn5rHHtGXLFjeP1QyxmWrVwCwtAADQalHwAACA5FHwAACA5FHwAACA5FHwAACA5FV1ltZhhx1WL4vNAImtmVUta9ascXNvlsbEiRPdfWfOnOnmTzzxRNM7lrjWOCOl0sq1plORdprbWlo33HCDm19zzTWF+lMNjIlPzJo1y81jM6Bi52Fs/arYOlWxdbAWLVrk5t4MqBdeeMHdd926dW7+4osvunnsvSmmJayNVRSztAAAQKtFwQMAAJJHwQMAAJJHwQMAAJJX1ZuWPffff7+bn3feeRU97nPPPefmsRvD7rnnHjfftGlT2fqE+rhB8xNFb/wt143C5VC0L94yNJL01F
NPufnll1/u5rElCYr+t/tFxB5rLC+6jE5rHBNDhw5189gyOxMmTHDz2M869txMmTLFzWM3M7/22mtu7t0UHVv+YuvWrW6OOG5aBgAArRYFDwAASB4FDwAASB4FDwAASB4FDwAASF6zm6UFNCSVGSmxWSCvvvpqvey4444rxyEL96XoDKW5c+e6+ZAhQ+plsf/qf+nSpYWOGVPpGWmVbL9o26mMCaBcmKUFAABaLQoeAACQPAoeAACQPAoeAACQPAoeAACQvLbV7gCQgk6dOrn5xo0b3Tw242bOnDm73JeVK1e6ec+ePQv1JaboLKJZs2bVy77xjW+4+95xxx0V7Uu5VLL9omuhAWgcrvAAAIDkUfAAAIDkUfAAAIDkUfAAAIDkUfAAAIDkMUsLKGDevHluPn78eDcvOpvnwAMPLNynumKzscql6CyiSs+Yau6GDh3q5s8//7ybDxw40M0bWEuraR0DWhmu8AAAgORR8AAAgORR8AAAgORR8AAAgORR8AAAgOQZd/gDAIDUcYUHAAAkj4IHAAAkj4IHAAAkj4IHAAAkj4IHAAAkj4IHAAAk7/8DugYUvKONUzcAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 720x720 with 3 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "8PuhUQET2nYS",
"colab_type": "code",
"colab": {}
},
"source": [
"model.save(\"model.h5\")"
],
"execution_count": 0,
"outputs": []
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment