Last active
January 22, 2021 15:02
-
-
Save rozeappletree/6c8b9a3023ed62a603cf8cbec6efe01d to your computer and use it in GitHub Desktop.
Train_ MAPNet.ipynb
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{ | |
"nbformat": 4, | |
"nbformat_minor": 0, | |
"metadata": { | |
"colab": { | |
"name": "Train_ MAPNet.ipynb", | |
"provenance": [], | |
"collapsed_sections": [], | |
"include_colab_link": true | |
}, | |
"kernelspec": { | |
"name": "python3", | |
"display_name": "Python 3" | |
}, | |
"accelerator": "GPU" | |
}, | |
"cells": [ | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "view-in-github", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"<a href=\"https://colab.research.google.com/gist/rakesh4real/6c8b9a3023ed62a603cf8cbec6efe01d/train_-mapnet.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "EYmlIfEdUJMa", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "85c162ea-024c-453d-8220-9cff0870b94b" | |
}, | |
"source": [ | |
# Expose the host's nvidia-smi binary inside the Colab container, then print
# the GPU status so the reader can confirm an accelerator is attached.
!ln -sf /opt/bin/nvidia-smi /usr/bin/nvidia-smi
import subprocess
print(subprocess.getoutput('nvidia-smi'))
], | |
"execution_count": null, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Fri Jan 1 15:06:34 2021 \n", | |
"+-----------------------------------------------------------------------------+\n", | |
"| NVIDIA-SMI 418.67 Driver Version: 418.67 CUDA Version: 10.1 |\n", | |
"|-------------------------------+----------------------+----------------------+\n", | |
"| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n", | |
"| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n", | |
"|===============================+======================+======================|\n", | |
"| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |\n", | |
"| N/A 50C P8 10W / 70W | 0MiB / 15079MiB | 0% Default |\n", | |
"+-------------------------------+----------------------+----------------------+\n", | |
" \n", | |
"+-----------------------------------------------------------------------------+\n", | |
"| Processes: GPU Memory |\n", | |
"| GPU PID Type Process name Usage |\n", | |
"|=============================================================================|\n", | |
"| No running processes found |\n", | |
"+-----------------------------------------------------------------------------+\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "2YCApa2aeU9C", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "053f2af7-b8be-4b58-ad4d-9a9aa1b1edc9" | |
}, | |
"source": [ | |
# List every device TensorFlow can see; a '/device:GPU:0' entry confirms the
# runtime is actually using the GPU (not just that nvidia-smi works).
from tensorflow.python.client import device_lib
print(device_lib.list_local_devices())
], | |
"execution_count": 1, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"[name: \"/device:CPU:0\"\n", | |
"device_type: \"CPU\"\n", | |
"memory_limit: 268435456\n", | |
"locality {\n", | |
"}\n", | |
"incarnation: 9162827415679841530\n", | |
", name: \"/device:GPU:0\"\n", | |
"device_type: \"GPU\"\n", | |
"memory_limit: 14638920512\n", | |
"locality {\n", | |
" bus_id: 1\n", | |
" links {\n", | |
" }\n", | |
"}\n", | |
"incarnation: 2329053322952791945\n", | |
"physical_device_desc: \"device: 0, name: Tesla T4, pci bus id: 0000:00:04.0, compute capability: 7.5\"\n", | |
"]\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "MFGVP64SaGsg", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "7f7c9518-517b-4ed8-c983-c2698987dc12" | |
}, | |
"source": [ | |
# Install required libs

# Fetch the MAPNet reference implementation (model code + sample dataset
# under MAPNet/dataset/ that the later cells read).
!git clone https://github.com/lehaifeng/MAPNet.git
], | |
"execution_count": 2, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Cloning into 'MAPNet'...\n", | |
"remote: Enumerating objects: 3, done.\u001b[K\n", | |
"remote: Counting objects: 100% (3/3), done.\u001b[K\n", | |
"remote: Compressing objects: 100% (3/3), done.\u001b[K\n", | |
"remote: Total 103 (delta 0), reused 0 (delta 0), pack-reused 100\u001b[K\n", | |
"Receiving objects: 100% (103/103), 3.94 MiB | 41.64 MiB/s, done.\n", | |
"Resolving deltas: 100% (29/29), done.\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "loZqWTY1ecre", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 1000 | |
}, | |
"outputId": "da60509c-6661-4bf1-b4dd-2f17a1955250" | |
}, | |
"source": [ | |
# MAPNet is written against the TF 1.x graph API (placeholders/sessions), so
# pin TensorFlow 1.15 — both the CPU and GPU wheels.
# NOTE(review): Colab warns that these packages were already imported; the
# runtime should be restarted after this cell so the downgrade takes effect.
!pip install tensorflow==1.15.0
!pip install tensorflow-gpu==1.15.0
!pip install utils
], | |
"execution_count": 3, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Collecting tensorflow==1.15.0\n", | |
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/3f/98/5a99af92fb911d7a88a0005ad55005f35b4c1ba8d75fba02df726cd936e6/tensorflow-1.15.0-cp36-cp36m-manylinux2010_x86_64.whl (412.3MB)\n", | |
"\u001b[K |████████████████████████████████| 412.3MB 39kB/s \n", | |
"\u001b[?25hRequirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (1.1.2)\n", | |
"Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (1.1.0)\n", | |
"Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (1.15.0)\n", | |
"Requirement already satisfied: numpy<2.0,>=1.16.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (1.19.5)\n", | |
"Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (3.3.0)\n", | |
"Requirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (3.12.4)\n", | |
"Requirement already satisfied: google-pasta>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (0.2.0)\n", | |
"Collecting keras-applications>=1.0.8\n", | |
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/71/e3/19762fdfc62877ae9102edf6342d71b28fbfd9dea3d2f96a882ce099b03f/Keras_Applications-1.0.8-py3-none-any.whl (50kB)\n", | |
"\u001b[K |████████████████████████████████| 51kB 9.0MB/s \n", | |
"\u001b[?25hCollecting tensorboard<1.16.0,>=1.15.0\n", | |
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/1e/e9/d3d747a97f7188f48aa5eda486907f3b345cd409f0a0850468ba867db246/tensorboard-1.15.0-py3-none-any.whl (3.8MB)\n", | |
"\u001b[K |████████████████████████████████| 3.8MB 56.2MB/s \n", | |
"\u001b[?25hCollecting tensorflow-estimator==1.15.1\n", | |
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/de/62/2ee9cd74c9fa2fa450877847ba560b260f5d0fb70ee0595203082dafcc9d/tensorflow_estimator-1.15.1-py2.py3-none-any.whl (503kB)\n", | |
"\u001b[K |████████████████████████████████| 512kB 51.0MB/s \n", | |
"\u001b[?25hRequirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (1.32.0)\n", | |
"Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (0.36.2)\n", | |
"Requirement already satisfied: absl-py>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (0.10.0)\n", | |
"Requirement already satisfied: wrapt>=1.11.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (1.12.1)\n", | |
"Collecting gast==0.2.2\n", | |
" Downloading https://files.pythonhosted.org/packages/4e/35/11749bf99b2d4e3cceb4d55ca22590b0d7c2c62b9de38ac4a4a7f4687421/gast-0.2.2.tar.gz\n", | |
"Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (0.8.1)\n", | |
"Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.6.1->tensorflow==1.15.0) (51.3.3)\n", | |
"Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras-applications>=1.0.8->tensorflow==1.15.0) (2.10.0)\n", | |
"Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (3.3.3)\n", | |
"Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (1.0.1)\n", | |
"Requirement already satisfied: importlib-metadata; python_version < \"3.8\" in /usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (3.3.0)\n", | |
"Requirement already satisfied: typing-extensions>=3.6.4; python_version < \"3.8\" in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (3.7.4.3)\n", | |
"Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (3.4.0)\n", | |
"Building wheels for collected packages: gast\n", | |
" Building wheel for gast (setup.py) ... \u001b[?25l\u001b[?25hdone\n", | |
" Created wheel for gast: filename=gast-0.2.2-cp36-none-any.whl size=7540 sha256=d2b49c833459c411dd5782e3d6b916d2bde994c067fccdad3cfb9aae9c82f3bc\n", | |
" Stored in directory: /root/.cache/pip/wheels/5c/2e/7e/a1d4d4fcebe6c381f378ce7743a3ced3699feb89bcfbdadadd\n", | |
"Successfully built gast\n", | |
"\u001b[31mERROR: tensorflow-probability 0.12.1 has requirement gast>=0.3.2, but you'll have gast 0.2.2 which is incompatible.\u001b[0m\n", | |
"Installing collected packages: keras-applications, tensorboard, tensorflow-estimator, gast, tensorflow\n", | |
" Found existing installation: tensorboard 2.4.0\n", | |
" Uninstalling tensorboard-2.4.0:\n", | |
" Successfully uninstalled tensorboard-2.4.0\n", | |
" Found existing installation: tensorflow-estimator 2.4.0\n", | |
" Uninstalling tensorflow-estimator-2.4.0:\n", | |
" Successfully uninstalled tensorflow-estimator-2.4.0\n", | |
" Found existing installation: gast 0.3.3\n", | |
" Uninstalling gast-0.3.3:\n", | |
" Successfully uninstalled gast-0.3.3\n", | |
" Found existing installation: tensorflow 2.4.0\n", | |
" Uninstalling tensorflow-2.4.0:\n", | |
" Successfully uninstalled tensorflow-2.4.0\n", | |
"Successfully installed gast-0.2.2 keras-applications-1.0.8 tensorboard-1.15.0 tensorflow-1.15.0 tensorflow-estimator-1.15.1\n" | |
], | |
"name": "stdout" | |
}, | |
{ | |
"output_type": "display_data", | |
"data": { | |
"application/vnd.colab-display-data+json": { | |
"pip_warning": { | |
"packages": [ | |
"gast", | |
"tensorboard", | |
"tensorflow" | |
] | |
} | |
} | |
}, | |
"metadata": { | |
"tags": [] | |
} | |
}, | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Collecting tensorflow-gpu==1.15.0\n", | |
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/a5/ad/933140e74973fb917a194ab814785e7c23680ca5dee6d663a509fe9579b6/tensorflow_gpu-1.15.0-cp36-cp36m-manylinux2010_x86_64.whl (411.5MB)\n", | |
"\u001b[K |████████████████████████████████| 411.5MB 41kB/s \n", | |
"\u001b[?25hRequirement already satisfied: google-pasta>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (0.2.0)\n", | |
"Requirement already satisfied: tensorboard<1.16.0,>=1.15.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.15.0)\n", | |
"Requirement already satisfied: wrapt>=1.11.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.12.1)\n", | |
"Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (0.8.1)\n", | |
"Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (0.36.2)\n", | |
"Requirement already satisfied: keras-applications>=1.0.8 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.0.8)\n", | |
"Requirement already satisfied: gast==0.2.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (0.2.2)\n", | |
"Requirement already satisfied: numpy<2.0,>=1.16.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.19.5)\n", | |
"Requirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (3.12.4)\n", | |
"Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.1.2)\n", | |
"Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (3.3.0)\n", | |
"Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.32.0)\n", | |
"Requirement already satisfied: absl-py>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (0.10.0)\n", | |
"Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.1.0)\n", | |
"Requirement already satisfied: tensorflow-estimator==1.15.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.15.1)\n", | |
"Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow-gpu==1.15.0) (1.15.0)\n", | |
"Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.16.0,>=1.15.0->tensorflow-gpu==1.15.0) (51.3.3)\n", | |
"Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.16.0,>=1.15.0->tensorflow-gpu==1.15.0) (1.0.1)\n", | |
"Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.16.0,>=1.15.0->tensorflow-gpu==1.15.0) (3.3.3)\n", | |
"Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras-applications>=1.0.8->tensorflow-gpu==1.15.0) (2.10.0)\n", | |
"Requirement already satisfied: importlib-metadata; python_version < \"3.8\" in /usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow-gpu==1.15.0) (3.3.0)\n", | |
"Requirement already satisfied: typing-extensions>=3.6.4; python_version < \"3.8\" in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow-gpu==1.15.0) (3.7.4.3)\n", | |
"Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow-gpu==1.15.0) (3.4.0)\n", | |
"Installing collected packages: tensorflow-gpu\n", | |
"Successfully installed tensorflow-gpu-1.15.0\n" | |
], | |
"name": "stdout" | |
}, | |
{ | |
"output_type": "display_data", | |
"data": { | |
"application/vnd.colab-display-data+json": { | |
"pip_warning": { | |
"packages": [ | |
"tensorflow" | |
] | |
} | |
} | |
}, | |
"metadata": { | |
"tags": [] | |
} | |
}, | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Collecting utils\n", | |
" Downloading https://files.pythonhosted.org/packages/55/e6/c2d2b2703e7debc8b501caae0e6f7ead148fd0faa3c8131292a599930029/utils-1.0.1-py2.py3-none-any.whl\n", | |
"Installing collected packages: utils\n", | |
"Successfully installed utils-1.0.1\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "R-DalbHfjrZ2" | |
}, | |
"source": [ | |
# Core imports for the training pipeline.
import os
import time
import utils
import tensorflow as tf
import numpy as np
import skimage.io as io
import argparse

# Start from a clean default graph so re-running the notebook does not
# accumulate duplicate ops/variables in the TF1 global graph.
from tensorflow.python.framework import ops
ops.reset_default_graph()
], | |
"execution_count": 1, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "3y4Isl-6cFn2" | |
}, | |
"source": [ | |
"#from google.colab import drive\n", | |
"#drive.mount('/content/drive')" | |
], | |
"execution_count": 2, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "4YCjOIIQcFk5", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "def0deeb-0783-48ac-d0e8-bd97161ba5a2" | |
}, | |
"source": [ | |
# Root of the dataset cloned with the MAPNet repo.
DATA_DIR = '/content/MAPNet/dataset/'
import os
# Sanity-check that the clone step produced the expected directory before
# any of the data-loading cells run.
if os.path.exists(DATA_DIR):
    print('Done!!')
else:
    print('no data available')
], | |
"execution_count": 3, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"Done!!\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "_UmAQg4ASFP2" | |
}, | |
"source": [ | |
"# **Load data**" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "GOCrdX8ybFZ8" | |
}, | |
"source": [ | |
"#load data\n", | |
"import numpy as np\n", | |
"import glob\n", | |
"import scipy\n", | |
"import random\n", | |
"import cv2\n", | |
"import skimage.io\n", | |
"\n", | |
def load_batch(x, y):
    """Load, augment and normalize one batch of image/label file pairs.

    Args:
        x: sequence of image file paths.
        y: sequence of label file paths, parallel to ``x``.

    Returns:
        Tuple ``(imgs, labs)`` of float32 ndarrays: images scaled to [0, 1],
        labels reshaped to (512, 512, 1).
    """
    x1 = []
    y1 = []
    for i in range(len(x)):
        img = skimage.io.imread(x[i])
        lab = skimage.io.imread(y[i])
        #ret, lab = cv2.threshold(_lab,0,1,cv2.THRESH_BINARY)
        img, lab = data_augmentation(img, lab)
        lab = lab.reshape(512, 512, 1)  # add the channel axis the net expects
        x1.append(img / 255.0)
        y1.append(lab)
    # Fix: the original returned x1 as a plain Python list while y1 was a
    # float32 ndarray; return both as float32 arrays for consistency.
    x1 = np.array(x1).astype(np.float32)
    y1 = np.array(y1).astype(np.float32)
    return x1, y1
"\n", | |
"\n", | |
def prepare_data(data_dir=None):
    """Collect sorted train/test image and label paths.

    Args:
        data_dir: dataset root (trailing slash expected, matching DATA_DIR's
            format); defaults to the notebook-level ``DATA_DIR``.

    Returns:
        ``(img, label, test_img, test_label)`` — numpy arrays of file-path
        strings, each sorted so images and labels pair up by filename.

    Raises:
        ValueError: if image and label counts disagree for either split.
    """
    root = DATA_DIR if data_dir is None else data_dir

    img = np.array(sorted(glob.glob(f'{root}train/img/*.png')))
    label = np.array(sorted(glob.glob(f'{root}train/lab/*.png')))
    test_img = np.array(sorted(glob.glob(f'{root}test/img/*.png')))
    test_label = np.array(sorted(glob.glob(f'{root}test/lab/*.png')))

    # Summarize instead of dumping every path: the original printed the full
    # label array, flooding the cell output.
    print(f"[DEBUG] found {len(img)} train and {len(test_img)} test images")

    # A silent image/label count mismatch would corrupt training pairs.
    if len(img) != len(label) or len(test_img) != len(test_label):
        raise ValueError('image/label counts differ - check the dataset layout')

    return img, label, test_img, test_label
"\n", | |
"\n", | |
def data_augmentation(image, label):
    """Randomly flip and rotate (by multiples of 90 degrees) a sample pair.

    The identical transform is applied to ``image`` and ``label`` so they
    stay pixel-aligned. The random draw sequence matches the original
    implementation, but rotation now uses ``np.rot90``, which is exact —
    the original ``cv2.warpAffine`` nearest-neighbour rotation about
    ``(w//2, h//2)`` could shift even-sized images by a pixel and pulled in
    cv2 for what is a pure array operation.
    """
    if random.randint(0, 1):
        image = np.fliplr(image)
        label = np.fliplr(label)
    if random.randint(0, 1):
        image = np.flipud(image)
        label = np.flipud(label)

    if random.randint(0, 1):
        k = random.randint(0, 3)  # number of 90-degree turns
        if k != 0:
            image = np.rot90(image, k)
            label = np.rot90(label, k)

    return image, label
], | |
"execution_count": 4, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "3IUD9qj4SI9j" | |
}, | |
"source": [ | |
"# **Load MapNet**" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "m1Kfv7WxbjTU" | |
}, | |
"source": [ | |
"# mapnet\n", | |
"import tensorflow as tf\n", | |
"\n", | |
"# from keras.layers import UpSampling2D\n", | |
"\n", | |
"\n", | |
def conv2d(input, filters, kernel_size=3, stride=1, padding='SAME'):
    """Bias-free conv layer (BN always follows) with He-style initialization."""
    return tf.layers.conv2d(
        input,
        filters=filters,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        use_bias=False,
        kernel_initializer=tf.variance_scaling_initializer(),
    )
"\n", | |
"\n", | |
def bn(input, is_training=True):
    """Batch normalization with the network-wide momentum/epsilon settings."""
    return tf.layers.batch_normalization(
        input, momentum=0.99, epsilon=1e-3, training=is_training)
"\n", | |
"\n", | |
def bottleneck(x, size, is_training, downsampe=False):
    """Pre-activation bottleneck: 1x1 -> 3x3 -> 1x1 (4x expansion) + skip.

    With ``downsampe=True`` the identity path is projected by a 1x1 conv so
    its channel count matches ``size * 4``.
    """
    out = tf.nn.relu(bn(x, is_training))
    out = conv2d(out, size, 1, padding='VALID')
    out = tf.nn.relu(bn(out, is_training))
    out = conv2d(out, size, 3)
    out = tf.nn.relu(bn(out, is_training))
    out = conv2d(out, size * 4, 1, padding='VALID')

    if downsampe:
        shortcut = tf.nn.relu(bn(x, is_training))
        shortcut = conv2d(shortcut, size * 4, 1, padding='VALID')
    else:
        shortcut = x
    return tf.add(out, shortcut)
"\n", | |
"\n", | |
def resblock(x, size, is_training):
    """Pre-activation basic residual block: two 3x3 convs plus identity skip."""
    out = x
    for _ in range(2):
        out = bn(out, is_training)
        out = tf.nn.relu(out)
        out = conv2d(out, size, 3)
    return tf.add(out, x)
"\n", | |
"\n", | |
def stage0(x, is_training):
    """Stem stage: four bottlenecks; the first projects the skip path."""
    out = bottleneck(x, 64, is_training, downsampe=True)
    for _ in range(3):
        out = bottleneck(out, 64, is_training)
    return out
"\n", | |
"\n", | |
def translayer(x, in_channels, out_channels, is_training):
    """Adapt the branch list ``x`` to the next stage's channel widths.

    Existing branches get a stride-1 3x3 conv to the new width; each extra
    output branch is derived from the lowest-resolution input with a
    stride-2 3x3 conv.
    """
    out = []
    for i in range(len(out_channels)):
        if i < len(in_channels):
            src, stride = x[i], 1
        else:
            src, stride = x[-1], 2
        y = tf.nn.relu(bn(src, is_training))
        y = conv2d(y, out_channels[i], 3, stride=stride)
        out.append(y)
    return out
"\n", | |
"\n", | |
def convb(x, block_num, channels, is_training):
    """Run ``block_num`` residual blocks on every branch independently."""
    out = []
    for i, width in enumerate(channels):
        y = x[i]
        for _ in range(block_num):
            y = resblock(y, width, is_training)
        out.append(y)
    return out
"\n", | |
"\n", | |
def featfuse(x, channels, is_training, multi_scale_output=True):
    # Cross-resolution feature fusion (HRNet-style exchange unit): each
    # output branch i sums contributions from every branch j — lower-res
    # branches are 1x1-conv'd and upsampled, higher-res branches are
    # repeatedly 1x1-conv'd and max-pooled down to branch i's resolution.
    out = []
    for i in range(len(channels) if multi_scale_output else 1):
        residual = x[i]
        for j in range(len(channels)):
            if j > i:
                if multi_scale_output == False:
                    y = bn(x[j], is_training)
                    y = tf.nn.relu(y)
                    y = conv2d(y, channels[j], 1, padding='VALID')
                    # NOTE(review): in the single-output path the upsampled
                    # lower-resolution maps are appended as *separate* outputs
                    # instead of being summed into `residual`; mapnet() later
                    # concatenates stage3, so this appears deliberate —
                    # confirm against the upstream MAPNet repo before "fixing".
                    out.append(tf.keras.layers.UpSampling2D(size=2 ** (j - i))(y))
                else:
                    y = bn(x[j], is_training)
                    y = tf.nn.relu(y)
                    y = conv2d(y, channels[i], 1, padding='VALID')
                    y = tf.keras.layers.UpSampling2D(size=2 ** (j - i))(y)
                    residual = tf.add(residual, y)

            elif j < i:
                y = x[j]
                for k in range(i - j):
                    if k == i - j - 1:
                        # Last downsampling step: switch to branch i's width.
                        y = bn(y, is_training)
                        y = tf.nn.relu(y)
                        y = conv2d(y, channels[i], 1)
                        y = tf.layers.max_pooling2d(y, 2, 2)

                    else:
                        # Intermediate steps keep the source branch's width.
                        y = bn(y, is_training)
                        y = tf.nn.relu(y)
                        y = conv2d(y, channels[j], 1)
                        y = tf.layers.max_pooling2d(y, 2, 2)

                residual = tf.add(residual, y)
        out.append(residual)
    return out
"\n", | |
"\n", | |
def convblock(x, channels, is_training, multi_scale_output=True):
    """One exchange module: per-branch residual blocks, then fusion."""
    features = convb(x, 4, channels, is_training)
    return featfuse(features, channels, is_training,
                    multi_scale_output=multi_scale_output)
"\n", | |
"\n", | |
def stage(x, num_modules, channels, is_training, multi_scale_output=True):
    """Stack ``num_modules`` exchange modules over the branch list ``x``.

    When ``multi_scale_output`` is False, only the final module collapses
    to a single-scale output; earlier modules stay multi-scale.
    """
    out = x
    for i in range(num_modules):
        is_last = (i == num_modules - 1)
        out = convblock(out, channels, is_training,
                        multi_scale_output=multi_scale_output or not is_last)
    return out
"\n", | |
"\n", | |
def pyramid_pooling_block(input, bin_sizes):
    """PSPNet-style pyramid pooling; assumes square spatial dims (uses h only).

    Each bin size contributes an average-pooled, 1x1-conv'd (c/4 channels),
    bilinearly-resized map; the concatenation is added back to the input.
    Currently unused in mapnet() (the call is commented out there).
    """
    h = input.shape[1]
    c = input.shape[-1]
    branches = []
    for bin_size in bin_sizes:
        window = (h // bin_size, h // bin_size)
        y = tf.layers.average_pooling2d(input, window, window)
        y = conv2d(y, int(c) // 4, 1)
        branches.append(tf.image.resize_bilinear(y, (h, h)))
    return tf.add(input, tf.concat(branches, axis=3))
"\n", | |
"\n", | |
def spatial_pooling(input):
    """Concatenate max-pooled-and-resized views (kernels 2, 3, 5, 6) with input."""
    h, w = input.shape[1], input.shape[2]
    pooled = [
        tf.image.resize_bilinear(tf.layers.max_pooling2d(input, k, k), (h, w))
        for k in (2, 3, 5, 6)
    ]
    return tf.concat(pooled + [input], axis=-1)
"\n", | |
"\n", | |
def channel_squeeze(input, filters, name=" "):
    """Squeeze-and-excitation style channel reweighting.

    Global-average-pools the input, passes it through two dense layers
    (ReLU then sigmoid), and scales the input channel-wise by the result.
    """
    with tf.name_scope(name):
        pooled = tf.reduce_mean(input, axis=[1, 2])  # global average pool
    with tf.name_scope(name + "fc1"):
        gate = tf.layers.dense(pooled, use_bias=True, units=filters)
        gate = tf.nn.relu(gate)
    with tf.name_scope(name + "fc2"):
        gate = tf.layers.dense(gate, use_bias=True, units=filters)
        gate = tf.nn.sigmoid(gate)
    scale = tf.reshape(gate, [-1, 1, 1, filters])
    return input * scale
"\n", | |
"\n", | |
def mapnet(input, is_training=True):
    # MAPNet: HRNet-style multi-resolution backbone followed by channel
    # attention (channel_squeeze), spatial pooling, and two bilinear x2
    # upsampling steps back toward the input resolution. Returns
    # single-channel logits (sigmoid is applied by the caller).
    channels_s2 = [64, 128]
    channels_s3 = [64, 128, 256]
    num_modules_s2 = 2
    num_modules_s3 = 3

    # Stem: stride-2 conv + two 3x3 convs + max-pool => 1/4 resolution.
    conv_1 = conv2d(input, 64, stride=2)
    conv_1 = bn(conv_1, is_training)
    conv_1 = tf.nn.relu(conv_1)
    conv_2 = conv2d(conv_1, 64)
    conv_2 = bn(conv_2, is_training)
    conv_2 = tf.nn.relu(conv_2)
    conv_3 = conv2d(conv_2, 64)
    conv_3 = bn(conv_3, is_training)
    conv_3 = tf.nn.relu(conv_3)
    conv_4 = tf.layers.max_pooling2d(conv_3, 2, 2)

    # Multi-resolution stages: 1 branch -> 2 branches -> 3 branches; the
    # last stage collapses to single-scale output (multi_scale_output=False).
    stage1 = stage0(conv_4,is_training)
    trans1 = translayer([stage1], [256], channels_s2,is_training)
    stage2 = stage(trans1, num_modules_s2, channels_s2,is_training)
    trans2 = translayer(stage2, channels_s2, channels_s3,is_training)
    stage3 = stage(trans2, num_modules_s3, channels_s3,is_training,multi_scale_output=False)

    # Channel attention over the concatenation of all stage-3 outputs.
    stg3=tf.concat(stage3,axis=-1)
    squeeze=channel_squeeze(stg3, stg3.shape[-1], name="squeeze")

    # Spatial pooling path over the first two stage-3 outputs (pyramid
    # pooling alternative left commented out by the authors).
    spatial=tf.concat([stage3[0],stage3[1]],axis=-1)
    # spatial=pyramid_pooling_block(spatial, [1, 2, 4, 8])
    spatial=spatial_pooling(spatial)

    # Merge the two attention paths and reduce to 128 channels.
    new_feature = tf.concat([spatial, squeeze], axis=-1)
    new_feature = bn(new_feature, is_training)
    new_feature = tf.nn.relu(new_feature)
    result=conv2d(new_feature, 128, 1, padding='SAME')

    # Decoder: two bilinear x2 upsamplings, each followed by BN/ReLU/conv.
    up1=tf.image.resize_bilinear(result,size=(stage3[0].shape[1]*2,stage3[0].shape[2]*2))
    up1 = bn(up1, is_training)
    up1 = tf.nn.relu(up1)
    up1 = conv2d(up1, 64, 3)

    up2 = tf.image.resize_bilinear(up1, size=(up1.shape[1]*2, up1.shape[2]*2))
    up2 = bn(up2, is_training)
    up2 = tf.nn.relu(up2)
    up2 = conv2d(up2, 32, 3)

    up2 = bn(up2, is_training)
    up2 = tf.nn.relu(up2)
    # 1x1 conv to single-channel logits.
    final = conv2d(up2, 1, 1, padding='valid')

    return final
], | |
"execution_count": 5, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "UFEOU_1YSW4P" | |
}, | |
"source": [ | |
"# **Train**" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "RF81kkV8by-h", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "525f37be-3681-4d9e-abe3-b2777b0aae93" | |
}, | |
"source": [ | |
# Hyper-parameters. argparse (rather than plain constants) keeps the cell
# reusable as a script; the dummy '-f' flag absorbs the kernel-connection
# argument that Jupyter/Colab passes to the interpreter.
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', type=int, default=4, help='Number of images in each batch')
# Fix: the help strings for learning_rate / clip_size / color were
# copy-pasted from other flags in the original.
parser.add_argument('--learning_rate', type=float, default=0.001, help='Initial learning rate for the Adam optimizer')
parser.add_argument('--crop_height', type=int, default=512, help='Height of cropped input image to network')
parser.add_argument('--crop_width', type=int, default=512, help='Width of cropped input image to network')
parser.add_argument('--clip_size', type=int, default=450, help='Clip size used by the MAPNet training pipeline')
parser.add_argument('--num_epochs', type=int, default=200, help='Number of epochs to train for')
# NOTE(review): argparse `type=bool` does not parse strings (bool('False')
# is True), so these flags only work through their defaults; use a
# str->bool converter if they must be settable from the command line.
parser.add_argument('--h_flip', type=bool, default=True, help='Whether to randomly flip the image horizontally for data augmentation')
parser.add_argument('--v_flip', type=bool, default=True, help='Whether to randomly flip the image vertically for data augmentation')
parser.add_argument('--color', type=bool, default=True, help='Whether to randomly apply color jitter for data augmentation')
parser.add_argument('--rotation', type=bool, default=True, help='randomly rotate, the imagemax rotation angle in degrees.')
parser.add_argument('--start_valid', type=int, default=20, help='Number of epoch to valid')
parser.add_argument('--valid_step', type=int, default=1, help="Number of step to validation")
parser.add_argument('-f')

args = parser.parse_args()
num_images = []
train_img, train_label, valid_img, valid_lab = prepare_data()
num_batches = len(train_img) // (args.batch_size)
], | |
"execution_count": 6, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"[DEBUG] train label ['/content/MAPNet/dataset/train/lab/7000000.png'\n", | |
" '/content/MAPNet/dataset/train/lab/7000001.png'\n", | |
" '/content/MAPNet/dataset/train/lab/7000002.png'\n", | |
" '/content/MAPNet/dataset/train/lab/7000003.png'\n", | |
" '/content/MAPNet/dataset/train/lab/7000006.png'\n", | |
" '/content/MAPNet/dataset/train/lab/7000007.png']\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "j0EaxcHnkB7z", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "38319ec3-08e0-40e7-bf4f-fa8eb4193683" | |
}, | |
"source": [ | |
# The model is built with TF1 placeholders/sessions; force TF1 graph-mode
# semantics. (A no-op on a real TF 1.15 install; needed if a TF2 runtime
# is still active before the post-install restart.)
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
], | |
"execution_count": 7, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/compat/v2_compat.py:68: disable_resource_variables (from tensorflow.python.ops.variable_scope) is deprecated and will be removed in a future version.\n", | |
"Instructions for updating:\n", | |
"non-resource variables are not supported in the long term\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "OEdHFizTg9KZ", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "4de1ff73-46de-4dfc-daf3-dcf775c50dd4" | |
}, | |
"source": [ | |
# Build the training graph: input placeholders, model, loss, optimizer.
img=tf.placeholder(tf.float32,[None,args.crop_height,args.crop_width,3])
is_training=tf.placeholder(tf.bool)
# NOTE(review): crop_height is used twice below — harmless while crops are
# square (512x512) but the third dim should presumably be args.crop_width;
# confirm before training on non-square crops.
label=tf.placeholder(tf.float32,[None,args.crop_height,args.crop_height,1])

pred=mapnet(img,is_training)
# Sigmoid probabilities for inference; the loss below uses raw logits.
pred1=tf.nn.sigmoid(pred)

# Run batch-norm moving-average updates together with each train step.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):

    sig=tf.nn.sigmoid_cross_entropy_with_logits(labels=label, logits=pred)
    sigmoid_cross_entropy_loss = tf.reduce_mean(sig)
    train_step = tf.train.AdamOptimizer(args.learning_rate).minimize(sigmoid_cross_entropy_loss)
# Saver over all globals so checkpoints capture BN statistics as well.
saver=tf.train.Saver(var_list=tf.global_variables())
], | |
"execution_count": 8, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"WARNING:tensorflow:From <ipython-input-5-474ff2aaaabc>:10: conv2d (from tensorflow.python.layers.convolutional) is deprecated and will be removed in a future version.\n", | |
"Instructions for updating:\n", | |
"Use `tf.keras.layers.Conv2D` instead.\n", | |
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/layers/convolutional.py:424: Layer.apply (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n", | |
"Instructions for updating:\n", | |
"Please use `layer.__call__` method instead.\n", | |
"WARNING:tensorflow:From <ipython-input-5-474ff2aaaabc>:14: batch_normalization (from tensorflow.python.layers.normalization) is deprecated and will be removed in a future version.\n", | |
"Instructions for updating:\n", | |
"Use keras.layers.BatchNormalization instead. In particular, `tf.control_dependencies(tf.GraphKeys.UPDATE_OPS)` should not be used (consult the `tf.keras.layers.batch_normalization` documentation).\n", | |
"WARNING:tensorflow:From <ipython-input-5-474ff2aaaabc>:192: max_pooling2d (from tensorflow.python.layers.pooling) is deprecated and will be removed in a future version.\n", | |
"Instructions for updating:\n", | |
"Use keras.layers.MaxPooling2D instead.\n", | |
"WARNING:tensorflow:From <ipython-input-5-474ff2aaaabc>:168: dense (from tensorflow.python.layers.core) is deprecated and will be removed in a future version.\n", | |
"Instructions for updating:\n", | |
"Use keras.layers.Dense instead.\n", | |
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/ops/nn_impl.py:183: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n", | |
"Instructions for updating:\n", | |
"Use tf.where in 2.0, which has the same broadcast rule as np.where\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "Gfv4V3AOxvgr" | |
}, | |
"source": [ | |
"history = {\n", | |
" 'train': {\n", | |
" \"iter\" : [],\n", | |
" \"iou\" : [],\n", | |
" \"loss\" : []\n", | |
" },\n", | |
" 'val': {\n", | |
" \"iter\" : [],\n", | |
" \"loss\" : [], \n", | |
" \"iou\" : []\n", | |
" }\n", | |
"}\n", | |
"\n", | |
"CHECKPOINTS_DIR = './'\n", | |
"PRINT_EVERY = 3 # epochs" | |
], | |
"execution_count": 15, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "fbvtMhxJd1ec" | |
}, | |
"source": [ | |
"from tqdm import tqdm\n", | |
"\n", | |
"def load():\n", | |
" import re\n", | |
" print(\"[INFO] Reading checkpoints dir...\")\n", | |
" checkpoint_dir = CHECKPOINTS_DIR\n", | |
" \n", | |
" ckpt = tf.train.get_checkpoint_state(checkpoint_dir)\n", | |
" if ckpt and ckpt.model_checkpoint_path:\n", | |
" ckpt_name = os.path.basename(ckpt.model_checkpoint_path)\n", | |
" saver.restore(sess, os.path.join(checkpoint_dir, ckpt_name))\n", | |
" counter = int(next(re.finditer(\"(\\d+)(?!.*\\d)\",ckpt_name)).group(0))\n", | |
"    print(\"[INFO] Checkpoint {} read successfully\".format(ckpt_name))\n", | |
" return True, counter\n", | |
" else:\n", | |
" print(\"[INFO] Checkpoint not found\")\n", | |
" return False, 0\n", | |
"\n", | |
"def train():\n", | |
"\n", | |
" tf.global_variables_initializer().run()\n", | |
"\n", | |
" could_load, checkpoint_counter = load()\n", | |
" if could_load:\n", | |
" start_epoch = (int)(checkpoint_counter / num_batches)\n", | |
" start_batch_id = checkpoint_counter - start_epoch * num_batches\n", | |
" counter = checkpoint_counter\n", | |
" print(\"[INFO] Checkpoint Load Success!\")\n", | |
"\n", | |
" else:\n", | |
" start_epoch = 0\n", | |
" start_batch_id = 0\n", | |
" counter = 1\n", | |
" print(\"[INFO] Checkpoint load failed. Training from scratch...\")\n", | |
"\n", | |
" train_iter=[]\n", | |
" train_loss=[]\n", | |
" IOU=0.65\n", | |
"\n", | |
" print(\"==================================================================\")\n", | |
" print(\"[INFO] GENERAL INFORMATION\")\n", | |
" print(\"==================================================================\")\n", | |
" # utils.count_params()\n", | |
" print(\"Total train image:{}\".format(len(train_img)))\n", | |
" print(\"Total validate image:{}\".format(len(valid_img)))\n", | |
" print(\"Total epoch:{}\".format(args.num_epochs))\n", | |
" print(\"Batch size:{}\".format(args.batch_size))\n", | |
" print(\"Learning rate:{}\".format(args.learning_rate))\n", | |
" #print(\"Checkpoint step:{}\".format(args.checkpoint_step))\n", | |
"\n", | |
" print(\"==================================================================\")\n", | |
" print(\"[INFO] DATA AUGMENTATION\")\n", | |
" print(\"==================================================================\")\n", | |
" print(\"h_flip: {}\".format(args.h_flip))\n", | |
" print(\"v_flip: {}\".format(args.v_flip))\n", | |
" print(\"rotate: {}\".format(args.rotation))\n", | |
" print(\"clip size: {}\".format(args.clip_size))\n", | |
"\n", | |
" print(\"==================================================================\")\n", | |
" print(\"[INFO] TRAINING STARTED\")\n", | |
" print(\"==================================================================\")\n", | |
"\n", | |
" loss_tmp = []\n", | |
" \n", | |
" # -----------------------------------------------------------------------------------\n", | |
" # beg: epoch\n", | |
" # -----------------------------------------------------------------------------------\n", | |
" args.num_epochs = 10\n", | |
" args.start_valid = 0\n", | |
" for i in range(start_epoch, args.num_epochs):\n", | |
" \n", | |
"\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
" # beg: batches\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
" epoch_time=time.time()\n", | |
" id_list = np.random.permutation(len(train_img))\n", | |
" batch_pbar = tqdm(range(start_batch_id, num_batches), desc=f\"[TRAIN] Epoch {i}\")\n", | |
" for j in batch_pbar:\n", | |
"\n", | |
" img_d = []\n", | |
" lab_d = []\n", | |
" for ind in range(args.batch_size):\n", | |
" id = id_list[j * args.batch_size + ind]\n", | |
" img_d.append(train_img[id])\n", | |
" lab_d.append(train_label[id])\n", | |
" x_batch, y_batch = load_batch(img_d, lab_d)\n", | |
" # print(f\"[DEBUG] {x_batch[0].shape} {y_batch[0].shape}\")\n", | |
" # (512, 512, 3) (512, 512, 1)\n", | |
"\n", | |
" feed_dict = {img: x_batch,\n", | |
" label: y_batch,\n", | |
" is_training:True}\n", | |
"\n", | |
" _, loss, pred1 = sess.run([train_step, sigmoid_cross_entropy_loss, pred], feed_dict=feed_dict)\n", | |
"\n", | |
" loss_tmp.append(loss)\n", | |
" if (j == num_batches-1):\n", | |
" tmp = np.median(loss_tmp)\n", | |
"          history['train']['iter'].append(i)\n", | |
"          history['train']['iou'].append(0.2)\n", | |
"          history['train']['loss'].append(tmp)\n", | |
" #train_iter.append(counter)\n", | |
" #train_loss.append(tmp)\n", | |
" #print('Epoch', i, '|Iter', counter, '|Loss', tmp)\n", | |
" batch_pbar.set_description(f\"[TRAIN] Epoch {i} --- Iter {counter} --- Loss {tmp}\")\n", | |
" loss_tmp.clear()\n", | |
"\n", | |
" counter += 1\n", | |
" start_batch_id = 0\n", | |
" # print(f'[DEBUG] Time taken for epoch {i}: {time.time() - epoch_time:.3f} seconds')\n", | |
" # saver.save(sess, './ckeckpoint_10epoch_new/model.ckpt', global_step=counter)\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
" # end: batches\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
"\n", | |
"\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
" # beg: val for epoch\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
" if (i>args.start_valid):\n", | |
" if (i-args.start_valid)%args.valid_step==0:\n", | |
" val_iou, val_loss = validation()\n", | |
" #print(f\"[INFO] current val loss: {val_loss}\")\n", | |
" #print(f\"[INFO] last iou valu: {IOU}\")\n", | |
" #print(f\"[INFO] new_iou value: {val_iou}\")\n", | |
"            history['val']['iter'].append(i)\n", | |
" history['val']['iou'].append(val_iou)\n", | |
" history['val']['loss'].append(val_loss)\n", | |
" # saving best model based on best IOU score.\n", | |
" # Can do based on best val_loss instead too!\n", | |
" if val_iou > IOU:\n", | |
" print(f\"[INFO] Saving best model as checkpoint... val_iou: {val_iou}\")\n", | |
" saver.save(sess, f'{CHECKPOINTS_DIR}model.ckpt', global_step=counter, write_meta_graph=True)\n", | |
" IOU = val_iou\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
" # end: val for epoch\n", | |
" # -------------------------------------------------------------------------------------------------------\n", | |
"\n", | |
"\n", | |
"\n", | |
" # -----------------------------------------------------------------------------------\n", | |
" # end: epoch\n", | |
" # -----------------------------------------------------------------------------------\n", | |
" saver.save(sess, f'{CHECKPOINTS_DIR}model.ckpt', global_step=counter)\n", | |
"\n", | |
"\n", | |
"\n", | |
"def f_iou(predict, label):\n", | |
"\n", | |
" tp = np.sum(np.logical_and(predict == 1, label == 1))\n", | |
" fp = np.sum(predict==1)\n", | |
" fn = np.sum(label == 1)\n", | |
" return tp,fp+fn-tp\n", | |
"\n", | |
"\n", | |
"\n", | |
"def validation():\n", | |
"\n", | |
" #print(\"[INFO] Validating ...\")\n", | |
" inter=0\n", | |
" unin=0\n", | |
" loss_accumulator = []\n", | |
"\n", | |
" batch_pbar = tqdm(range(0,len(valid_img)), desc=f\"Validating -- \")\n", | |
" for j in batch_pbar:\n", | |
" x_batch = valid_img[j]\n", | |
" x_batch = io.imread(x_batch) / 255.0\n", | |
" x_batch = np.expand_dims(x_batch, axis=0)\n", | |
" y_actual_batch = np.expand_dims(io.imread(valid_lab[j]), axis=0)\n", | |
" # print(f\"[DEBUG] {x_batch.shape} {y_actual_batch.shape}\")\n", | |
" # (1, 512, 512, 3) (1, 512, 512) \n", | |
" y_actual_batch = np.expand_dims(y_actual_batch, axis=-1) \n", | |
" # (1, 512, 512) > (1, 512, 512, 1)\n", | |
"\n", | |
" feed_dict = {img: x_batch,\n", | |
" label: y_actual_batch,\n", | |
" is_training:False}\n", | |
"\n", | |
" #predict = sess.run(pred1, feed_dict=feed_dict)\n", | |
"      loss, predict = sess.run([sigmoid_cross_entropy_loss, pred1], feed_dict=feed_dict)\n", | |
" loss_accumulator.append(loss)\n", | |
" \n", | |
" predict[predict < 0.5] = 0\n", | |
" predict[predict >= 0.5] = 1\n", | |
" result = np.squeeze(predict)\n", | |
" gt_value=io.imread(valid_lab[j])\n", | |
" intr,unn=f_iou(gt_value,result)\n", | |
"\n", | |
" inter=inter+intr\n", | |
" unin=unin+unn\n", | |
"\n", | |
" if j == (len(valid_img) - 1):\n", | |
" batch_pbar.set_description(f\"[VALID] --- Loss {np.median(loss_accumulator):.4f} --- IOU {(inter*1.0)/(unin+1e-10):.4f}\")\n", | |
"\n", | |
" iou = (inter*1.0)/(unin+1e-10)\n", | |
" loss = np.median(loss_accumulator)\n", | |
" return iou, loss\n" | |
], | |
"execution_count": 16, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "qH24Mi-ceC67", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "45eca95c-953e-43e2-d3b3-437fd1dca060" | |
}, | |
"source": [ | |
"with tf.Session() as sess:\n", | |
" train()" | |
], | |
"execution_count": 18, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"text": [ | |
"[INFO] Reading checkpoints dir...\n", | |
"INFO:tensorflow:Restoring parameters from ./model.ckpt-11\n", | |
"[INFO] Checkpoint model.ckpt-11 read successed\n", | |
"[INFO] Checkpoint Load Success!\n", | |
"==================================================================\n", | |
"[INFO] GENERAL INFORMATION\n", | |
"==================================================================\n", | |
"Total train image:6\n", | |
"Total validate image:5\n", | |
"Total epoch:10\n", | |
"Batch size:4\n", | |
"Learning rate:0.001\n", | |
"==================================================================\n", | |
"[INFO] DATA AUGMENTATION\n", | |
"==================================================================\n", | |
"h_flip: True\n", | |
"v_flip: True\n", | |
"rotate: True\n", | |
"clip size: 450\n", | |
"==================================================================\n", | |
"[INFO] TRAINING STARTED\n", | |
"==================================================================\n" | |
], | |
"name": "stdout" | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "zR4gtIDtz6Iq" | |
}, | |
"source": [ | |
"import matplotlib.pyplot as plt\n", | |
"\n", | |
"def plot_curves():\n", | |
" fig = plt.figure(figsize=(10,7))\n", | |
" \n", | |
" plt.plot(history['val']['loss'], label='Val Loss')\n", | |
" plt.plot(history['train']['loss'], label='Train Loss')\n", | |
"\n", | |
"  plt.xlabel(\"Epoch Number\")\n", | |
"  plt.ylabel(\"Loss\")\n", | |
" plt.legend()\n", | |
" plt.grid()\n", | |
" plt.show()" | |
], | |
"execution_count": 13, | |
"outputs": [] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 442 | |
}, | |
"id": "D8DwQ4HH4b-i", | |
"outputId": "7708037c-7943-4cb1-f33d-ebe54f8e219b" | |
}, | |
"source": [ | |
"plot_curves()" | |
], | |
"execution_count": 14, | |
"outputs": [ | |
{ | |
"output_type": "display_data", | |
"data": { | |
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAmEAAAGpCAYAAADFpuEPAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdd3gVVeLG8e9JJxVCQiCk0HsLBBI62BURRKUr2FdFLOu6lv0puuu6u3YUC65dMIqCgoK4KpHee++E0GtICCHt/P5IXFkMECA3c2/yfp5nHnPvnXvnDQPmzcyZM8Zai4iIiIiULy+nA4iIiIhURiphIiIiIg5QCRMRERFxgEqYiIiIiANUwkREREQc4ON0gPMVERFh69Sp49JtHD9+nKCgIJduQ1xL+9DzaR96Pu1Dz6b9VzaWLFly0FobWdJrHlfC6tSpw+LFi126jdTUVHr06OHSbYhraR96Pu1Dz6d96Nm0/8qGMWbHmV7T6UgRERERB6iEiYiIiDhAJUxERETEAR43JkxEREQuXl5eHunp6eTk5JT4elhYGOvWrSvnVJ4rICCAmJgYfH19S/0elTAREZFKKD09nZCQEOrUqYMx5nevZ2ZmEhIS4kAyz2Ot5dChQ6Snp1O3bt1Sv0+nI0VERCqhnJwcqlevXmIBk/NjjKF69epnPKp4JiphIiIilZQKWNm5kD9LlTARERERB6iEiYiISLnr2bMn06dP/5/nXn31Ve65554zvqdHjx4lTth+pufdnUqYiIiIlLtBgwaRkpLyP8+lpKQwaNAghxKVP5UwERERKXc33ngj3333Hbm5uQBs376d3bt307VrV+655x4SExNp3rw5Tz/99AV9/uHDh+nbty+tWrUiOTmZlStXAvDLL7/Qpk0b2rRpQ0JCApmZmezZs4du3brRpk0bWrRowaxZs8rs+zwbTVEhIiJSyT0zZQ1rdx/7n+cKCgrw9va+4M9sFh3K072bn/H18PBwOnTowLRp0+jTpw8pKSn0798fYwzPPfcc4eHhFBQUcOmll7Jy5UpatWp1Xtt/+umnSUhI4Ouvv+bnn3/mlltuYfny5bz44ouMGTOGzp07k5WVRUBAAGPHjuXKK6/kySefpKCggOzs7Av+vs+HjoSJiIiII049JXnqqcgvvviCtm3bkpCQwJo1a1i7du15f/bs2bO5+eabAbjkkks4dOgQx44do3Pnzjz88MOMHj2ao0eP4uPjQ/v27fnggw8YNWoUq1atKrf50XQkTEREpJIr6YhVeUzW2qdPHx566CGWLl1KdnY27dq1Y9u2bbz44ossWrSIatWqMXz48POef+tsHnvsMXr16sXUqVPp3Lkz06dPp1u3bsycOZPvvvuO4cOH8/DDD3PLLbeU2TbPREfCTncyk6CsbVBY4HQSERGRCi04OJiePXty2223/fco2LFjxwgKCiIsLIx9+/Yxbdq0C/rsrl27Mm7cOABSU1OJiIggNDSULVu20LJlS/785z/Tvn171q9fz44dO4iKiuLOO+/kjjvuYOnSpWX2PZ6NjoSdbmsq7Rc/CKuegtgOEJsMcclQux34BTqdTkREpEIZNGgQ119//X9PS7Zu3ZqEhASaNGlCbGwsnTt3LtXn9OrV67/3bezYsSPvvPMOt912G61atSIwMJCPPvoIKJoGY8aMGXh5edG8eXOuvvpqUlJSeOGFF/D19SU4OJiPP/7YNd/saYy1tlw2VFYSExOtS+cCyTrAum/H0DToKOxcAPuLz0N7+UCt1hDXEWKTiopZcA3X5ZCLkpqaSo8ePZyOIRdB+9DzaR+6t3Xr1tG0adMzvq57R56/kv5MjTFLrLWJJa2vI2GnC45kX80eNP31fxzZhyF9EaTNL1oWvgvz3ih6Lbx+URmLSy4qZ9UbgG4BISIiIqWgEnYugeHQ6MqiBSD/JOxZAWnzIG0BbJgGy8cVr1v9t9OXcclQqw
34+DmXXURERNyWStj58vEvHivWAToD1sLBTbCz+EhZ2jzY8F3xugEQ3fa3I2Wx7aFKNUfji4iIiHtQCbtYxkBko6KlbfHlrFn7iwrZzgVFpWzuaJj9ctFrNZoVlbJfj5hVjdMpTBERkUpIJcwVgmtAs+uKFoDcbNi1pLiYzYdVX8Li94teC4mGuKTfBvxHtQBv7RYREZGKTj/ty4NfINTtWrRA0Rxk+9f+Ntg/bT6smVS8bjDEtP9tXFntRPAPdi67iIiIuIRKmBO8vKFmy6Klw51Fzx3d+dvpy7QFkPoPwIIpXjeu429HzEJqOhpfRETkYh06dIhLL70UgL179+Lt7U1kZCQACxcuxM/vzBe2LV68mI8//pjRo0eXent16tRh8eLFREREXFzwMqQS5i6qxhYtLW8sepyT8b9TYyz5EBa8VfRatTqnXIXZESIagZdufiAiIp6jevXqLF++HIBRo0YRHBzMI4888t/X8/Pz8fEpuaYkJiaSmFji1FseRSXMXQWEQYPLihaAgjzYs7LoSNnO+bDlJ1iZUrxu1eLB/sVHyqITwDfAuewiIiIXYPjw4QQEBLBs2TI6d+7MwIEDeeCBB8jJyaFKlSp88MEHNG7cmNTUVF588UW+/fZbRo0aRVpaGlu3biUtLY0HH3yQkSNHlmp727dv57bbbuPgwYNERkbywQcfEBcXx4QJE3jmmWfw9vYmLCyMmTNnsmbNGm699VZyc3MpLCzkq6++omHDhhf1/aqEeQpvX4hpV7QwomhqjMNbf5sWY+cC2Ph98bp+RUXs1KswA8MdjS8iIm5s2mOwd9X/PFWlIP/iLhSr2RKu/sd5vy09PZ25c+fi7e3NsWPHmDVrFj4+Pvz444888cQTfPXVV797z/r165kxYwaZmZk0btyYe+6557+3MDqb+++/n2HDhjFs2DDef/99Ro4cyddff82zzz7L9OnTqV27NkePHgXg7bff5oEHHmDIkCHk5uZSUHDx95hWCfNUxkD1+kVLwpCi544fLB5XVnwKc96bMOe1otciGv82piwuGarV1dQYIiLidm666Sa8vb0ByMjIYNiwYWzatAljDHl5eSW+p1evXvj7++Pv70+NGjXYt28fMTEx59zWvHnzmDhxIgA333wzjz76KACdO3dm+PDh9O/fn379+gFF96N87rnnSE9Pp1+/fhd9FAxUwiqWoAho0qtoAcg7AbuX/TbYf+03sLT4pqQd7oJrXnAuq4iIuI8SjlidcOjekUFBQf/9+v/+7//o2bMnkyZNYvv27We8F6m/v/9/v/b29iY/P/+iMrz99tssWLCA7777jnbt2rFkyRIGDx5MUlIS3333Hddccw3vvPMOl1xyyUVtRyWsIvOtAvGdihaAwkI4sL5oTFn1i2/wIiIirpSRkUHt2rUB+PDDD8v88zt16kRKSgo333wz48aNo2vXoqmktmzZQlJSEklJSUybNo2dO3eSkZFBvXr1GDlyJGlpaaxcuVIlTM6DlxdENStaRERE3Nyjjz7KsGHD+Nvf/kavXr0u+vNatWqFV/FsAv379+f111/n1ltv5YUXXvjvwHyAP/3pT2zatAlrLZdeeimtW7fmn//8J5988gm+vr7UrFmTJ5544qLzGGvtRX9IeUpMTLSLFy926TZSU1PPeMhTPIP2oefTPvR82ofubd26dTRt2vSMr2c6dDrSk5X0Z2qMWWKtLXE+DU0uJSIiIuIAlTARERERB6iEiYiIVFKeNiTJnV3In6VKmIiISCUUEBDAoUOHVMTKgLWWQ4cOERBwfnercenVkcaYq4DXAG/g39baf5z2+itAz+KHgUANa21VV2YSERERiImJIT09nQMHDpT4ek5OznmXisosICCgVBPEnsplJcwY4w2MAS4H0oFFxpjJ1tq1v65jrX3olPXvBxJclUdERER+4+vrS926dc/4empqKgkJ+rHsSq48HdkB2Gyt3WqtzQVSgD5nWX8Q8JkL84iIiIi4DV
eejqwN7DzlcTqQVNKKxph4oC7w8xlevwu4CyAqKorU1NQyDXq6rKwsl29DXEv70PNpH3o+7UPPpv3neu4yY/5A4EtrbYm3JLfWjgXGQtFkra6e/E8TDHo+7UPPp33o+bQPPZv2n+u58nTkLiD2lMcxxc+VZCA6FSkiIiKViCtL2CKgoTGmrjHGj6KiNfn0lYwxTYBqwDwXZhERERFxKy4rYdbafGAEMB1YB3xhrV1jjHnWGHPdKasOBFKsJioRERGRSsSlY8KstVOBqac999Rpj0e5MoOIiIiIO9KM+SIiIiIOUAkTERERcYBKmIiIiIgDVMJEREREHKASJiIiIuIAlTARERERB6iEiYiIiDhAJUxERETEASphIiIiIg5QCRMRERFxgEqYiIiIiANUwkREREQcoBImIiIi4gCVMBEREREHqISJiIiIOEAlTERERMQBKmEiIiIiDlAJExEREXGASpiIiIiIA1TCRERERBygEiYiIiLiAJUwEREREQeohImIiIg4QCVMRERExAEqYSIiIiIOUAkTERERcYBKmIiIiIgDVMJEREREHKASJiIiIuIAlTARERERB6iEiYiIiDhAJUxERETEASphIiIiIg5QCRMRERFxgEqYiIiIiANUwkREREQcoBImIiIi4gCVMBEREREHqISJiIiIOEAlTMTTnDgC4/rDlhlgrdNpRETkAqmEiXiag5thzwr4pC+8ewms+xYKC51OJSIi50klTMTTxLaHB1fCta/CicPw+RB4qyOs+BwK8p1OJyIipaQSJuKJfPwh8VYYsQT6/RswMOkueL0tLH4f8nKcTigiIuegEibiybx9oNVNcM9cGPgZBEXAtw/Ba61h7utwMsvphCIicgYqYSIVgZcXNLkG7vgJbvkGIhvBD3+BV5pD6j8g+7DTCUVE5DQqYSIViTFQrwcMmwK3/wjxnSD1eXi1ZVEpy9zrdEIRESmmEiZSUcW2h0GfFZ2qbHw1zBsDr7YqOl15ZLvT6UREKj2VMJGKLqo53PBvuH8JtBkEyz6F0W1h4l2wf53T6UREKi2VMJHKIrwe9H4NHlgByffAuinwZjKkDIFdS5xOJyJS6aiEiVQ2odFw5XPw4Gro/mfYPqto0teP+8K2WZqFX0SknKiEiVRWQdWh5xNFZezyZ2HfGvjoWnjvCtjwvcqYiIiLqYSJVHYBodD5gaJZ+Hu9VHQF5WcD4O0usOpLKCxwOqGISIWkEiYiRXyrQPs7YORSuP4dKMiDr26HNxJhyUeQf9LphCIiFYpKmIj8L29faD0Q7p0P/T8B/1CYMhJeawPz3oTc404nFBGpEFTCRKRkXl7Q7Dq4KxWGTiy6unL64/BKC/jlBThxxOmEIiIeTSVMRM7OGGhwKdz6Hdw2HWLaw4y/wSst4T9PQ9Z+pxOKiHgklTARKb24ZBjyBdw9CxpeDnNeK7ol0nePwNE0p9OJiHgUlTAROX+1WsFNH8CIxdDyJljyIYxOgEn3wIGNTqcTEfEIKmEicuEiGkCfN+CB5dD+TlgzCcZ0gC9ugd3LnU4nIuLWVMJE5OKFxcDV/4CHVkPXh2HLDBjbHT69AXbMdTqdiIhbUgkTkbITFAGXPlVUxi59quho2AdXw/tXwab/aBZ+EZFTqISJSNkLCIOuf4QHV8HV/4KjO2HcjfBO16JTlpqFX0REJUxEXMgvEJLuhpHLoM8YyDsBE4YXjRtb9ink5zqdUETEMSphIuJ6Pn6QMBTuWwg3fVh0i6Rv7iu6onLBO5Cb7XRCEZFypxImIuXHyxuaX180z9iQL4sG9E97tGiusVkvQU6G0wlFRMqNSpiIlD9jiiZ7vX063DoNarWGn54tuiXST8/C8YNOJxQRcTmXljBjzFXGmA3GmM3GmMfOsE5/Y8xaY8waY8x4V+YRETcU3wlunlh0j8p6PWDWy/BKC+K3pzibS0TExXxc9cHGGG9gDHA5kA4sMsZMttauPWWdhsDjQGdr7RFjTA1X5R
ERNxedAAM+gQMbYParFGb6Op1IRMSlXHkkrAOw2Vq71VqbC6QAfU5b505gjLX2CIC1VncCFqnsIhvD9W+xM66f00lERFzKZUfCgNrAzlMepwNJp63TCMAYMwfwBkZZa78//YOMMXcBdwFERUWRmprqirz/lZWV5fJtiGtpH3o+7UPPp33o2bT/XM+VJay0228I9ABigJnGmJbW2qOnrmStHQuMBUhMTLQ9evRwaajU1FRcvQ1xLe1Dz6d96Pm0Dz2b9p/rufJ05C4g9pTHMcXPnSodmGytzbPWbgM2UlTKRERERCo0V5awRUBDY0xdY4wfMBCYfNo6X1N0FAxjTARFpye3ujCTiIiIiFtwWQmz1uYDI4DpwDrgC2vtGmPMs8aY64pXmw4cMsasBWYAf7LWHnJVJhERERF34dIxYdbaqcDU05576pSvLfBw8SIiIiJSaWjGfBEREREHqISJiIiIOEAlTERERMQBKmEiIiIiDlAJExEREXGASpiIiIiIA1TCRERERBygEiYiIiLiAJUwEREREQeohImIiIg4QCVMRERExAEqYSIiIiIOUAkTERERcYBKmIiIiIgDVMJEREREHKASJiIiIuIAlTARERERB6iEiYiIiDhAJUxERETEASphIiIiIg5QCStBobVORxAREZEKTiXsNAu3Heb/5pxgb0aO01FERESkAlMJO014kB8HTlge/HwZBYU6IiYiIiKuoRJ2mgY1grm5qR/ztx7mjZ83Ox1HREREKigfpwO4oy61fTjsG8lrP20kqV44yfWqOx1JREREKhgdCSuBMYa/9m1BfPUgHkhZxuHjuU5HEhERkQpGJewMgv19eGNwAkeO5/HIhBVYXTEpIiIiZUgl7CyaR4fxZK+m/Lx+P+/N3uZ0HBEREalAVMLO4ZaO8VzZPIp/fr+eFTuPOh1HREREKgiVsHMwxvCvG1pTIySAEZ8t5VhOntORREREpAJQCSuFsEBfRg9qw+6jOTw+cZXGh4mIiMhFUwkrpXbx4Tx8eSO+W7mHlEU7nY4jIiIiHk4l7Dzc070+XRtGMGryGjbszXQ6joiIiHiws5YwY4y3Meah8grj7ry8DC/3b0NIgC8jxi/lRG6B05FERETEQ521hFlrC4BB5ZTFI0SG+PPqgDZsPpDFqMlrnI4jIiIiHqo0pyPnGGPeMMZ0Nca0/XVxeTI31qVhBPf2qM/ni3fyzfJdTscRERERD1Sae0e2Kf7vs6c8Z4FLyj6O53joskYs2HqYJyauonVMVepEBDkdSURERDzIOY+EWWt7lrBU6gIG4OPtxWuDEvDx9mLEZ0s5ma/xYSIiIlJ65yxhxpgoY8x7xphpxY+bGWNud30091e7ahX+dWMrVu86xj+nbXA6joiIiHiQ0owJ+xCYDkQXP94IPOiqQJ7myuY1Gd6pDu/P2caPa/c5HUdEREQ8RGlKWIS19gugEMBamw/o3NspHr+mCc2jQ3nkyxXsPnrC6TgiIiLiAUpTwo4bY6pTNBgfY0wykOHSVB7G38ebNwa3JS+/kAdSlpFfUOh0JBEREXFzpSlhDwOTgfrGmDnAx8D9Lk3lgepGBPHc9S1ZtP0Ir/20yek4IiIi4ubOOUWFtXapMaY70BgwwAZrbZ7Lk3mgvgm1mbP5IG/M2Exyvep0bhDhdCQRERFxU6W5OjIAGAn8FXgGuK/4OSnBM32aUy8iiAc/X86BzJNOx5EKavP+LKy1TscQEZGLUJrTkR8DzYHXgTeKv/7ElaE8WaCfD28MbkvGiTz+OGEFhYX6QSlla9KydC57+Remrd7rdBQREbkIpSlhLay1t1trZxQvd1JUxOQMmtYK5alrmzFz4wHGztrqdBypQA4fz+Wv364D4ON52x3NIiIiF6c0JWxp8RWRABhjkoDFrotUMQxJiuOaljV5cfoGlqYdcTqOVBB/n7qOYyfy6JdQm/lbD7N5f6bTkURE5AKdsYQZY1YZY1YC7YC5xpjtxpjtwDwgsZ
zyeSxjDM/3a0XNsADuH7+MjGxdyyAXZ+6Wg3y5JJ07u9XjiV5N8fU2fDo/zelYIiJygc52JOxaoDdwFVAX6F681AWudn00zxdWxZc3Brdl37Ec/vzVSg2klguWk1fAk5NWExceyAOXNiQi2J+rW9Tiq6XpnMjV3MkiIp7ojCXMWrvj1wU4BoQB1U9ZpBTaxFbl0asa8/2avXw6f4fTccRDvTljM9sOHue561sQ4OsNwNDkeDJz8pmyYrfD6URE5EKcc54wY8xfgeHAFopnzS/+7yWui1Wx3NGlHnO3HOKv362jbXw1mkeHOR1JPMimfZm89csW+raJpmvDyP8+375ONRpFBfPpgh30bx/rYEIREbkQpRmY3x+ob63tYa3tWbyogJ0HLy/DSze1pmoVX+7/bBnHT+Y7HUk8RGGh5YlJqwjy9+Ev1zb7n9eMMQxJimdlegYr0486lFBERC5UaUrYaqCqq4NUdNWD/Xl1YBu2HTzOU9+scTqOeIjPF+9k0fYjPHF1UyKC/X/3+vVta1PF15txGqAvIuJxSlPCngeWGWOmG2Mm/7q4OlhF1Kl+BPdf0pCvlqYzcWm603HEze3PzOH5qetIqhvOTYkxJa4TGuBLnzbRfLNiFxkndAWuiIgnKU0J+wj4J/AP4KVTFrkAIy9pQIe64fzl69VsOZDldBxxY3/9dh05eYX8vV9LjDFnXG9ocjw5eYUq9iIiHqY0JSzbWju6eLb8X35dXJ6sgvLx9mL0wAT8fbwYMX4ZOXmaXkB+L3XDfqas2M19PRtQPzL4rOu2qB1G69iqjFuQpmlQREQ8SGlK2CxjzPPGmI7GmLa/Li5PVoHVDAvgpf6tWbfnGH+fus7pOOJmsnPz+cvXq6kfGcQfetQr1XuGJMWxeX8WC7YddnE6EREpK6UpYQlAMvB3fjsV+aIrQ1UGlzSJ4o4udfl43g6+X73H6TjiRl77cRPpR07wfL9W+Pt4l+o9vVtFExrgo7noREQ8yDnnCbPW9iyPIJXRo1c1YdH2wzz65UqaR4cRGx7odCRx2JrdGfx79jYGto+lQ93wUr+vip83N7aL5ZP52zmQeZLIkN9fSSkiIu7lnEfCjDFPlbSUR7iKzs/Hi9cHtcVaeCBlGXkFhU5HEgcVFFqemLiKaoG+PH510/N+/5DkOPIKLF8s3umCdCIiUtZKczry+ClLAUX3jazjwkyVSlz1QP7eryVL047y8n82Oh1HHPTxvO2sSM/g/65tRlig73m/v35kMB3rVWf8gjQKCjVAX0TE3Z2zhFlrXzpleQ7oAZRutLCUSu/W0QzqEMtbqVuYufGA03HEAbuPnuDF6Rvo1iiS61pHX/DnDE2OZ9fRE/p7JCLiAUpzJOx0gUDJM0fKBXvq2uY0igrm4S+Wsz8zx+k4Us6enryGAmt5rm+Ls84Jdi5XNI8iMsRfA/RFRDxAacaErTLGrCxe1gAbgFddH61yqeLnzZjBbck6mc9Dny/X6aRK5PvVe/nP2n08eFmji744w9fbiwGJsfy8YT/pR7LLKKGIiLhCaY6EXQv0Ll6uAKKttW+4NFUl1TAqhGeua86czYd4K3Wz03GkHGTm5DFq8hqa1Azh9i51y+QzByXFYYCUhRqgLyLizkozJmzHKcsua21+aT/cGHOVMWaDMWazMeaxEl4fbow5YIxZXrzccb7fQEXTPzGW61pH8/J/NrJouyberOhenL6BfZk5/OOGVvh6X8jogN+rXbUKlzSpQcqineTm64pbERF3dcb/6xtjMo0xx4qXzFMeZxtjzlnEjDHewBiKrqZsBgwyxjQrYdXPrbVtipd/X/B3UkEYY3ju+hbEhgcy8rNlHM3OdTqSuMiytCN8PH8HtyTH0ya2apl+9pCkeA5mneSHtXvL9HNFRKTsnLGEWWtDrLWhxUsIUAt4DtgLvFaKz+4AbLbWbrXW5gIpQJ+yCF3RhQT48vqgBA5mneSRCSt1P8AKKK+gkMcnriIqJIBHrm
xc5p/frVEkMdWqMG5+Wpl/toiIlA1zrh/wxpiqwIPALcB44BVr7aFzfrAxNwJXWWvvKH58M5BkrR1xyjrDgeeBA8BG4CFr7e8Gshhj7gLuAoiKimqXkpJSqm/uQmVlZREcfPabJpeH6dvz+Gx9LkOa+HF5nfOfN6oyc5d9eCZTt+byxcY87k/wp13UOW9ccUG+3ZrLlxvz+HuXKkQHl82pzvLk7vtQzk370LNp/5WNnj17LrHWJpb02hn/72+MiQD+CAwA3gcSrLUZZZxtCvCZtfakMeZu4CPgktNXstaOBcYCJCYm2h49epRxjP+VmpqKq7dRGt2t5cDHi5mw8SCDLu9Ai9phTkfyGO6yD0uy83A2k3/6hcubRfHHASX+uywTLRJP8s3zP7HJRjG4R3OXbcdV3HkfSuloH3o27T/XO9uv4DsoOkL1AZAN3H7q/EXW2pfP8dm7gNhTHscUP3fqZ5x6RO3fwL/OHbnyMMbwwo2tuWb0LEaMX8q3I7sS7O+aoyZSPqy1PPn1aryN4ZnrXFuMIoL9uapFLb5aks6jVzahil/pbgYuctEK8uH4fvxOHoJju0978Qzz4J1xfrzzWd+Vn10Cb1/wDynduiIlONtP9BeAX89VXsjfskVAQ2NMXYrK10Bg8KkrGGNqWWv3FD+8Dlh3Adup0KoF+fHawAQGjp3Hk5NW8eqANhc1mac4a/KK3czceICnezcjumoVl29vaFIcU1bsZsrK3fRPjD33G0TKwrF0eK01nQDmOR3Ghep2h2GTnU4hHuyMJcxaO+piPtham2+MGQFMB7yB9621a4wxzwKLrbWTgZHGmOuAfOAwMPxitllRdagbzkOXNeKl/2ykc/0I+rfXD1NPdDQ7l79+u5bWMWHc0rFOuWyzQ91wGtYIZtz8HSphUn6qhEPv19iwYSONGzf67fkzjkE+w/Pns/75XsBUFp8deuG3GBOBsx8Ju2jW2qnA1NOee+qUrx8HHndlhori3p4NmLf1EE9NXk1CXFUaRukQuKf5x7T1HMnO46PbOuDtVT5HM40xDEmKY9SUtaxKz6BljMYVSjkICIV2w9mTmUrjdj2cTiPitjzvkqlKytvL8OqANgT5+TBi/DJy8gqcjiTnYQ5YhJQAACAASURBVOG2w6Qs2sntXerSPLp8i1C/djFU8fVm3ALdT1JExJ2ohHmQGqEBvNS/NRv2ZfLst2udjiOldDK/gMcnriSmWhUevKxhuW8/NMCXPm2i+Wb5bo7l5JX79kVEpGSluYG3vzFmsDHmCWPMU78u5RFOfq9H4xrc3b0e4xek8d3KPed+gzju7dStbDlwnL/1bUGgnzNXtw5JiudEXgETl6Q7sn0REfm90hwJ+4aime7zgeOnLOKQR65oTEJcVR77aiVph7KdjiNnseVAFmNmbKZ362h6NK7hWI6WMWG0jglj3II03YFBRMRNlKaExVhrB1hr/2WtfenXxeXJ5Ix8vb0YPTABY+D+z5bqJs1uylrLk5NWEeDrxVPXlnTb1PI1JDmeTfuzWLhNN4YXEXEHpSlhc40xLV2eRM5LbHgg/7qxFSvSM3hh+nqn40gJJixJZ/7Wwzx+TVMiQ/ydjkPvVtGEBvjw6QLdT1JExB2csYQZY1YZY1YCXYClxpgNxpiVpzwvDruqRS1uTo7n3Vnb+Hn9PqfjyCkOZp3k71PX0b5ONQa4yfxcVfy8uaFdDN+v3sPBrJNOxxERqfTOdiTsWqA3cDXQALii+PGvz4sbeLJXU5rWCuWRCSvZm5HjdBwp9rdv13L8ZD7P92uJVznNCVYaQ5LiySuwfLF4p9NRREQqvTOWMGvtDmvtDqAWcPiUx0eAmuUVUM4uwNebNwYnkJNXwIOfL6OgUIOunTZr0wG+Xr6be7rXp0EN95pUt0GNYJLrhTN+QZr+roiIOKw0Y8LeArJOeZxV/Jy4ifqRwTzbpwXztx7m9Z83OR2nUjuRW8CTk1ZTLyKIe3s2cDpOiYYmx5
N+5AQzNx1wOoqISKVWmhJm7CnXtFtrC3Hx7Y7k/N3YLoZ+CbUZ/dMm5m895HScSmv0z5tIO5zN365vQYCvt9NxSnRFs5pEBPszbr5m0BcRcVJpSthWY8xIY4xv8fIAsNXVweT8/bVvC+pUD+KBlGUc0sDrcrd+7zHenbmVG9vF0Kl+hNNxzsjPx4sB7WP4ef1+dh094XQcEZFKqzQl7A9AJ2BX8ZIE3OXKUHJhgvx9eH1wAkey83hkwgoKNean3BQWWh6fuIrQKr48eU1Tp+Oc06AOcVggZaGmqxARcco5S5i1dr+1dqC1tkbxMthau788wsn5ax4dxl96NWXGhgO8N3ub03EqjXELdrAs7Sh/6dWUakF+Tsc5p5hqgVzSuAYpi3aSV6DJfkVEnFCae0fGGGMmGWP2Fy9fGWNiyiOcXJibk+O5snkU//x+Pct3HnU6ToW371gO//p+A50bVOf6hNpOxym1ocnxHMg8yQ9rNMeciIgTSnM68gNgMhBdvEwpfk7clDGGf93QmqjQAO7/bCnHcvKcjlShjZq8htyCQp7r2xJj3GdOsHPp1iiS2lWr8KkG6IuIOKI0JSzSWvuBtTa/ePkQiHRxLrlIYYG+jB7Uht1Hc3h84irdtNlFfly7j2mr9zLy0obUiQhyOs558fYyDE6KY97WQ2zen3XuN4iISJkqTQk7ZIwZaozxLl6GApoDwQO0iw/nj1c04ruVe/hsoWZIL2vHT+bz1DeraRQVzJ1d6zkd54L0T4zF19swXveTFBEpd6UpYbcB/YG9xcuNwK2uDCVl5w/d6tO1YQTPTFnD+r3HnI5Tobz0w0Z2Z+TwfL+W+PmU5p+S+4kM8efK5jX5cslOTuQWOB1HRKRSKc3VkTustddZayOLl77WWv3a7CG8vAwv929DaBVfRoxfRnZuvtORKoRV6Rl8OHcbQ5LiaBcf7nScizI0OZ5jOfl8u3K301FERCqV0lwdWc8YM8UYc6D46shvjDGeee6lkooM8efVAW3YciCLUZPXOB3H4+UXFPLYxJVUD/bn0auaOB3noiXVDadBjWA+1SlJEZFyVZpzKOOBLyi6kXc0MAH4zJWhpOx1bhDBfT0a8MXidL5ZvsvpOB7tw7nbWbP7GKN6Nyesiq/TcS6aMYYhSXGs2HmU1bsynI4jIlJplKaEBVprPznl6shPgQBXB5Oy9+BlDWlfpxpPTFylq+EuUPqRbF76YSOXNqnBNS1rOh2nzPRrG0MVX2/GLdB0FSIi5aU0JWyaMeYxY0wdY0y8MeZRYKoxJtwY49mDYSoZH28vXhuYgJ+PF33emM0n87br1kbnwVrLU9+swRh4tm8Lj5oT7FzCqvhyXetovl62W/PKiYiUk9KUsP7A3cAMIBW4BxgILAEWuyyZuER01SpMHtGFtvHV+L9v1jBw7Hy2HtBRsdKYumovP6/fz8OXN6J21SpOxylzQ5LjOJFXwKSlOl0tIlIeSnN1ZN2zLBqg74FiwwP5+LYOvHBjK9bvPcbVr83i7V+2kK97CJ5Rxok8Rk1ZQ4vaoQzvVMfpOC7RKqYqrWLCGLdghyb3FREpB2csYcWnHX/9+qbTXvu7K0OJ6xljuCkxlh8f7k6PxpH8Y9p6rn9zLuv2aC6xkvzr+/UcyjrJP/q1wsfbM+cEK42hSfFs3JfFou1HnI4iIlLhne2nycBTvn78tNeuckEWcUCN0ADeHtqOMYPbsifjBL1fn83LP2zgZL4m7vzVkh2HGbcgjVs716VF7TCn47jUta1rERLgo/tJioiUg7OVMHOGr0t6LB7MGEOvVrX4z0Pdua51NKN/3sy1o2ezLE1HQ3LzC3l84ipqV63Cw5c3cjqOywX6+XBD2ximrd7DwayTTscREanQzlbC7Bm+LumxVADVgvx4eUAbPhjenqyT+fR7ay5//XZtpb6dzdiZW9i4L4tn+zQnyN/H6TjlYmhyHHkFlgmL052OIiJSoZ2thLU2xhwzxmQCrYq//v
Vxy3LKJw7o2aQGPzzUjSFJcbw3extXvjqTuVsOOh2r3G07eJzRP2/mmpY1ubRplNNxyk2DGiEk1wtn/MIdmsJERMSFzljCrLXe1tpQa22Itdan+OtfH3v+NOFyViEBvvytb0tS7krGy8Dgdxfw+MRVlWYOKWstT05ahb+3F0/3bu50nHI3JCmenYdPMHPTAaejiIhUWBX3Mi8pE8n1qjPtgW7c1a0eny9K44qXZ/LTun1Ox3K5iUt3MXfLIR69uglRoZXvBhFXNq9JRLA/n87X/SRFRFxFJUzOqYqfN09c05RJ93YmrIovt3+0mAdSlnH4eK7T0Vzi8PFc/vbdWtrGVWVIhzin4zjCz8eLAe1j+Hn9PnYdPeF0HBGRCkklTEqtdWxVptzfhQcva8jUVXu47OVfmLxid4Wb2PO579aRmZPP8/1a4eVVeS8EHtg+DgukLNTRMBERV1AJk/Pi5+PFg5c14tv7uxJbrQojP1vGnR8vYd+xHKejlYm5mw/y1dJ07upWj8Y1Q5yO46jY8EB6Nq5ByqKd5OluCiIiZU4lTC5I45ohTLy3M09e05RZmw5w2cu/8PmiNI8+KpaTV8CTX68mvnogIy9t6HQctzA0OY4DmSf5z9qKPw5QRKS8qYTJBfP2MtzZrR7TH+xGs1qh/PmrVQx9bwE7D2c7He2CjJmxmW0Hj/Nc35YE+Ho7HcctdG9Ug9pVq2gGfRERF1AJk4tWJyKIz+5M5rnrW7BiZwZXvDKT92dvo8CD5pjatC+Tt3/ZwvUJtenSMMLpOG7D28swOCmOuVsOseVAltNxREQqFJUwKRNeXoYhSfH88FA3kuuF8+y3a7np7bls3p/pdLRzKiy0PD5xFUH+PvylV1On47id/omx+HgZxi/QAH0RkbKkEiZlKrpqFd4f3p5XBrRm68HjXPPabN74eZNbD+xOWbSTxTuO8MQ1Take7O90HLcTGeLPlS1q8uWSdHLyKu8trEREyppKmJQ5YwzXJ8Tw48Pdubx5FC/+sJE+b8xh9a4Mp6P9zv7MHJ6fto7keuHc1C7G6Thua2hSPBkn8vh25R6no4iIVBgqYeIyEcH+jBnclndubseBrJP0GTOHf32/3q2Opjw7ZS0n8wp57vqWGFN55wQ7l+R64TSoEawB+iIiZUglTFzuyuY1+fGh7vRLqM2bqVu4ZvQsFm8/7HQsZmzYz7cr93BfzwbUjwx2Oo5bM8YwJCmO5TuPuuURTRERT6QSJuUiLNCXF25qzSe3d+BkXiE3vTOPUZPXcPxkviN5snPz+cuk1TSoEcwfetRzJIOn6dc2hgBfL8ZpgL6ISJlQCZNy1bVhJD881I1hHevw0bztXPHKTGZtOlDuOV79cRO7jp7g+X4t8ffRnGClEVbFl+taR/PN8l1k5uQ5HUdExOOphEm5C/L3YdR1zZlwd0f8fb24+b2F/GnCCjKyy+cH+5rdGbw3exuDOsTSvk54uWyzohiaHE92bgGTlu1yOoq4uc37M8n3oLkCRZygEiaOSawTztSRXbm3R30mLtvFZa/8wvQ1e126zYLiOcGqBfrx2FWaE+x8tYqpSsvaYYyb79m3qBLXWrLjMJe9PJPnF+SwN6Ni3FdWxBVUwsRRAb7ePHpVE765rzORwf7c/ckS7hu3lAOZJ12yvY/nbWdlegZP9W5GWKCvS7ZR0Q1NjmPDvkwW7zjidBRxU2NmbCEkwIf0rEKufX02C7c5fyGOiDtSCRO30KJ2GN+M6MyfrmzMf9bu4/JXfmHSsvQyPdqy++gJXpy+ge6NIundqlaZfW5l07t1NCEBPpquQkq0dvcxfl6/n7u61uOp5CqEBPgw+N35fDhnm46eipxGJUzchq+3F/f1bMDUB7pQLyKIhz5fwW0fLmL30RMX/dnWWp76Zg0F1vK3vi00J9hFCPTz4Ya2MUxbtZdDWa45Yime661fthDk580tHetQO8SLb0Z0pkfjGoyaspY/frGCE7nuM0+giNNUws
TtNKgRwoQ/dOKpa5sxf+thrnhlJp/O30HhRQzynb5mLz+u28dDlzUiNjywDNNWTkOS4sgtKGTCknSno4gb2X7wON+t3M3Q5Pj/nu4PDfBl7M3tePjyRkxavosb3prLzsPZDicVcQ8qYeKWvL0Mt3Wpyw8PdaN1bBh/+Xo1g96dz/aDx8/7s47l5PH05DU0rRXKbV3quiBt5dMwKoSkuuGMX5B2UeVYKpZ3Zm7Fx9uL20/7d+blZRh5aUPeH9ae9CPZ9H5jNjM3lv/UNCLuRiVM3FpseCCf3p7EP29oydo9x7jy1ZmMnbmFgvP4wf/i9A3szzzJ8/1a4uutv/JlZUhyPGmHs5npwDxv4n72HcvhqyXp3NQuhhqhASWu07NJDSaP6ELN0ACGfbCQMTM2a5yYVGr6iSRuzxjDgPZx/Phwd7o2jOTvU9fT7805bNibec73Lk07wifzdzCsYx3axFYth7SVx1XNaxIR7KcZ9AWA92ZvI7+wkLu71T/renUigph4byeubRXNC9M38IdPl2jyX6m0VMLEY0SFBvDuLe14fVAC6UdOcO3rs3j1x43k5heWuH5eQSFPTFxFVEgAf7yiUTmnrfj8fLzonxjLT+v2lcnFE+K5jmbn8un8HfRuHU1c9XOPuQz082H0wDb8pVdTfly3n75j5rB5f1Y5JBVxLyph4lGMMfRuHc1/Hu7ONS1r8eqPm+j9+mxW7Dz6u3X/PWsb6/dm8kyf5oQEaE4wVxjUIQ4LpCzU0bDK7KO5O8jOLeCeHmc/CnYqYwx3dK3HJ7d34Gh2Hn3HzHH5ZM0i7kYlTDxSeJAfrw1M4L1hiWScyOP6N+fw96nr/nv5+/7sQl77aSNXNIviyuY1HU5bccWGB9KjUSQpi3aSV1DyEUmp2LJz8/lw7jYubVKDJjVDz/v9nepHMOX+LtSPDOLuT5bwwvT15zXmU8STqYSJR7u0aRQ/PNyNAe3jGDtzK1e/NpP5Ww/x8ZpcfLy8eKZPc6cjVnhDk+PZn3mSH9fuczqKOOCzhTs5kp3HvT0bXPBnRFetwud3d2Rg+1jGzNjCrR8u4mh2bhmmFHFPKmHi8UIDfHm+X0vG35lEoYWBY+ez+lABj1zRiFphVZyOV+H1aFyD2lWr8OkCzaBf2ZzML+DdmVtJqhtOu/hqF/VZAb7e/OOGVvz9+pbM23KQ3m/MZu3uY2WUVMQ9qYRJhdGpfgTTH+zG3d3q0bGWNzd3rON0pErB28swqEMsczYfYusBDa6uTL5etou9x3Iu6ijY6QYnxfH53R3Jy7f0e2sOXy/bVWafLeJuVMKkQqni583j1zTl7tYBeHvp1kTlpX/7WHy8DOM1XUWlUVBoefuXrTSPDqVbw4gy/ey2cdWYcn8XWsVU5cHPl/PMlDUacygVkkqYiFy0GiEBXNmiJhOWpJOTp3sDVgbfr97LtoPHua9nA5fcizUyxJ9xdyRxW+e6fDBnO0P+vYADmbpXqVQsKmEiUiaGJMWRcSKP71bucTqKuJi1ljdTN1MvIsilVx/7envxVO9mvDqgDSvTj3Lt67NYmnbEZdsTKW8qYSJSJjrWq079yCAN0K8Eftl4gDW7j/GH7vXL5bR/34TaTLynM34+Xgx4Zx7jFuzQ7Y6kQlAJE5EyYYxhSFI8y9KOsmZ3htNxxIXeTN1CrbAA+ibULrdtNosOZcqILnSqH8GTk1bz2FerdOpbPJ5LS5gx5ipjzAZjzGZjzGNnWe8GY4w1xiS6Mo+IuNYNbWMI8PXS/SQrsCU7DrNw22Hu6FoPP5/y/T2+aqAf7w9vz4ieDfh88U4GvDNPt8wSj+ayf0HGGG9gDHA10AwYZIxpVsJ6IcADwAJXZRGR8hEW6EvvVtF8vWyXbspcQb05YwvVAn0Z1CHWke17exkeubIx79zcji0HjtP79dnM3XLQkSwiF8uVv8Z0ADZba7daa3OBFKBPCev9FfgnkOPCLCJSToYmx5OdW6D5nSqgdXuO8dP6/d
zauS6Bfj6OZrmyeU2+GdGZakF+3PzeQv49a6vGiYnHMa76S2uMuRG4ylp7R/Hjm4Eka+2IU9ZpCzxprb3BGJMKPGKtXVzCZ90F3AUQFRXVLiUlxSWZf5WVlUVwcLBLtyGupX3oHGsto+blUFBo+WvnKhc8fYH2oft5e0UOy/cX8FKPQIJ8z71fy2Mfnsi3vLfqJIv3FZBU05vbWvjj76M5AsuC/g2WjZ49ey6x1pY43MqxX2WMMV7Ay8Dwc61rrR0LjAVITEy0PXr0cGm21NRUXL0NcS3tQ2ftC0rjsYmrCKnbmsQ64Rf0GdqH7mXHoeMsnJ7KHV3r0evypqV6T3ntw6sutbz1yxZenL6Bo9abd25uR52IIJdvt6LTv0HXc+XpyF3AqYMGYoqf+1UI0AJINcZsB5KByRqcL+L5rmsTTYi/D5/O13QVFcU7M7fi4+XF7V3qOh3ld4wx3NujAR/e2oF9mTlc98ZsZqzf73QskXNyZQlbBDQ0xtQ1xvgBA4HJv75orc2w1kZYa+tYa+sA84HrSjodKSKeJdDPh35tazN11V4OZWmWc0+3/1gOXy5O58bEGKJCA5yOc0bdGkUyZUQXYsMDue2jRYz+aROFhRonJu7LZSXMWpsPjACmA+uAL6y1a4wxzxpjrnPVdkXEPQxJjie3oJAvl6Q7HUUu0nuzt5FfWMjd3eo5HeWcYsMD+eqeTlzfpjYv/2cjd32ymGO6UlfclEsnebHWTrXWNrLW1rfWPlf83FPW2sklrNtDR8FEKo5GUSF0qBvO+IVpOhrhwTKy8/h0/g6ubRVNfHXPGGcV4OvNS/1b88x1zUndcIA+b8xh475Mp2OJ/I5mzBcRlxmaHM+OQ9nM2qx5nDzVR/O2czy3gHt61Hc6ynkxxjCsUx0+uyuZrJP59B0zR/c1FbejEiYiLnNl8yiqB/kxTgP0PVJ2bj4fzNnGpU1q0LRWqNNxLkj7OuF8e38XmtYK5b7xS3l+2jryCwqdjiUCqISJiAv5+3jTv30sP67bx54M3V7G06Qs3MmR7Dzu7elZR8FOFxUawGd3JjM0OY53ftnKsA8Wcvh4rtOxRFTCRMS1BneIwwKfLdzpdBQ5D7n5hbw7aysd6obTLv7C5npzJ34+Xvytb0v+dWMrFm0/Qu/XZ7MqXTeaF2ephImIS8WGB9K9USQpC9PI02kgj/H1sl3sycjhXg8bC3Yu/RNj+eoPnQC44e25unpXHKUSJiIuNzQpnv2ZJ/lp3T6no0gpFBRa3v5lC82jQ+neKNLpOGWuZUwYk0d0pn2dajwyYQX/9/VqcvP1C4KUP5UwEXG5nk1qEB0WwKfz05yOIqUwfc1eth48zr09GlzwvT/dXfVgfz66tQN3d6vHJ/N3MOjd+ew7luN0LKlkVMJExOW8vQyDOsQxe/NBth087nQcOQtrLW+mbqZeRBBXtajpdByX8vH24vFrmjJmcFvW7TnGta/PZtH2w07HkkpEJUxEysWADrH4eBnGL9B0Fe5s5qaDrN51jLu718Pbq2IeBTtdr1a1+Pq+zgT7+zBo7Hw+nrcdazXBsLieSpiIlIsaIQFc2bwmE5akk5NX4HQcOYM3Z2ymZmgA1yfEOB2lXDWKCuHr+zrTo3EkT32zhj9OWKG/p+JyKmEiUm6GJMVxNDuPqas0c7k7WrLjCAu2HeaOrnXx86l8Px7Cqvgy9uZEHrqsEZOW7eKGt+ay83C207GkAqt8/8pExDEd61enXmQQn2oGfbf0VupmqgX6MqhDnNNRHOPlZXjgsoa8NyyRtMPZXPfGbGZtOuB0LKmgVMJEpNwYYxiSFM/StKOs3X3M6ThyivV7j/Hjuv0M71SXIH8fp+M47pImUUwZ0YUaIQEMe38hb6Vu0TgxKXMqYSJSrm5sG4O/jxfjNEDfrbyVuoUgP2+GdYp3OorbqBMRxKT7OnFNy1r88/v13DtuKVkn852OJRWISpiIlKuwQF96t47m62W79APNTaQdym
bKit0MToqjaqCf03HcSqCfD68PSuAvvZryw9p99B0zhy0HspyOJRWESpiIlLuhyfEczy1g0rJdTkcR4J2ZW/Dx8uKOrvWcjuKWjDHc0bUen9zegSPHc+n7xhx+WLPX6VhSAaiEiUi5ax0TRovaoYybv0PjbBy2PzOHCUvSuaFdDFGhAU7HcWud6kcw5f4u1IsM4q5PlvDC9PXk636ochFUwkSk3P06QH/93kyWph1xOk6l9t7sbeQXFPKH7joKVhrRVavw+d0dGdQhljEztjBg7HzSj2gaC7kwKmEi4og+baIJ8ffR/SQdlJGdx6fzdtCrVTTx1YOcjuMxAny9eb5fK0YPSmDD3kyueW0W36/W3Hdy/lTCRMQRgX4+9Gtbm+9W7uHw8Vyn41RKH8/bzvHcAu7pXt/pKB7putbRTB3ZlbqRwfzh06U8OWmVZtmX86ISJiKOGZIcT25BIV8u2el0lErnRG4BH8zdziVNatAsOtTpOB4rrnogE+7uyN3d6zFuQRp93pjDxn2ZTscSD6ESJiKOaRQVQoc64YxbkEZhoQbol6eURWkcPp7LvT10FOxi+fl48fjVTfnotg4cOn6S696YzWcL03TRiZyTSpiIOGpIchw7DmUze/NBp6NUGrn5hYyduZUOdcJJrBPudJwKo3ujSKY+0JX2dcJ5fOIqRoxfRsaJPKdjiRtTCRMRR13VoibVg/w0g345+nr5LvZk5HBPTx0FK2s1QgL46NYOPHZ1E6av2Uuv0bN0BbCckUqYiDjK38ebmxJj+XHdfvZm5Dgdp8IrKLS8/csWmtUKpUejSKfjVEheXoY/dK/PhD90BOCmt+fxZupmnXKX31EJExHHDe4QR6G1fLZQ01W42g9r9rL1wHHu7VkfY4zTcSq0hLhqTH2gK1e3qMm/vt/ALe8vZH+mftGQ36iEiYjj4qoH0q1hJCmL0jQDuQtZa3kzdQt1I4K4ukUtp+NUCqEBvrw+KIF/3tCSxTsOc/Wrs0jdsN/pWOImVMJExC0MTY5n37GT/LhOP6BcZdamg6zalcHd3erh7aWjYOXFGMOA9nFMGdGFyBB/hn+wiL9PXUduvn7hqOxUwkTELVzSpAbRYQEaoO9Cb6ZuJirUn+vb1nY6SqXUMCqEr+/rzM3J8YyduZWb3p7LjkPHnY4lDlIJExG34O1lGNQhjlmbDrL9oH4wlbWlaUeYv/Uwd3ath7+Pt9NxKq0AX2/+2rcFbw9tx7aDx+k1ejbfLN/ldCxxiEqYiLiNAe1j8fEyjNcA/TL35owtVA30ZVCHOKejCEVTs0x7sBtNaobwQMpy/jRhBdm5+U7HknKmEiYibqNGaABXNI9iwuKd5Bbocv6ysmFvJj+u28fwTnUI8vdxOo4Uq121Cil3JTPykgZ8uTSda1+fzZrdGU7HknKkEiYibmVoUjxHsvNYtFdHBcrKW6mbCfTzZninOk5HkdP4eHvx8BWNGXdHEsdP5nP9mLl8OGebbnlUSaiEiYhb6Vi/Oo2jQhi/Ppf1e485Hcfj7TyczZSVexjcIY6qgX5Ox5Ez6FQ/gmkPdKNrwwhGTVnLnR8v4cjxXKdjiYuphImIWzHG8O4tifh5GYa8u4DN+zOdjuTR3pm5BW9juKNrPaejyDmEB/nx72GJPHVtM2ZuPMDVr81i/tZDTscSF1IJExG3E1c9kD93CMAYw+B3F+hqyQu0PzOHLxanc0O72tQMC3A6jpSCMYbbutRl4r2dqOLnzeB35/PKfzZqEuMKSiVMRNxSzSAvxt+ZRH6hZfC789l5ONvpSB7n/dnbyS8o5O5uulG3p2lRO4wp93ehb0JtXvtpE4PfXcDuoyecjiVlTCVMRNxWo6gQPr09ieO5BQz+93z2ZOiHUGllnMjj0/k7uKZlLepEBDkdRy5AsL8PL/dvwysDWrNmdwbXjJ7FD2v2Oh1LypBKmIi4tWbRoXx8WweOHs9j8LsL2H9MN0AujU/mbSfrZD739NBRME93fU
IM347sSky1Ktz1yRL+v707D5OqutM4/v1VVa8szb7I2iCC7LI1ImCDk4yCuGcUgogIEglLKHzxiQAAEWJJREFUtnGiThKTGTWrcQWDiBgIImMWNcGoM7LIIjRIg7Ko3Y3YgDabDTRLb3XyR3cyYDCxsapPV9X7eR6fp+t2UfeF81T51rnn3vuDF97hVHml71gSASphIlLn9WnXiAWTBlJ09BRfnbeeQyWlviPVaSfLKpm/5gNGdG1Oj/MyfMeRCMhsVo/f3jGE24Zm8sy63Vw7ey15+0t8x5IvSCVMRGJC/w5NmD9xIIWfnGD8UxsoPqHT9z/Lczkfcvh4GdNGnO87ikRQSijI967sztMTq76QjHl0NUs3FuqaYjFMJUxEYsbgTk15csIA8g+UMGH+Bo6eKvcdqc4pqwgzd1UBAzs2ZmDHJr7jSBSM6NaCl2cNo2+7Rtz5/FZmLcnlmN4LMUklTERiyrAuzXlifD92fHSUifM3UFKqK+uf7oXcvew7copp2ZoFi2ctG6ayaHIW3/nyBfzp7Y8Y/chqthQW+44lNaQSJiIxZ2S3ljw6th9b9hzhtgU5nCzTImWAcNjxxMp8LmzdkOyuzX3HkSgLBozpI7uwdOpgKsOO6+esZe6qfMJhHZ6MFSphIhKTLu/Ziodu7EvOB4e5feFGnS0GvLr9Y/IPHGdadmfMzHccqSX9OzRh2cxhfKl7S+5ftpOJC3I4cEwnr8QClTARiVlj+pzHz27ow+q8g9yxaBNlFYl7VXHnHLNX5NOxaTqjerX2HUdqWUZ6ErO/2o//vqYn6wsOccXDb/DG+wd8x5J/QiVMRGLa9f3bct81vVj+7gFmPPsW5Ql6e5fVeQfZuucIUy/tTDCgWbBEZGaMH9yBF6cPpXF6EhPmb+Anf96ZsO+JWKASJiIxb1xWe+4d051XthXxzedyqUzANTGzl+fTsmEK1/Vr4zuKeNa1VQNenD6Umwa2Z86KfP7tV+t02686SiVMROLCxEsyuXtUN/649SP+/fktCbU4efOHn7Cu4BBThnUiJRT0HUfqgLTkIA9c14vHx/Ujb38Jox5+gz9u3ec7lnxKyHcAEZFIuX14Z0rLw/zitfdICQW4/9peCbFAffaKfBqlJzF2UHvfUaSOGd27Nb3bZjBzyWamL97MmryDfP/KHqQlq6zXBZoJE5G4MuOyLkwfcT7Pbijkhy9tj/urib9XdIzXthdxy8UdqZei79Xy99o1SWfp1IuZlt2ZJTmFXPXYanZ+fNR3LEElTETi0Le/fAFThmWyYO0HPPDyzrguYnNW5JOeHGTikI6+o0gdlhQMcOfl3Vg4KYvik+Vc/dgaFr65O67fG7FAJUxE4o6ZcfeoC5lwcQfmrirgl6+95ztSVBQePsGLW/YxdlB7GtdL9h1HYsDQLs14edYwBndqyvf+8A53LHqLIyd0yyNfVMJEJC6ZGfeO6cFNA9vxyOt5PPb6+74jRdzcVQUEDKYM6+Q7isSQZvVTeHriQO4ZdSH/u6OIUY+8wcYPDvuOlZBUwkQkbgUCxn3X9uK6i9rw81ff48lVBb4jRcyBY6Us3VjI9f3a0ioj1XcciTGBgDFleCd+e8cQggHjxrlv8uj/vZ+Ql3fxSSVMROJaMGD89IbejO7dmvuW7eDX6z7wHSki5q/ZRXllmKmXdvYdRWJYn3aN+NPMoVzZuzW/eO09xs9bT9HRU75jJQyVMBGJe6FggIdu7MuXurfk+y9sY8mGD31H+kKOnCxn4brdXNGrNZnN6vmOIzGuQWoSD93Yl5/d0JvcwmKuePgNXt9Z5DtWQlAJE5GEkBQM8Ni4i8ju2py7fv82v3trj+9I52zRm7spKa1gWrZmwSQyzIyvDGjHSzOG0rJhKpMWbGTxjlJKKyp9R4trKmEikjBSQkGeGN+fIZ2b8p3/2RKTVxA/WVbJ/NW7yO7anB7nZfiOI3Hm/Bb1+f20IUwc0pFVeyr4qFiHJqNJJUxEEkpqUp
AnJwxgQIcmzFqSyyvbPvYdqUaWbizk0PEypmWf7zuKxKnUpCD3XtWDB4al0VGHu6NKJUxEEk56coj5tw6kd9sMpi9+i+U79/uO9LmUV4aZu6qAAR0aMyizie84Eucap6oiRJv+hUUkIdVPCbHg1kF0bdWAqYs2sSbvoO9I/9QLufvYW3ySaSO0FkwkHqiEiUjCykhLYuGkLDo1q8dtz+SwvuCQ70ifKRx2PLEyn26tGjCiawvfcUQkAlTCRCShNa6XzKLJWbRplMakBTls2v2J70hn9er2IvL2lzBtxPmYme84IhIBKmEikvCa1U9h8ZTBNG+QwsT5G9i6p9h3pDM455izIo8OTdMZ1bOV7zgiEiEqYSIiQMuGqSyeMpiM9CRufmoD2/cd9R3pb9bkHWLLniNMHd6ZUFAf2yLxIqrvZjO73MzeNbM8M/vuWX7/NTN728xyzWy1mXWPZh4RkX/kvEZpPDtlMOnJQW5+aj3vFx3zHQmA2SvyaNEghev7t/EdRUQiKGolzMyCwOPAFUB3YOxZStZi51wv51xf4KfAg9HKIyLyebRrks5vJmcRCBjj5q1n18HjXvPkFhazNv8QU4Z1IiUU9JpFRCIrmjNhg4A851yBc64MWAJcffoTnHOnz/fXA3T7dhHxrlPz+iyenEVl2DHuyTcpPHzCW5bZy/PISEtibFZ7bxlEJDrMuej0HjO7AbjcOTe5+vHNQJZzbvqnnvd14FtAMjDSOff+WV7rduB2gJYtW/ZfsmRJVDL/VUlJCfXr14/qPiS6NIaxry6M4YdHK/lJzinSQsZdg1Jpmla767H2loS5Z/VJru6cxLVdkmt135FQF8ZQzp3GLzJGjBixyTk34Gy/C9V2mE9zzj0OPG5m44D/BG45y3PmAnMBBgwY4LKzs6OaacWKFUR7HxJdGsPYV1fGsG+/Yr765HoefcdYOnUwLRqm1tq+v/VcLmlJH3PvuGwa14u9ElZXxlDOjcYv+qL5tW4v0O60x22rt32WJcA1UcwjIlJjvds2YsGkQew/Vsq4ees5WFJaK/stPHyCF7bsY1xW+5gsYCLyz0WzhOUAXcws08ySgZuAF09/gpl1Oe3haODvDkWKiPjWv0Nj5k8cyJ5PTjB+3no+OV4W9X0++UYBAYPJwzKjvi8R8SNqJcw5VwFMB14BdgBLnXPbzOxHZnZV9dOmm9k2M8ulal3Y3x2KFBGpCwZ3asq8CQMpOHicCfM3cORkedT2deBYKc/lFHLdRW1pnZEWtf2IiF9RXRPmnFsGLPvUtu+f9vOsaO5fRCSShnZpxhPj+zF14SYmPr2BhbdlUT8l8h+j89fsoqwyzNRLO0X8tUWk7tCll0VEamBkt5Y8OrYfW/ccYdLTOZwoq4jo6x89Vc6idbsZ1bM1nZrrzDSReKYSJiJSQ5f3bMVDN/Zl4+7DTPn1Rk6VV0bstReu282x0gruyO4csdcUkbpJJUxE5ByM6XMeP7uhD2vzD3HHok2UVnzxInaqvJKn1+zi0gua07NNRgRSikhdphImInKOru/flvuu6cXydw8wY/FmyivDX+j1lm4s5GBJGdM0CyaSEFTCRES+gHFZ7bl3THde3V7EN57LpeIci1h5ZZhfrSygf4fGDMpsEuGUIlIXeb9ivohIrJt4SSZllWHuX7aT5GCAn3+lD8GA1eg1Xszdx97ik/zo6h6Y1ezPikhsUgkTEYmA24d3prQ8zC9ee4+UUID7r+1F4HMWsXDYMWdlPt1aNWBktxZRTioidYVKmIhIhMy4rAulFWEeW55HcijAD6/6fLNar+0oIm9/CQ/f1FezYCIJRCVMRCSCvv3lCyirDDN3VQHJwQD3jL7wHxYr5xyzV+TTvkk6o3u1rsWkIuKbSpiISASZGXdd0Y3S8krmrd5FalKQ7/xr1898/tr8Q2wpLOa+a3sSCupcKZFEohImIhJhZsYPxvSgrLLq0GRKKMCMy7qc9bmzV+TRokEK1/drW8spRc
Q3lTARkSgIBIz7run1t8X6yaEAUy898/pfWwqLWZN3iLtHdSM1KegpqYj4ohImIhIlgYDx0xt6U1YZ5oGXd5ISCjDxksy//X72ijwy0pIYl9XBY0oR8UUlTEQkikLBAL+8sS9lFWHufWk7yaEg47La837RMV7ZVsTMkedTP0UfxSKJSO98EZEoSwoGeHTcRXxt4Sbu+cPbJIcCrM0/SFpS8IyZMRFJLCphIiK1ICUUZM74/kx+ZiN3Pr8FM+OWizvSpF6y72gi4onOhxYRqSWpSUHmTujPgI5NCAWMKcM1CyaSyDQTJiJSi9KTQyy6LYtDx0tpnZHmO46IeKSZMBGRWpYcCqiAiYhKmIiIiIgPKmEiIiIiHqiEiYiIiHigEiYiIiLigUqYiIiIiAcqYSIiIiIeqISJiIiIeKASJiIiIuKBSpiIiIiIByphIiIiIh6ohImIiIh4oBImIiIi4oFKmIiIiIgHKmEiIiIiHqiEiYiIiHhgzjnfGWrEzA4Au6O8m2bAwSjvQ6JLYxj7NIaxT2MY2zR+kdHBOdf8bL+IuRJWG8xso3NugO8ccu40hrFPYxj7NIaxTeMXfTocKSIiIuKBSpiIiIiIByphZzfXdwD5wjSGsU9jGPs0hrFN4xdlWhMmIiIi4oFmwkREREQ8UAkTERER8UAl7FPM7HIze9fM8szsu77zSM2YWTszW25m281sm5nN8p1Jas7Mgma22cz+6DuL1JyZNTKz581sp5ntMLOLfWeSmjGzb1Z/hr5jZs+aWarvTPFIJew0ZhYEHgeuALoDY82su99UUkMVwLedc92BwcDXNYYxaRaww3cIOWcPA392znUD+qCxjClm1gaYCQxwzvUEgsBNflPFJ5WwMw0C8pxzBc65MmAJcLXnTFIDzrmPnHNvVf98jKoP/zZ+U0lNmFlbYDQwz3cWqTkzywCGA08BOOfKnHPFflPJOQgBaWYWAtKBfZ7zxCWVsDO1AQpPe7wH/Q88ZplZR+AiYL3fJFJDDwF3AmHfQeScZAIHgKerDynPM7N6vkPJ5+ec2wv8HPgQ+Ag44px71W+q+KQSJnHJzOoDvwW+4Zw76juPfD5mdiWw3zm3yXcWOWchoB8wxzl3EXAc0PraGGJmjak6CpQJnAfUM7PxflPFJ5WwM+0F2p32uG31NokhZpZEVQH7jXPud77zSI1cAlxlZh9QtRxgpJkt8htJamgPsMc599cZ6OepKmUSO/4F2OWcO+CcKwd+BwzxnCkuqYSdKQfoYmaZZpZM1ULEFz1nkhowM6NqLcoO59yDvvNIzTjn7nLOtXXOdaTq/fe6c07fwGOIc+5joNDMulZvugzY7jGS1NyHwGAzS6/+TL0MnVwRFSHfAeoS51yFmU0HXqHqbJD5zrltnmNJzVwC3Ay8bWa51dvuds4t85hJJNHMAH5T/WW2ALjVcx6pAefcejN7HniLqjPON6NbGEWFblskIiIi4oEOR4qIiIh4oBImIiIi4oFKmIiIiIgHKmEiIiIiHqiEiYiIiHigEiYicc3MSnxnEBE5G5UwEREREQ9UwkQk4ZhZXzN708y2mtnvq++Vh5nNNLPt1duXVG+71Mxyq//bbGYN/KYXkXihi7WKSFwzsxLnXP1PbdsKzHDOrTSzHwENnXPfMLN9QKZzrtTMGjnnis3sJeDHzrk11TeGP+Wcq/DwVxGROKOZMBFJKGaWATRyzq2s3vQMMLz6561U3W5nPFW3awFYAzxoZjOr/5wKmIhEhEqYiMj/Gw08DvQDcsws5Jz7MTAZSAPWmFk3nwFFJH6ohIlIQnHOHQE+MbNh1ZtuBlaaWQBo55xbDvwHkAHUN7POzrm3nXM/AXIAlTARiYiQ7wAiIlGWbmZ7Tnv8IHAL8ISZpQMFwK1AEFhUfbjSgEeq14T9l5mNAMLANuDl2o0vIvFKC/NFREREPNDhSBEREREPVMJEREREPFAJExEREfFAJUxERETEA5UwEREREQ9UwkRERE
Q8UAkTERER8eAvHCfKeUkE3pcAAAAASUVORK5CYII=\n", | |
"text/plain": [ | |
"<Figure size 720x504 with 1 Axes>" | |
] | |
}, | |
"metadata": { | |
"tags": [], | |
"needs_background": "light" | |
} | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"metadata": { | |
"id": "D-l6EXE_knU_" | |
}, | |
"source": [ | |
"" | |
], | |
"execution_count": null, | |
"outputs": [] | |
} | |
] | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment