@mirrornerror
Last active November 1, 2018 06:37
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Kaggle: Titanic: Machine Learning from Disaster \n",
"### with EarlyStopping\n",
"https://www.kaggle.com/c/titanic"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/mirrornerror/.pyenv/versions/anaconda3-5.1.0/envs/py36/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
" from ._conv import register_converters as _register_converters\n",
"Using TensorFlow backend.\n"
]
}
],
"source": [
"import pandas as pd\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline\n",
"\n",
"# random seed\n",
"import tensorflow as tf\n",
"import random as rn\n",
"import os\n",
"os.environ['PYTHONHASHSEED'] = '0'\n",
"random_n = 123\n",
"np.random.seed(random_n)\n",
"rn.seed(random_n)\n",
"session_conf = tf.ConfigProto(intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)\n",
"from keras import backend as K\n",
"tf.set_random_seed(random_n)\n",
"sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)\n",
"K.set_session(sess)\n",
"\n",
"train = pd.read_csv('train.csv', index_col=0)\n",
"test = pd.read_csv('test.csv', index_col=0)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Survived</th>\n",
" <th>Pclass</th>\n",
" <th>Name</th>\n",
" <th>Sex</th>\n",
" <th>Age</th>\n",
" <th>SibSp</th>\n",
" <th>Parch</th>\n",
" <th>Ticket</th>\n",
" <th>Fare</th>\n",
" <th>Cabin</th>\n",
" <th>Embarked</th>\n",
" </tr>\n",
" <tr>\n",
" <th>PassengerId</th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>0</td>\n",
" <td>3</td>\n",
" <td>Braund, Mr. Owen Harris</td>\n",
" <td>male</td>\n",
" <td>22.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>A/5 21171</td>\n",
" <td>7.2500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>Cumings, Mrs. John Bradley (Florence Briggs Th...</td>\n",
" <td>female</td>\n",
" <td>38.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>PC 17599</td>\n",
" <td>71.2833</td>\n",
" <td>C85</td>\n",
" <td>C</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>1</td>\n",
" <td>3</td>\n",
" <td>Heikkinen, Miss. Laina</td>\n",
" <td>female</td>\n",
" <td>26.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>STON/O2. 3101282</td>\n",
" <td>7.9250</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>Futrelle, Mrs. Jacques Heath (Lily May Peel)</td>\n",
" <td>female</td>\n",
" <td>35.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>113803</td>\n",
" <td>53.1000</td>\n",
" <td>C123</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>0</td>\n",
" <td>3</td>\n",
" <td>Allen, Mr. William Henry</td>\n",
" <td>male</td>\n",
" <td>35.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>373450</td>\n",
" <td>8.0500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Survived Pclass \\\n",
"PassengerId \n",
"1 0 3 \n",
"2 1 1 \n",
"3 1 3 \n",
"4 1 1 \n",
"5 0 3 \n",
"\n",
" Name Sex Age \\\n",
"PassengerId \n",
"1 Braund, Mr. Owen Harris male 22.0 \n",
"2 Cumings, Mrs. John Bradley (Florence Briggs Th... female 38.0 \n",
"3 Heikkinen, Miss. Laina female 26.0 \n",
"4 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 35.0 \n",
"5 Allen, Mr. William Henry male 35.0 \n",
"\n",
" SibSp Parch Ticket Fare Cabin Embarked \n",
"PassengerId \n",
"1 1 0 A/5 21171 7.2500 NaN S \n",
"2 1 0 PC 17599 71.2833 C85 C \n",
"3 0 0 STON/O2. 3101282 7.9250 NaN S \n",
"4 1 0 113803 53.1000 C123 S \n",
"5 0 0 373450 8.0500 NaN S "
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"train.head()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Drop Survived and Ticket, then combine train with test "
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'pandas.core.frame.DataFrame'>\n",
"Int64Index: 1309 entries, 1 to 1309\n",
"Data columns (total 9 columns):\n",
"Pclass 1309 non-null int64\n",
"Name 1309 non-null object\n",
"Sex 1309 non-null object\n",
"Age 1046 non-null float64\n",
"SibSp 1309 non-null int64\n",
"Parch 1309 non-null int64\n",
"Fare 1308 non-null float64\n",
"Cabin 295 non-null object\n",
"Embarked 1307 non-null object\n",
"dtypes: float64(2), int64(3), object(4)\n",
"memory usage: 102.3+ KB\n"
]
}
],
"source": [
"train_tmp = train.drop(['Survived', 'Ticket'], axis=1)\n",
"test_tmp = test.drop(['Ticket'], axis=1)\n",
"df = pd.concat([train_tmp, test_tmp])\n",
"df.info()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Name --> Title --> Number"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['Mr' 'Mrs' 'Miss' 'Master' 'Don' 'Rev' 'Dr' 'Mme' 'Ms' 'Major' 'Lady'\n",
" 'Sir' 'Mlle' 'Col' 'Capt' 'Countess' 'Jonkheer' 'Dona']\n"
]
}
],
"source": [
"# Name to Title\n",
"df = df.assign(Title=df.Name.str.extract(' ([A-Za-z]+)\\..', expand=True))\n",
"title_list = df.Title.unique()\n",
"print(title_list)"
]
},
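{
"cell_type": "markdown",
"metadata": {},
"source": [
"The pattern `' ([A-Za-z]+)\\..'` captures the word followed by a period in each name, i.e. the honorific title. A minimal check of the pattern (reusing the pandas import from the first cell, on two names shown in `train.head()` above):\n",
"\n",
"```python\n",
"names = pd.Series(['Braund, Mr. Owen Harris', 'Heikkinen, Miss. Laina'])\n",
"print(names.str.extract(' ([A-Za-z]+)\\..', expand=True))  # column 0: Mr, Miss\n",
"```"
]
},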
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Pclass</th>\n",
" <th>Sex</th>\n",
" <th>Age</th>\n",
" <th>SibSp</th>\n",
" <th>Parch</th>\n",
" <th>Fare</th>\n",
" <th>Cabin</th>\n",
" <th>Embarked</th>\n",
" <th>Title</th>\n",
" </tr>\n",
" <tr>\n",
" <th>PassengerId</th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>3</td>\n",
" <td>male</td>\n",
" <td>22.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>7.2500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>1</td>\n",
" <td>female</td>\n",
" <td>38.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>71.2833</td>\n",
" <td>C85</td>\n",
" <td>C</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>3</td>\n",
" <td>female</td>\n",
" <td>26.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>7.9250</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" <td>2</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>1</td>\n",
" <td>female</td>\n",
" <td>35.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>53.1000</td>\n",
" <td>C123</td>\n",
" <td>S</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>3</td>\n",
" <td>male</td>\n",
" <td>35.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>8.0500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" <td>0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Pclass Sex Age SibSp Parch Fare Cabin Embarked Title\n",
"PassengerId \n",
"1 3 male 22.0 1 0 7.2500 NaN S 0\n",
"2 1 female 38.0 1 0 71.2833 C85 C 1\n",
"3 3 female 26.0 0 0 7.9250 NaN S 2\n",
"4 1 female 35.0 1 0 53.1000 C123 S 1\n",
"5 3 male 35.0 0 0 8.0500 NaN S 0"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Title to Number(0-17)\n",
"df.Title = df.Title.replace(df.Title.unique(), np.arange(len(df.Title.unique())))\n",
"\n",
"# Drop Name column\n",
"df = df.drop(['Name'], axis=1)\n",
"df.head()"
]
},
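{
"cell_type": "markdown",
"metadata": {},
"source": [
"`replace(df.Title.unique(), np.arange(...))` numbers the titles in order of first appearance, so Mr becomes 0, Mrs 1, Miss 2, Master 3, and so on down `title_list`. A quick way to display the resulting mapping (a sketch reusing `title_list` and `np` from the cells above):\n",
"\n",
"```python\n",
"title_map = dict(zip(title_list, np.arange(len(title_list))))\n",
"print(title_map)  # e.g. {'Mr': 0, 'Mrs': 1, 'Miss': 2, 'Master': 3, ...}\n",
"```"
]
},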
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Sex --> male:0, female:1"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"df.Sex = df.Sex.replace({'male': 0, 'female': 1})"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Cabin --> Number: nan:0, C:1, E:2, G:3, D:4, A:5, B:6, F:7, T:8"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[nan 'C' 'E' 'G' 'D' 'A' 'B' 'F' 'T']\n",
"[0 1 2 3 4 5 6 7 8]\n"
]
}
],
"source": [
"df = df.assign(Cabin=df.Cabin.str[0])\n",
"cabin_list = df.Cabin.unique()\n",
"\n",
"df.Cabin = df.Cabin.replace(df.Cabin.str[0].unique(), np.arange(len(df.Cabin.str[0].unique())))\n",
"\n",
"print(cabin_list)\n",
"print(df.Cabin.unique())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Embarked --> S:0, C:1, Q:2, nan"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array(['S', 'C', 'Q', nan], dtype=object)"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df.Embarked.unique()"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"df.Embarked = df.Embarked.replace({'S':0, 'C':1, 'Q':2})"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## zscore or normalization: \n",
"* Age: including NaN\n",
"* Fare: including NaN \n",
" \n",
"Z = (x - x.mean) / x.std \n",
"N = (x - x.min) / (x.max - x.min) \n",
" \n",
"sklearn.preprocessing.MinMaxScaler causes error with Null data."
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"# Normalize Function\n",
"def normalize(df_col):\n",
" df_col = (df_col - df_col.min()) / (df_col.max() - df_col.min())\n",
" return df_col"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"# Standardization(zscore)\n",
"def zscore(df_col):\n",
" df_col = (df_col - df_col.mean()) / df_col.std()\n",
" return df_col"
]
},
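{
"cell_type": "markdown",
"metadata": {},
"source": [
"Both helpers rely on pandas skipping NaN: `mean()`, `std()`, `min()` and `max()` are computed from the non-null values only, and NaN entries stay NaN in the result, which is why they can be applied to Age and Fare directly. A minimal check on a toy Series (not dataset values), using the two functions defined above:\n",
"\n",
"```python\n",
"s = pd.Series([10.0, None, 30.0])\n",
"print(zscore(s))     # roughly -0.71, NaN, 0.71\n",
"print(normalize(s))  # 0.0, NaN, 1.0\n",
"```"
]
},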
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Pclass</th>\n",
" <th>Sex</th>\n",
" <th>Age</th>\n",
" <th>SibSp</th>\n",
" <th>Parch</th>\n",
" <th>Fare</th>\n",
" <th>Cabin</th>\n",
" <th>Embarked</th>\n",
" <th>Title</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>count</th>\n",
" <td>1309.000000</td>\n",
" <td>1309.000000</td>\n",
" <td>1.046000e+03</td>\n",
" <td>1.309000e+03</td>\n",
" <td>1.309000e+03</td>\n",
" <td>1.308000e+03</td>\n",
" <td>1309.000000</td>\n",
" <td>1307.000000</td>\n",
" <td>1.309000e+03</td>\n",
" </tr>\n",
" <tr>\n",
" <th>mean</th>\n",
" <td>2.294882</td>\n",
" <td>0.355997</td>\n",
" <td>9.488904e-17</td>\n",
" <td>2.178887e-16</td>\n",
" <td>-5.920059e-17</td>\n",
" <td>-6.049357e-16</td>\n",
" <td>0.786860</td>\n",
" <td>0.394797</td>\n",
" <td>5.343319e-17</td>\n",
" </tr>\n",
" <tr>\n",
" <th>std</th>\n",
" <td>0.837836</td>\n",
" <td>0.478997</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.794388</td>\n",
" <td>0.653817</td>\n",
" <td>1.000000e+00</td>\n",
" </tr>\n",
" <tr>\n",
" <th>min</th>\n",
" <td>1.000000</td>\n",
" <td>0.000000</td>\n",
" <td>-2.061342e+00</td>\n",
" <td>-4.789037e-01</td>\n",
" <td>-4.448295e-01</td>\n",
" <td>-6.432832e-01</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>-5.418264e-01</td>\n",
" </tr>\n",
" <tr>\n",
" <th>25%</th>\n",
" <td>2.000000</td>\n",
" <td>0.000000</td>\n",
" <td>-6.161683e-01</td>\n",
" <td>-4.789037e-01</td>\n",
" <td>-4.448295e-01</td>\n",
" <td>-4.907329e-01</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>-5.418264e-01</td>\n",
" </tr>\n",
" <tr>\n",
" <th>50%</th>\n",
" <td>3.000000</td>\n",
" <td>0.000000</td>\n",
" <td>-1.305123e-01</td>\n",
" <td>-4.789037e-01</td>\n",
" <td>-4.448295e-01</td>\n",
" <td>-3.640217e-01</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>-5.418264e-01</td>\n",
" </tr>\n",
" <tr>\n",
" <th>75%</th>\n",
" <td>3.000000</td>\n",
" <td>1.000000</td>\n",
" <td>6.326615e-01</td>\n",
" <td>4.811039e-01</td>\n",
" <td>-4.448295e-01</td>\n",
" <td>-3.903654e-02</td>\n",
" <td>0.000000</td>\n",
" <td>1.000000</td>\n",
" <td>6.481916e-01</td>\n",
" </tr>\n",
" <tr>\n",
" <th>max</th>\n",
" <td>3.000000</td>\n",
" <td>1.000000</td>\n",
" <td>3.477218e+00</td>\n",
" <td>7.201157e+00</td>\n",
" <td>9.953060e+00</td>\n",
" <td>9.255140e+00</td>\n",
" <td>8.000000</td>\n",
" <td>2.000000</td>\n",
" <td>9.573327e+00</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Pclass Sex Age SibSp Parch \\\n",
"count 1309.000000 1309.000000 1.046000e+03 1.309000e+03 1.309000e+03 \n",
"mean 2.294882 0.355997 9.488904e-17 2.178887e-16 -5.920059e-17 \n",
"std 0.837836 0.478997 1.000000e+00 1.000000e+00 1.000000e+00 \n",
"min 1.000000 0.000000 -2.061342e+00 -4.789037e-01 -4.448295e-01 \n",
"25% 2.000000 0.000000 -6.161683e-01 -4.789037e-01 -4.448295e-01 \n",
"50% 3.000000 0.000000 -1.305123e-01 -4.789037e-01 -4.448295e-01 \n",
"75% 3.000000 1.000000 6.326615e-01 4.811039e-01 -4.448295e-01 \n",
"max 3.000000 1.000000 3.477218e+00 7.201157e+00 9.953060e+00 \n",
"\n",
" Fare Cabin Embarked Title \n",
"count 1.308000e+03 1309.000000 1307.000000 1.309000e+03 \n",
"mean -6.049357e-16 0.786860 0.394797 5.343319e-17 \n",
"std 1.000000e+00 1.794388 0.653817 1.000000e+00 \n",
"min -6.432832e-01 0.000000 0.000000 -5.418264e-01 \n",
"25% -4.907329e-01 0.000000 0.000000 -5.418264e-01 \n",
"50% -3.640217e-01 0.000000 0.000000 -5.418264e-01 \n",
"75% -3.903654e-02 0.000000 1.000000 6.481916e-01 \n",
"max 9.255140e+00 8.000000 2.000000 9.573327e+00 "
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df.Age = zscore(df.Age)\n",
"df.Fare = zscore(df.Fare)\n",
"df.SibSp = zscore(df.SibSp)\n",
"df.Parch = zscore(df.Parch)\n",
"df.Title = zscore(df.Title)\n",
"\n",
"# df.Age = normalize(df.Age)\n",
"# df.Fare = normalize(df.Fare)\n",
"\n",
"# for col in df.columns:\n",
"# df[col] = normalize(df[col])\n",
"\n",
"df.describe()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Split the Data into Null-data and Notnull-data\n",
"\n",
"Make a Copy of df: df0 = df.copy() \n",
"* Age\n",
"* Embarked\n",
"* Fare\n"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'pandas.core.frame.DataFrame'>\n",
"Int64Index: 1309 entries, 1 to 1309\n",
"Data columns (total 9 columns):\n",
"Pclass 1309 non-null int64\n",
"Sex 1309 non-null int64\n",
"Age 1046 non-null float64\n",
"SibSp 1309 non-null float64\n",
"Parch 1309 non-null float64\n",
"Fare 1308 non-null float64\n",
"Cabin 1309 non-null int64\n",
"Embarked 1307 non-null float64\n",
"Title 1309 non-null float64\n",
"dtypes: float64(6), int64(3)\n",
"memory usage: 102.3 KB\n"
]
}
],
"source": [
"# Drop Cabin if the result gets better\n",
"#df = df.drop(['Cabin'], axis=1)\n",
"\n",
"df0 = df.copy()\n",
"df0.info()"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"Age_null = df[df.Age.isnull()]\n",
"df = df[df.Age.notnull()]\n",
"\n",
"Embarked_null = df[df.Embarked.isnull()]\n",
"df = df[df.Embarked.notnull()]\n",
"\n",
"Fare_null = df[df.Fare.isnull()]\n",
"df = df[df.Fare.notnull()]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Get Notnull Data: df.shape = (1043, 9)"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(1043, 9)\n",
"<class 'pandas.core.frame.DataFrame'>\n",
"Int64Index: 1043 entries, 1 to 1307\n",
"Data columns (total 9 columns):\n",
"Pclass 1043 non-null int64\n",
"Sex 1043 non-null int64\n",
"Age 1043 non-null float64\n",
"SibSp 1043 non-null float64\n",
"Parch 1043 non-null float64\n",
"Fare 1043 non-null float64\n",
"Cabin 1043 non-null int64\n",
"Embarked 1043 non-null float64\n",
"Title 1043 non-null float64\n",
"dtypes: float64(6), int64(3)\n",
"memory usage: 81.5 KB\n"
]
}
],
"source": [
"print(df.shape)\n",
"df.info()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Model to fill NaN in Fare, Embarked, Age"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"from keras.models import Sequential\n",
"from keras.layers import Flatten, Dense, Dropout, BatchNormalization\n",
"import keras\n",
"from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau\n",
"\n",
"initializer = keras.initializers.glorot_uniform(seed=random_n)\n",
"# model for Fare, Embarked, Age\n",
"def fill_data(col):\n",
" n_cols = len(df.columns) - 1\n",
" num = len(df[col].unique())\n",
" \n",
" model = Sequential()\n",
" model.add(Dense(64, activation='relu', input_shape=(n_cols,), kernel_initializer=initializer))\n",
" model.add(Dropout(0.5, seed=random_n))\n",
" \n",
" if col == 'Embarked':\n",
" model.add(Dense(num, activation='softmax', kernel_initializer=initializer))\n",
" model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['acc'])\n",
" else: # 'Fare', 'Age'\n",
" model.add(Dense(1, activation='relu', kernel_initializer=initializer))\n",
" model.compile(optimizer='adam', loss='mse', metrics=['mae'])\n",
" \n",
" data = df.drop([col], axis=1)\n",
" \n",
" reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.000001,verbose=1)\n",
" checkpointer = ModelCheckpoint(filepath='checkpoint_'+col+'.hdf5', verbose=1, save_best_only=True)\n",
" early_stopping = EarlyStopping(patience=10, verbose=1)\n",
" epochs = 300\n",
" hist = model.fit(data, df[col], \n",
" epochs=epochs, \n",
" batch_size=32,\n",
" verbose=1,\n",
" validation_split=0.1,\n",
" callbacks=[reduce_lr, early_stopping, checkpointer])\n",
"\n",
" null_data = df0[df0[col].isnull()]\n",
" null_data = null_data.drop([col], axis=1)\n",
" \n",
" model.load_weights('checkpoint_'+col+'.hdf5')\n",
" pred = model.predict(null_data)\n",
" \n",
" if col == 'Embarked':\n",
" pred = pred.argmax(axis=1)\n",
" \n",
" plt.plot(hist.history['acc'], 'b-', label='acc' )\n",
" plt.plot(hist.history['loss'], 'r-', label='loss' )\n",
" plt.xlabel('epochs')\n",
" plt.legend()\n",
" plt.show()\n",
" \n",
" pred = pred.reshape(-1, )\n",
" \n",
" idx = df0[df0[col].isnull()].index.values\n",
"\n",
" for n, i in enumerate(idx):\n",
" df0.loc[i, col] = pred[n]"
]
},
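{
"cell_type": "markdown",
"metadata": {},
"source": [
"Inside `fill_data`, `ModelCheckpoint(save_best_only=True)` keeps the weights from the epoch with the lowest `val_loss`, and `load_weights` restores them before predicting, so early stopping never leaves the model on a worse epoch. The final loop writes one prediction per missing row; assuming `pred` is 1-D and in the same order as `idx`, it could equally be a single vectorized assignment:\n",
"\n",
"```python\n",
"df0.loc[idx, col] = pred\n",
"```"
]
},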
{
"cell_type": "code",
"execution_count": 17,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Train on 938 samples, validate on 105 samples\n",
"Epoch 1/300\n",
"938/938 [==============================] - 0s 344us/step - loss: 0.8662 - acc: 0.6205 - val_loss: 0.7452 - val_acc: 0.7524\n",
"\n",
"Epoch 00001: val_loss improved from inf to 0.74523, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 2/300\n",
"938/938 [==============================] - 0s 76us/step - loss: 0.7523 - acc: 0.7313 - val_loss: 0.7328 - val_acc: 0.7429\n",
"\n",
"Epoch 00002: val_loss improved from 0.74523 to 0.73280, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 3/300\n",
"938/938 [==============================] - 0s 74us/step - loss: 0.7292 - acc: 0.7495 - val_loss: 0.7240 - val_acc: 0.7333\n",
"\n",
"Epoch 00003: val_loss improved from 0.73280 to 0.72398, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 4/300\n",
"938/938 [==============================] - 0s 82us/step - loss: 0.7211 - acc: 0.7441 - val_loss: 0.7141 - val_acc: 0.7333\n",
"\n",
"Epoch 00004: val_loss improved from 0.72398 to 0.71408, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 5/300\n",
"938/938 [==============================] - 0s 84us/step - loss: 0.6862 - acc: 0.7601 - val_loss: 0.7117 - val_acc: 0.7333\n",
"\n",
"Epoch 00005: val_loss improved from 0.71408 to 0.71174, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 6/300\n",
"938/938 [==============================] - 0s 74us/step - loss: 0.6646 - acc: 0.7537 - val_loss: 0.7110 - val_acc: 0.7333\n",
"\n",
"Epoch 00006: val_loss improved from 0.71174 to 0.71097, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 7/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 0.6576 - acc: 0.7655 - val_loss: 0.7034 - val_acc: 0.7429\n",
"\n",
"Epoch 00007: val_loss improved from 0.71097 to 0.70335, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 8/300\n",
"938/938 [==============================] - 0s 69us/step - loss: 0.6594 - acc: 0.7655 - val_loss: 0.7016 - val_acc: 0.7429\n",
"\n",
"Epoch 00008: val_loss improved from 0.70335 to 0.70159, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 9/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 0.6650 - acc: 0.7644 - val_loss: 0.7008 - val_acc: 0.7429\n",
"\n",
"Epoch 00009: val_loss improved from 0.70159 to 0.70081, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 10/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 0.6344 - acc: 0.7665 - val_loss: 0.7043 - val_acc: 0.7429\n",
"\n",
"Epoch 00010: val_loss did not improve from 0.70081\n",
"Epoch 11/300\n",
"938/938 [==============================] - 0s 72us/step - loss: 0.6337 - acc: 0.7665 - val_loss: 0.6979 - val_acc: 0.7429\n",
"\n",
"Epoch 00011: val_loss improved from 0.70081 to 0.69788, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 12/300\n",
"938/938 [==============================] - 0s 74us/step - loss: 0.6329 - acc: 0.7601 - val_loss: 0.6970 - val_acc: 0.7429\n",
"\n",
"Epoch 00012: val_loss improved from 0.69788 to 0.69701, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 13/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 0.6176 - acc: 0.7708 - val_loss: 0.6976 - val_acc: 0.7429\n",
"\n",
"Epoch 00013: val_loss did not improve from 0.69701\n",
"Epoch 14/300\n",
"938/938 [==============================] - 0s 79us/step - loss: 0.6086 - acc: 0.7655 - val_loss: 0.6951 - val_acc: 0.7429\n",
"\n",
"Epoch 00014: val_loss improved from 0.69701 to 0.69508, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 15/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 0.6289 - acc: 0.7719 - val_loss: 0.6965 - val_acc: 0.7429\n",
"\n",
"Epoch 00015: val_loss did not improve from 0.69508\n",
"Epoch 16/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 0.6104 - acc: 0.7687 - val_loss: 0.6972 - val_acc: 0.7429\n",
"\n",
"Epoch 00016: val_loss did not improve from 0.69508\n",
"Epoch 17/300\n",
"938/938 [==============================] - 0s 72us/step - loss: 0.6014 - acc: 0.7772 - val_loss: 0.6946 - val_acc: 0.7333\n",
"\n",
"Epoch 00017: val_loss improved from 0.69508 to 0.69457, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 18/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 0.6165 - acc: 0.7665 - val_loss: 0.6978 - val_acc: 0.7429\n",
"\n",
"Epoch 00018: val_loss did not improve from 0.69457\n",
"Epoch 19/300\n",
"938/938 [==============================] - 0s 74us/step - loss: 0.6167 - acc: 0.7644 - val_loss: 0.6933 - val_acc: 0.7333\n",
"\n",
"Epoch 00019: val_loss improved from 0.69457 to 0.69334, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 20/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 0.6039 - acc: 0.7708 - val_loss: 0.6966 - val_acc: 0.7429\n",
"\n",
"Epoch 00020: val_loss did not improve from 0.69334\n",
"Epoch 21/300\n",
"938/938 [==============================] - 0s 72us/step - loss: 0.6079 - acc: 0.7740 - val_loss: 0.6931 - val_acc: 0.7429\n",
"\n",
"Epoch 00021: val_loss improved from 0.69334 to 0.69311, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 22/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 0.6071 - acc: 0.7697 - val_loss: 0.6897 - val_acc: 0.7333\n",
"\n",
"Epoch 00022: val_loss improved from 0.69311 to 0.68972, saving model to checkpoint_Embarked.hdf5\n",
"Epoch 23/300\n",
"938/938 [==============================] - 0s 69us/step - loss: 0.6012 - acc: 0.7687 - val_loss: 0.6914 - val_acc: 0.7333\n",
"\n",
"Epoch 00023: val_loss did not improve from 0.68972\n",
"Epoch 24/300\n",
"938/938 [==============================] - 0s 77us/step - loss: 0.5974 - acc: 0.7804 - val_loss: 0.6913 - val_acc: 0.7333\n",
"\n",
"Epoch 00024: val_loss did not improve from 0.68972\n",
"Epoch 25/300\n",
"938/938 [==============================] - 0s 77us/step - loss: 0.5967 - acc: 0.7761 - val_loss: 0.6933 - val_acc: 0.7333\n",
"\n",
"Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.00020000000949949026.\n",
"\n",
"Epoch 00025: val_loss did not improve from 0.68972\n",
"Epoch 26/300\n",
"938/938 [==============================] - 0s 86us/step - loss: 0.5963 - acc: 0.7751 - val_loss: 0.6924 - val_acc: 0.7333\n",
"\n",
"Epoch 00026: val_loss did not improve from 0.68972\n",
"Epoch 27/300\n",
"938/938 [==============================] - 0s 94us/step - loss: 0.5949 - acc: 0.7729 - val_loss: 0.6920 - val_acc: 0.7333\n",
"\n",
"Epoch 00027: val_loss did not improve from 0.68972\n",
"Epoch 28/300\n",
"938/938 [==============================] - 0s 79us/step - loss: 0.5952 - acc: 0.7783 - val_loss: 0.6918 - val_acc: 0.7333\n",
"\n",
"Epoch 00028: ReduceLROnPlateau reducing learning rate to 4.0000001899898055e-05.\n",
"\n",
"Epoch 00028: val_loss did not improve from 0.68972\n",
"Epoch 29/300\n",
"938/938 [==============================] - 0s 87us/step - loss: 0.6045 - acc: 0.7665 - val_loss: 0.6918 - val_acc: 0.7333\n",
"\n",
"Epoch 00029: val_loss did not improve from 0.68972\n",
"Epoch 30/300\n",
"938/938 [==============================] - 0s 83us/step - loss: 0.5944 - acc: 0.7719 - val_loss: 0.6917 - val_acc: 0.7333\n",
"\n",
"Epoch 00030: val_loss did not improve from 0.68972\n",
"Epoch 31/300\n",
"938/938 [==============================] - 0s 65us/step - loss: 0.6018 - acc: 0.7740 - val_loss: 0.6916 - val_acc: 0.7333\n",
"\n",
"Epoch 00031: ReduceLROnPlateau reducing learning rate to 8.000000525498762e-06.\n",
"\n",
"Epoch 00031: val_loss did not improve from 0.68972\n",
"Epoch 32/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 0.5872 - acc: 0.7697 - val_loss: 0.6916 - val_acc: 0.7333\n",
"\n",
"Epoch 00032: val_loss did not improve from 0.68972\n",
"Epoch 00032: early stopping\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYQAAAEKCAYAAAASByJ7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzt3Xd4VGX2wPHvoauAdERAisaCiiABO4KF4loQVEBXLCiuBRH9KWJZFbGuilIsrA0roKCwNgRldVVQAgvSBDHgElAJvUlLzu+Pc2OGmDJJJtyZ5Hye5z7J3LnlvAy5Z95y3yuqinPOOVcu7ACcc87FB08IzjnnAE8IzjnnAp4QnHPOAZ4QnHPOBTwhOOecAzwhOOecC3hCcM45B3hCcM45F6gQdgCFUadOHW3atGnYYTjnXEKZPXv2WlWtW9B2CZUQmjZtSkpKSthhOOdcQhGRn6PZzpuMnHPOAZ4QnHPOBTwhOOecAzwhOOecC3hCcM45B3hCcM45F/CE4JxzDvCE4JwrQcuWwbPPwrp1YUfiouEJwTlXItasgbPOghtvhEaN4NprYf78sKNy+fGE4JyLuR07oFs3Swpjx8Lll8Obb0LLlnDGGfD++5CREXaUZvZsr8FkiSohiEgXEVkiIstE5M5c3h8mInODZamIbAzWd4xYP1dEdohIt+C9V0VkecR7rWJbNOf2jR07IC0t7CjihypcfTXMmAGvvw49e8Lo0bByJTz2GPz0E1x4IRx2GDzxBGzYEE6cc+dCly6QnAwdO8LmzeHEEVdUNd8FKA/8BDQHKgHzgBb5bN8feDmX9bWA9cD+wetXgYsKOn/k0qZNG3UunqSlqbZqpVqpkurUqWFHEx/uv18VVB9+OPf3d+9WnTBB9fTTbbv991e97jrVOXNUf/+95OP76SfVSy+1c9esqTpggGr58qrnnKO6Z0/Jnz8MQIpGcY2NZnK7dsAyVU0FEJGxwAXAojy27w3cl8v6i4CPVXV7FOd0Lu7Nmwd/+Qts2gRNm1oTyWefwQknhB1ZeN5+G+6/H664Au78U1uCqVABune3Zd48GDECxoyBF16w92vXtj6Hhg1z/9moERx4YOFjW7MGhg6F55+3GO68EwYNgho14Mgj4frr4bbb4Omni1z8xFdQxsAu5C9GvL4cGJnHtk2AX4Dyubz3OXBuxOtXgSXA98AwoHJBsXgNwcWLDz9UrVpVtVEj1blzVVevVj30UPvGOX9+2NGF45tvVCtXVj3tNNUdOwq3b3q66pgxqkOHql5/vep556kef7xqvXr2TT7n0ry5au/eqk8/rTpjRv41i82brdZStarVBPr1U1216s/bDRhgx3722cLFngiIsoYQTUK4OJeEMCKPbQfl9h7QAEgHKuZYJ0BlYAzw9zyO2Q9IAVIOOeSQEv5nc65go0apliun2rr13heW1FTVBg1sSU0NL74su3erfvaZ6g03qDZtqnrWWaqTJ5dMs8jy5XbxPvRQu7jH0s6ddvyvvlIdO1b10UdVe/SwZJyVICpWVG3bVvWmm1Rff1116VJLSiNGZCeV7t1VFy/O+zx79qj+5S+WNKZMiW0Zso4/f77qiy+qXnON6rHHqjZponrffaq//BL780WKZUI4CZgS8XowMDiPbf8LnJzL+gHA6HzO0QH4oKBYvIbgwrRnj+qtt9pfzbnnqm7Z8udtFixQrVXLvsGuXr3vY9yxQ/WDD1Svvlq1dm2Ldb/9LN6GDbO/XT/1lOqGDbE556ZNqscco1qjRv4X3JKwapXqxImqgwapduigesABeycJsL6KGTOiO97mzXahrl5ddeHC4sW2erXqe++p3nmnaseOVkPJiq1mTdXOnW3JivWvf1X97rvinTMvsUwIFYBUoBnZncpH57LdEcAKQHJ5bybQMce6BsFPAZ4GHi0oFk8IrrjWrLEmiT597I9127bo9tu6VfWCC+wvpn///L9lz5xpF6Zjj1Vdvz42cRcU27vvWhNKtWoWY/XqqpddZp23WWXctUt1/HjVU0+1bQ44wGoPxbmI796t2rWrfaueNi025SmOPXtU581THT3aPqcPP1TNzCzcMX7+WbV+fdVmzez/S2Gkp6vedptq48Z7J6bkZNUbb1R97TXVJUv2jmnpUtWbb87+7E48UfWtt6xmFCsxSwh2LM4BlmKjje4O1g0Bzo/Y5v7cLupAU2AVUC7H+s+B+cAC4A2gakFxeEIoezZutAv4uHGF/8OOlJmp+sYb9q25YkX7Fp81wuWii+wPcNOm3Pf95Rf7gy5XTvWZZ6I739SpNvLopJPsgl0SfvpJ9ZJLrAYAqnXqqPbtq/rRRwW34c+erXrllRYjqHbqZDWLjIzCxdC/v+3/wgtFL0c8mjlTtUoVS57R9Ids3Wr9H9Wr2/+T7t1Vhw2zfpVoR05t2qQ6fLhqUpL9mzZooDpkiOqvvxavLKoxTgjxsnhCKFu2bbMOyqxvWsnJ1iZeWD//bN9is759LVhg35anTbNkc9BB9l6lStaG/PLLqmvX2r7z56secogljsmTC3feCRPs4tCpU+E7WfOzZ4/qk09aTNWqWbv555/bt/XC+u03u5AdfLD9Gxx2mDWLDRtmtYlvvrF/v127/rzvyJG2z8CBxS9TPBo3zsp3+eV5fxnZtUv1+eez/w9dcEHxm5oyMiypd+mS/f+yTx/7HIrKE4JLaLt22bhwEdU331R95ZXsaninTjZmvSAZGdapWLWqXTyfeSb3pp6MDOuwHDjQOvnAmkA6drRvfA0aqKakFK0cL71kx7v44th05n7/vXWeZvVjrFxZ/GOq2r/322/bN+IqVbKTcNYiYhe95GS76PXta/9G555besfuq6o++KCVf+jQvddnZqq+8072t/lTTrH/Q7H2ww/W1FSrVvH6pDwhuISVkWHt4TmbIn7/XfWJJ7Kbey691JpNcrNokerJJ2cnkOXLozt3ZqZd/AcPVj3ySPtD/9//ileeJ5+0OK65pujNXjt2qN57r2qFCqp169rFuzhNaPnJzLQa0rx51gY/erTq3/9uSaBz5+wO5PbtrRO2NMvMtM5esBqTqtXG2rWzdS1aWM2xpD6LLMWtYUabEMS2TQzJycmakpISdhhlzu7dNilZamp029eqZVMBiBT+XKpw0002Q+ajj9qNQzlt3AiPP243EO3ZA3/7G9xzD9SrB7t22fQIQ4dC1aowbJjNo1OUWGLpnnvgoYdgwAC4/Xa7ySpa33wD11wDixdbWZ56CurUKblY3d527oQzz7Q5j045xW4+bNQIHnzQPo/y5cOOsGAiMltVkwvczhNCYlm5En7+Obptq1e3/7g1a0Z/QVS1c8ycCd9+a8ucOfD774WL8+yz7c7TZs0Kt9+999rF/I477MKen9Wr4YEH4KWXYL/9bFbNjz6y5NWzJzzzDNSvX7jzl5TIRAeWEE44IXtJToYDDth7ny1b4K67YNQoaNzY/j27dNn3sTtIT4cTT7R5l+66y/6v7bdf2FFFzxNCKfT11zZT5K5dh
duvSpX8pwLYujX74v/tt/Drr7Zf5crQpk32RevII6P7NvTFFzB4sM1mOXQo3HxzdPs99ZRNHXDNNTYZWrRJ7Icf4O67YeJEK9Ozz8L550e3776kCrNm2aRvWf/WWbWucuXgmGOy/62rVrWkuHKlJZKHHoJq1cKNv6zbutX+T+ZM3InAE0Ipk5pqF4qaNW3ul4IusKrWtLJqlc3EmfUzLc2+WeeWVA4/fO9vrS1bQqVKRYt35Uq44Qb44AM71osv2gUvLy+/DH37wsUX23w4RamGr1hhTSlVqxYt5jCkp8N332XXyL77zuZGAjjqKKv9nHRSuDG6xOcJoRTZtMkuCr/+aheNpKTiHS8zE9auzU4SlSpB27bW9h9LqjBuHPTvb2UYPNiq25Ur773dxImWCM46CyZP/vP7ZUlmJixdal8AzjyzbP9buNjxhFBK7NljM2p+/jlMnQodOoQdUeGtXQsDB8Ibb/z5W++0aVa+Nm2sfIlYHXcu3kWbEPyJaXFM1drfP/3UOhQTMRmANeO8/rp1+G7bZiM1br7ZRmt06wZHHAEffujJwLmweUIoQampdsH79NOi7T9iBDz3nHUuXn11bGMLQ9eusGCBjdAYOdKaiOrXhylTrG/EORcuTwglRBX69YNJk6BzZ7jySli/Pvr9P/zQmlm6dYNHHimxMPe5atUs0X31FfTpY81EDRqEHZVzDojqiWmuCN56y5pEnnrK2tAffxw+/tguhhdfnP+QyvnzoVcvOO44a3cvVwrT9skn2+Kcix+l8FITvg0b4NZboV07ayt/6CFISbGbi3r2tG/9q1blvu+vv8K559pNZf/6l7erO+f2HU8IJWDwYFi3zjqCs8bTH3ecjTV//HHrU2jRwm6+yszM3u/33y1ZrF1ryaAw0xs451xxeUKIsRkzLBEMGACtWu39XoUKNo/N/Pk2zPK66+zO4x9/tMRw5ZV2n8Ebb8Dxx4cSvnOuDPOEEEO7d9tFvlEjm2MnL4cdZv0L//wnzJ1rdwSfey6MH28Tul144b6L2TnnsnhCiKGnn7Zv/yNGFDx9gojN2bNokU1Y9vHHcNVVNsTUOefCEFVCEJEuIrJERJaJyJ25vD9MROYGy1IR2RjxXkbEe5Mj1jcTkW9F5EcRGSciRZw1Jwr33mvzJ5Sgn3+G+++3SdW6dYt+v4MPtqkbFiywGkPY0zQ758quAhOCiJQHRgFdgRZAbxFpEbmNqg5U1Vaq2goYAUyMePv3rPdUNXIOyseAYaqaBGwA+hazLHnbtMl6cH/7rUQOnzW1MVjtoLBE4OijE2Nededc6RVNDaEdsExVU1V1FzAWuCCf7XsDb+d3QBER4Azg3WDVGKAQ36sLqX9/m97zhRdK5PDvv2+zej7wABxySImcwjnnSlw0CaEhsDLidVqw7k9EpAnQDPg8YnUVEUkRkZkiknXRrw1sVNU9URyzX7B/Snp6ehTh5iIpCc45x+aByOdhAi+/bE/6mjo1+kNv2WL5pmVLG1nknHOJKpqEkFurdl5TpPYC3lXVjIh1hwSz7F0KPC0ihxbmmKo6WlWTVTW5bt26UYSbh5tvtru+3nkn17fXrbObyb78Ejp1snl2oplY9e9/t+cLvPACVKxY9PCccy5s0SSENKBxxOtGwOo8tu1FjuYiVV0d/EwF/g20BtYCNUQka+qM/I4ZG2efbdNqDh+e69tDh9q3/ZQUGy00d649I6BXL1i2LPdDzpljh+vXzx6v55xziSyahDALSApGBVXCLvqTc24kIkcANYEZEetqikjl4Pc6wCnAIrWHMEwHLgo2vQKYVJyCFKhcOaslfPed3f0VITXVnlt79dXQurU1/aSm2uCkf/3L5vC/8cbsR0uCPR7yb3+zqZ1L0+Rzzrmyq8CEELTz3wRMARYD41V1oYgMEZHIUUO9gbG69xN3jgJSRGQelgAeVdVFwXuDgFtFZBnWp/BS8YtTgD59bJKgZ57Za/Xdd9tdxJE3k1WvDkOGwE8/WQ1g9Gg49FBrItq8GZ5/3p6PO2yYT93snCsdyt4T02691caG/vwzHHwws2bZJHT33AMPPpj3bsuW2TbjxlmtYOdOe1bwp5/6vQPOufjmT0zLy003WXvP88+janML1a1rP/Nz2GEwdqz1MbRqZUng2Wc9GTjnSo+y9zyE5s3hvPPg+ef5pPVdfPFFFUaOtCaiaGQ9+3fPHmtmcs650qLs1RDAOpfT0/nPTeNISrI+gsLyZOCcK23K5mXtjDPYcPDRXLT6Gdq804eKFb3dxznnymQNYdt24ZFtN3M8/6X7Qd+EHY5zzsWFMpkQhg2DUZsuY3e1msjwZwrewTnnyoAylxDWrIHHHoPOFx5Axb9dY3NPr1xZ8I7OOVfKlbmEMGSIPbv4kUew249VbdI755wr48pUQli61Cah69fPpjWiSRN7ms3o0ZYlnHOuDCtTCeGuu6ByZbjvvoiVN99sU52+9VZocTnnXDwoMwlhxgyYMMGeWVy/fsQb7dvbwwyGD7fmI+ecK6PKRELImqLioINsKqO9iFgt4fvv4YsvQonPOefiQZlICJMmwddf22ymVavmssGll0Lt2nk+K8E558qCMpEQnngCjjzSnneQq/32s57mSZNgxYp9GZpzzsWNMpEQPvzQnpyZ7/xD11+fPYWpc86VQWUiIRx4IBxzTAEbNW4MPXrAyJHw7rv7JC7nnIsnUSUEEekiIktEZJmI3JnL+8NEZG6wLBWRjcH6ViIyQ0QWisj3ItIzYp9XRWR5xH6tYlesIho+3B52cPHFNjY1MzPsiJxzbp8pcLZTESkPjALOBtKAWSIyOeJRmKjqwIjt+wOtg5fbgT6q+qOIHAzMFpEpqroxeP92VY2fr+P168P06faw5CFDYP58eO21PHqinXOudImmhtAOWKaqqaq6CxgLXJDP9r2BtwFUdamq/hj8vhpYA9QtXsglrHJlePllmwFv0iQ4+WRYvjzsqJxzrsRFkxAaApGzv6UF6/5ERJoAzYDPc3mvHVAJ+Cli9UNBU9IwEakcddQlTQRuuQU+/tgmvmvbFv7977Cjcs65EhVNQsjt6TF53dLbC3hXVTP2OoBIA+B14CpVzWqYHwwcCbQFagGDcj25SD8RSRGRlPT09CjCjaFOneC776BePTj7bJ8EzzlXqkWTENKAxhGvGwGr89i2F0FzURYRqQ58CNyjqjOz1qvqL2p2Aq9gTVN/oqqjVTVZVZPr1g2htSkpyea96NwZbrjB+hd27dr3cTjnXAmLJiHMApJEpJmIVMIu+pNzbiQiRwA1gRkR6yoB7wGvqeo7ObZvEPwUoBuwoKiFKHEHHmj9CYMG2XSpZ50F+7q24pxzJazAhKCqe4CbgCnAYmC8qi4UkSEicn7Epr2Bsap7zRB3CdAeuDKX4aVvish8YD5QBxgag/KUnPLl4dFH4c03YdYsOPFEnzLbOVeqiCbQDJ/JycmakpISdhjw3nvQvbvdAn3OOWFH45xz+RKR2aqaXNB2ZeJO5Zjr2hX2399GITnnXCnhCaEoqlSBjh09ITjnShVPCEXV
pQv89BMsWxZ2JM45FxOeEIqqa1f76bUE51wp4QmhqA491O5R8ITgnCslPCEUR9euNhmeDz91zpUCnhCKo2tX2LHDn8XsnCsVPCEUx+mn24ijTz4JOxLnnCs2TwjFsd9+0KGD9yM450oFTwjF1bUrLF0KqalhR+Kcc8XiCaG4fPipc66U8IRQXElJNgTV+xGccwnOE0IsdOkCn39uI46ccy5BeUKIha5dYft2+M9/wo7EOeeKzBNCLHTsCJUrez+Ccy6heUKIhf33t3sSPCE45xKYJ4RY6doVfvgBVqwIOxLnnCuSqBKCiHQRkSUiskxE7szl/WERj8hcKiIbI967QkR+DJYrIta3EZH5wTGHB89WTlxduthPH23knEtQBSYEESkPjAK6Ai2A3iLSInIbVR2oqq1UtRUwApgY7FsLuA84AWgH3CciNYPdngP6AUnB0iUmJQrLEUdA06bebOScS1jR1BDaActUNVVVdwFjgQvy2b438Hbwe2dgqqquV9UNwFSgi4g0AKqr6gy1hzq/BnQrcinigYg1G332GezcGXY0zjlXaNEkhIbAyojXacG6PxGRJkAz4PMC9m0Y/B7NMfuJSIqIpKSnp0cRboi6doVt2+Drr8OOxDnnCi2ahJBb277msW0v4F1VzShg36iPqaqjVTVZVZPr1q1bYLCh6tgRKlXyZiPnXEKKJiGkAY0jXjcCVuexbS+ym4vy2zct+D2aYyaOqlXhtNM8ITjnElI0CWEWkCQizUSkEnbRn5xzIxE5AqgJzIhYPQXoJCI1g87kTsAUVf0F2CIiJwaji/oAk4pZlvjQtSssXAgrVxa8rXPOxZECE4Kq7gFuwi7ui4HxqrpQRIaIyPkRm/YGxgadxFn7rgcexJLKLGBIsA7geuBFYBnwE1A6vlb77KfOuQQlEdfvuJecnKwpKSlhh5E/VRt+2qYNTJwYdjTOOYeIzFbV5IK28zuVY03EblKbNg127Qo7Gueci5onhJLQtSts2QLffBN2JM45FzVPCCXhzDOhYkXvR3DOJRRPCCWhWjU49VSf18g5l1A8IZSUrl3h++9h1aqwI3HOuah4QigpPvupcy7BeEIoKcccAw0bej+Ccy5heEIoKVmzn06dCrt3hx2Nc84VyBNCSTrnHNi82e5JcM65OOcJoSSdcw40aABPPhl2JM45VyBPCCWpcmW45RZ7aM6cOWFH45xz+fKEUNKuu87uS3jiibAjcc65fHlCKGkHHgj9+sH48bBiRdjROOdcnjwh7AsDBtioo2HDwo7EOefy5AlhX2jcGC69FF58EdavL3h755wLgSeEfeX//g+2b4fnngs7Euecy1VUCUFEuojIEhFZJiJ35rHNJSKySEQWishbwbqOIjI3YtkhIt2C914VkeUR77WKXbHi0LHH2nQWw4fDjh1hR+Occ39SYEIQkfLAKKAr0ALoLSItcmyTBAwGTlHVo4FbAFR1uqq2UtVWwBnAduDTiF1vz3pfVefGpETx7PbbYc0aeO21sCNxzrk/iaaG0A5YpqqpqroLGAtckGOba4FRqroBQFXX5HKci4CPVXV7cQJOaB072qM1n3wSMjPDjsY55/YSTUJoCKyMeJ0WrIt0OHC4iHwtIjNFpEsux+kFvJ1j3UMi8r2IDBORylFHnahErJawdClMnhx2NM45t5doEoLksk5zvK4AJAEdgN7AiyJS448DiDQAjgWmROwzGDgSaAvUAgblenKRfiKSIiIp6enpUYQb53r0gKZN4fHHw47EOef2Ek1CSAMaR7xuBKzOZZtJqrpbVZcDS7AEkeUS4D1V/WPaT1X9Rc1O4BWsaepPVHW0qiaranLdunWjCDfOVagAt94KM2bA11+HHY1zzv0hmoQwC0gSkWYiUglr+snZ3vE+0BFAROpgTUipEe/3JkdzUVBrQEQE6AYsKEoBEtLVV0OtWvCPf4QdiXPO/aHAhKCqe4CbsOaexcB4VV0oIkNE5PxgsynAOhFZBEzHRg+tAxCRplgN44sch35TROYD84E6wNDiFydBHHAA3Hij9SP88EPY0TjnHACimrM7IH4lJydrSkpK2GHExpo10KQJ/PWv8M9/hh2Nc64UE5HZqppc0HZ+p3JY6tWDK66wexJ+/TXsaJxzzhNCqG67zR6vOWJE2JE455wnhFAlJcGFF8Kzz8LWrWFH45wr4zwhhO3222HjRpsJ1TnnQuQJIWwnnginnmrPSti9u+DtY23uXPj++31/Xudc3PGEEA/uuAP+9z/4299g27Z9c87//Q8uuwxat4auXX1uJeecJ4S4cO65lhReeQWOPx5Kcmjt1q1w771wxBEwcaIlg9WrYebMkjuncy4heEKIByLw2GMwbZo9ROekk+DhhyEjI3bnyMiAl1+2juyhQ6F7d1iyBMaOhUqVYMKE2J3LOZeQPCHEkzPOsPb87t3h7ruhQwdYsaL4x/33vyE5Gfr2hWbNbB6lN9+EQw6B6tXh7LMtISTQTYrOudjzhBBvata0b+2vv27JoWVL+70oF+sff7RhrR07woYNdtyvv7aO7Eg9esDPP8OcObEpg3MuIXlCiEciNqXFvHlw3HHQpw/06mUX9fysWWPNTsOGweWXw9FH2+uHH4bFi6FnTzt2TuefD+XLe7ORc2Wcz2UU7zIyrH/hvvvgoINgzBj7hr9wIcyfv/eyJuJBdfXqQbdu8MADtl9Bzj7bRh798EPuScM5l7Cincuowr4IxhVD+fJw113QqZMNEz3zTLtgZyXy/fe3msC558Kxx2Yv9eoV7jw9esD111uiOeaY2JfDORf3PCEkiuRka+MfNsxqDVkX/ubNLWkUV7ducMMN8O67nhCcK6O8ychla9/e+inmzw87EudcDPn0167wevSABQtg6dKwI3HOhcATgsvWvbv99NFGzpVJUSUEEekiIktEZJmI3JnHNpeIyCIRWSgib0WszxCRucEyOWJ9MxH5VkR+FJFxwfOaXZgaN4YTTvCE4FwZVWBCEJHywCigK9AC6C0iLXJskwQMBk5R1aOBWyLe/l1VWwXL+RHrHwOGqWoSsAHoW7yiuJjo0QNmz47NHdLOuYQSTQ2hHbBMVVNVdRcwFrggxzbXAqNUdQOAqq4hHyIiwBnAu8GqMUC3wgTuSkiPHvZz4sRw43DO7XPRJISGwMqI12nBukiHA4eLyNciMlNEukS8V0VEUoL1WRf92sBGVd2TzzFdGJo3h1atvNnIuTIomoSQ222rOceqVgCSgA5Ab+BFEakRvHdIMNzpUuBpETk0ymPayUX6BQklJT09PYpwXbH16AHffGPTYjvnyoxoEkIa0DjidSMg55UiDZikqrtVdTmwBEsQqOrq4Gcq8G+gNbAWqCEiFfI5JsF+o1U1WVWT69atG1WhXDFlNRu99164cTjn9qloEsIsICkYFVQJ6AVMzrHN+0BHABGpgzUhpYpITRGpHLH+FGCR2t1w04GLgv2vACYVtzAuRo46ypZ33y14W+dcqVFgQgja+W8CpgCLgfGqulBEhohI1qihKcA6EVmEXehvV9V1wFFAiojMC9Y
/qqqLgn0GAbeKyDKsT+GlWBbMFVOPHvDll+DNdM6VGT51hcvd3Ln2vOXRo+Haa8OOxjlXDD51hSue446zEUc+2si5MsMTgsudiDUbffZZwQ/mcc6VCp4QXN569IA9e+Bf/wo7EufcPuAJweWtbVto1MibjZwrIzwhuLyVK2czoE6ZAlu2hB2Nc66EeUJw+evRA3buhI8+CjsS51wJ84Tg8nfKKVC/vt+k5lwZ4AnB5a98ebjwQqshbN8edjTOuRLkCcEVrEcPSwZTpoQdiXOuBHlCcAU7/XSoVQvefhs2boQEurvdORe9CgVv4sq8ihWt2eill+Cdd6wZqVYtqF07e6lTZ+/XdevaUqeO/axRw252c87FLU8ILjqPPQbt28PatbBu3d7LihWQkmK/79yZ+/4VKmQnh6yfzZvD4MFQvfo+LYpzLneeEFx0ateGPn3y30bV+hrWrrUlPT37Z+QU1ku9AAAXkklEQVSydq1NnvfOO5ZERo/eN2VwzuXLE4KLHRE44ABbmjQpePs77oB//AN69YIzzij5+Jxz+fJOZReeBx6ApCS45hrYti3saJwr8zwhuPDstx+8+CIsXw733rvvzvvvf8OcOfvufM4lCE8ILlzt28P118PTT8PMmSV/vnXr4LzzoFs32LGj5M/nXAKJKiGISBcRWSIiy0Tkzjy2uUREFonIQhF5K1jXSkRmBOu+F5GeEdu/KiLLRWRusLSKTZFcwnn0UZtV9eqr8x6lFCvPPANbt8LKlfDssyV7LucSTIGP0BSR8sBS4GwgDZgF9I54NjIikgSMB85Q1Q0iUk9V14jI4YCq6o8icjAwGzhKVTeKyKvAB6oa9SQ5/gjNUuyTT6BrV7jnHnjwwZI5x8aN0LQpnHmm9VnMmgWpqXDggSVzPufiRCwfodkOWKaqqaq6CxgLXJBjm2uBUaq6AUBV1wQ/l6rqj8Hvq4E1QN3oi+HKjC5dbFjro4/CvHklc44RI2DTJuuveOQRWL8eHn+8ZM7lXAKKJiE0BFZGvE4L1kU6HDhcRL4WkZki0iXnQUSkHVAJ+Cli9UNBU9IwEamc28lFpJ+IpIhISnp6ehThuoT11FN2B3TfvvaktljassX6Kc47D1q1gtatoXdvGDYMVq+O7bmcS1DRJITc5hvI2c5UAUgCOgC9gRdFpMYfBxBpALwOXKWqmcHqwcCRQFugFjAot5Or6mhVTVbV5Lp1vXJRqtWuDaNGwezZ8OSTsT32s89ajSByNNPQoZZ4hgyJ7bmcS1DRJIQ0oHHE60ZAzq9UacAkVd2tqsuBJViCQESqAx8C96jqH8NIVPUXNTuBV7CmKVfWXXSRPaXtvvtg6dLYHHPbNkswnTvbY0GzNG8O111nQ19jdS7nElg0CWEWkCQizUSkEtALmJxjm/eBjgAiUgdrQkoNtn8PeE1V34ncIag1ICICdAMWFKcgrhQZOdLuUejbFzIzC96+IC+8YFNm/P3vf37v3nvtXHffXfzzOJfgCkwIqroHuAmYAiwGxqvqQhEZIiLnB5tNAdaJyCJgOnC7qq4DLgHaA1fmMrz0TRGZD8wH6gBDY1oyl7gaNLC2/a++gueeK96xfv/dpsc44ww4+eQ/v1+vHtx2mz0R7rvvincu5xJcgcNO44kPOy1DVG0Y6tdfw4IF0c2NlJuRI6F/f5g+HTp0yH2bLVvg0EPhmGPgs898mm5X6sRy2Klz+56INfWoWjt/Ub647Nxp03afeqo95Ccv1apZ09H06fDpp0WP2bkE5wnBxa8mTey+hClT7Ga1wiaFV1+FtDTrOyjoW/9110GzZnDnnbHptygKVdi1y2osPtmfC4EnBBffbrgB/vpXG3U0YED0F+vduy2ZnHACnHVWwdtXqmRJZ+5cGDu2eDHn5bvv4KSToEULa6Jq1MgeFHTggVClCpQrB5Ur2wODatWCCRNKJg7n8uB9CC7+ZWbC//2fdTT36gVjxtgFPD+vvGJzI33wAfzlL9Gf5/jjYfNm+OGHgs9RGNu3Q8uW9vOUU+zCX7mynSPr98hl3DiLISUFjjgidnG4MinaPgRUNWGWNm3aqCujMjNVH31UFVQ7dVLdsiXvbXfvVj3sMNXjj7f9CuPjj+0cw4cXL96cBg60406fHt32K1eq1qmj2qJF/mV1LgpAikZxjU34J6bt3r2btLQ0dpSyqYyrVKlCo0aNqFixYtihxAcRGDTImliuvdaGkX70kT2fOaexY2HZMnjvvcKPGOrcGTp2tOajK6+0Dufi+uYbmzbj+uvzHumUU6NGVo5OnaBfP3jzTR/95EpeNFkjXpbcagipqamanp6umYX9JhjHMjMzNT09XVNTU8MOJT5NmqRapYrqEUeorlix93t79qgeeaTqsceqZmQU7fjffmvf5u+7r9ih6vbtFmeTJqqbNxd+/6FDLZaRI4sfiyuziLKGkPCdyjt27KB27dpIKfr2JCLUrl271NV6Yub882146K+/Wnv8woXZ702YYG3v99xjnbRF0a4d9Ohh012sWVO8WO+/H5YsgX/+s2i1jcGDrQ9k4MB98wAhV6YlfEIASlUyyFIayxRTp50G//mPdQSfdpo1y2Rm2oR1Rx5pF/TieOghu8v53nuLdg8E2KiiJ56wZ0affXbRjlGuHLz+OjRsCBdfbFNwOFdCSkVCcGXUscdaIqhTx4aWDhwI8+db7aB8+eId+4gj4OabYfRoO25h703YudNGOTVoYEmhOGrWtJpPejpceilkZBTveM7lwROCS2xNm9r0FkcfDcOHw2GHQc+eBe4WlSeegFtuscduXnZZ4R7vOXSoNWWNHh2bJ7Idf7xNDT5tmjVDOVcCEn6UUTzo1q0bK1euZMeOHQwYMIB+/frxySefcNddd5GRkUGdOnX47LPP2Lp1K/379yclJQUR4b777qNHcZs2nI08+vxzuOMOmz67Qoz+W5crZw/tOfhgO/batTBxYsF9Af/9rz2RrU8fOOec2MQCNvvrN99YsjnxxOjur1i2zPovXnvN7n6uUyf3pXbt7N+POw5q1Cj42K7USfgb0xYvXsxRRx0F2Je5uXNje85WrWzEYH7Wr19PrVq1+P3332nbti2fffYZycnJfPnllzRr1uyP9wcNGsTOnTt5Ojjghg0bqFmzZp7HjSybC9lrr1kT0HHH2XDX+vVz327XLuuU/u03qyHUqhXbOH7/3WZtXbEC5syx6TZy2rkT3n/faieff27NZ+edZ7WptWv/vGzduvf+NWtaH0q/fsVvenNxIdob07yGEAPDhw/nvffeA2DlypWMHj2a9u3b0yz4Y60VXBSmTZvG2IhpEfJLBi7O9OljNZGLLrKRTVOm2PQTOWU9E/r992OfDMCe3TBhArRpYx3n33xj016APeTnn/+0OZzWrrUE8NBDcNVV1peRlx07YN0622f1apsu/IYbLKGMHGnldWVDNGNT42XJ7T6ERYsWFWt8bnFNnz5dTznlFN22bZuqqp5++uk6adIkveyyy/60bevWrfXHH3+M+thhl83lYuZM1dq1VevVU509e+/3vv9etWJF1d
69Sz6OyZPt/oSrrlJ96y3VDh3sdYUKqt27q37ySdHvw8jMVB03TrVRIzvm5Zerrl4d2/gT0bvvqn70UdhRFAll5T6EsG3atImaNWuy//7788MPPzBz5kx27tzJF198wfLlywFrUgLo1KkTI0eO/GPfDRs2hBKzK4YTTrBO7P32sym1p02z9Xv22DfxGjWsc7uknXee3aPwyis28ujnn+Hhh+F//7MaROfORb8PQwQuuQQWL7ZzjBtno66eesomDSyLHn/caofnnmv/HqVVNFkD6II9J3kZcGce21wCLAIWAm9FrL8C+DFYrohY3wZ7WtoyYDhBf0Z+SzzWEHbs2KFdunTRY489Vi+66CI9/fTTdfr06frRRx9pq1attGXLlnrWWWepquqWLVu0T58+evTRR2vLli11woQJ+R477LK5fKxaZXdDV6yo+vbbqg8/bN+mx4/fdzHs3q361FOqn35a9NpANJYuVe3a1cp31FGqU6eW3LniTWam6l13Wdl79lQ97TTV8uWttpBAiLKGEE0yKA/8BDQHKgHzgBY5tkkC/gvUDF7XC37WAlKDnzWD37O2+Q44CRDgY6BrQbHEY0IoSaW5bKXChg2q7dtnN9X06BF2RCUnM9OaqZo3t/L26KG6fHnYUZWsjAzVm26y8l57rU2Lsnmz6skn2+c9aVLRj52ZqTpihOoll9j0JiUs2oQQTZ2yHbBMVVNVdRcwFrggxzbXAqNUdUNQ68i6378zMFVV1wfvTQW6iEgDoLqqzgiCfQ3oFkUszsWPGjWsc/mSS2zU0ahRYUdUckSsmWrhQpv476OPrFP9nHPsedSFuUcjEezZY6PKRo60Z26/8IKNuKpWzcp+/PHWhPTRR4U/9saNNiCgf38YP97uho8T0SSEhsDKiNdpwbpIhwOHi8jXIjJTRLoUsG/D4Pf8julc/KtSxdqUV6zIeyhqaVKlit0JvmQJ3HWX3Rl+8cU2tcaAATbCKtHt3Jn93I0hQ2zUVeRUMgceCJ98YnfKd+9euMeuzpplyeRf/7K5sq67zvpm/vOf2JejCKJJCLlNqpPz5oUKWLNRB6A38KKI1Mhn32iOaScX6SciKSKSku7zuLh4Faub4RJF48ZWU1ixwi6OZ54Jzz9vN+60aWO1pWAwRULZvh0uuMA65p9+2r695zavWM2algiOPNK2//zz/I+rCiNG2BDePXvgyy/h1lvtbvimTW2q9Zz3g4QgmoSQBjSOeN0IWJ3LNpNUdbeqLsc6oJPy2Tct+D2/YwKgqqNVNVlVk+vWrRtFuM65faZ8eRvRNG6c3cMwfLjN+3TTTXaHd69e1qQ0Y4bdJ7F+fXjPrC7Ipk1WlqlT4aWXrMaTn9q1bdtDD7XmtC+/zH27jRuteenmm+34c+fao1QBqla1kWLLl9vzPkJW4J3KIlIBWAqcCawCZgGXqurCiG26AL1V9QoRqYN1MLfCvvXPBo4PNp0DtFHV9SIyC+gPfAt8BIxQ1Xwb5Aq6U7m0Kc1lc6Xcf/9rF7o33oCcw6vLlbOb9iKny6hd29btt58tVarYkvV75LoDDrBv1bVrx+6hQenp0KULfP+9PYzokkui3/e33+zBRytXWq3h5JOz30tJsWOtXGk3Ld56a+4xDxxoNZJp06y2FWMxu1NZVfeIyE3AFGzE0cuqulBEhmA915OD9zqJyCIgA7hdVdcFgTyIJRGAIaqaVY+8HngV2A8bZfRxYQronItjrVvb8vjj1s+QdSd0bj+XL7cL5/r1NjVHtGrUgKQkWw47LPv3pKTC3SW+apVNT758OUyaVPj5p+rXtyaj00+3pDJtGrRtm90hfdBBVnvIqhXk5uGH4eOPrSP7++9jMyFiEZSquYzCUrVqVbaWQPtfPJTNuX1K1eaD+v13m1Ijt59bttjF+8cfbVm2zG7Mi7yW1aplTTmVKlkn8a5d2UvO1zt2WK3jgw/sol5UaWm2/7p1cOqp8OGHNgHhmDFWmynIt99a7eLKK63JKoZ8LiPnXOIRgcqVbSmMnTshNXXvJPHTT9ZfUb26Ha9SpdyXypWtWadly+LF3qhRdk3hk0+sdnTbbdHfMX7CCTar7qOP2uilaGazjTFPCDGkqtxxxx18/PHHiAj33HMPPXv25JdffqFnz55s3ryZPXv28Nxzz3HyySfTt2/fP6bCvvrqqxk4cGDYRXAuMVWuDEcdZUuYmjSxoaXr1tkIpMK6/36rqVxzTcnMlluA0pUQwpr/OjBx4kTmzp3LvHnzWLt2LW3btqV9+/a89dZbdO7cmbvvvpuMjAy2b9/O3LlzWbVqFQsWLABg48aNsY3bOReOunVtKYrKlW2q9Xbt7Ma1N9+MbWwF8MntYuirr76id+/elC9fnvr163P66acza9Ys2rZtyyuvvML999/P/PnzqVatGs2bNyc1NZX+/fvzySefUL169bDDd87Fg9at7f6Ht96y+yH2odJVQ4jym3xJyauDvn379nz55Zd8+OGHXH755dx+++306dOHefPmMWXKFEaNGsX48eN5+eWX93HEzrm4NHgwTJ4Mf/sbnHYa1Ku3T07rNYQYat++PePGjSMjI4P09HS+/PJL2rVrx88//0y9evW49tpr6du3L3PmzGHt2rVkZmbSo0cPHnzwQebMmRN2+M65eFGxoo1O2rwZrr9+7xFUJah01RBCduGFFzJjxgyOO+44RITHH3+cgw46iDFjxvCPf/yDihUrUrVqVV577TVWrVrFVVddRWZw1+YjjzwScvTOubhy9NE2PcigQfD22/bcixLm9yHEsdJcNudcFDIyrMlo8WIbdXTwwUU6TLT3IXiTkXPOxavy5a3p6MQTbVK8EuZNRs45F8+Skmxai33AawjOOeeAUpIQEqkfJFqlsUzOufiW8AmhSpUqrFu3rlRdQFWVdevWUaVKlbBDcc6VIQnfh9CoUSPS0tIobU9Tq1KlCo0aNSp4Q+eci5GETwgVK1akWbNmYYfhnHMJL+GbjJxzzsWGJwTnnHOAJwTnnHOBhJq6QkTSgZ9zrK4DrA0hnJJQWspSWsoBXpZ4VVrKsq/K0URVC3xIQ0IlhNyISEo0c3QkgtJSltJSDvCyxKvSUpZ4K4c3GTnnnAM8ITjnnAuUhoQwOuwAYqi0lKW0lAO8LPGqtJQlrsqR8H0IzjnnYqM01BCcc87FQMImBBHpIiJLRGSZiNwZdjzFISIrRGS+iMwVkZSC94gfIvKyiKwRkQUR62qJyFQR+TH4WTPMGKOVR1nuF5FVwWczV0TOCTPGaIhIYxGZLiKLRWShiAwI1ifc55JPWRLxc6kiIt+JyLygLA8E65uJyLfB5zJORCqFFmMiNhmJSHlgKXA2kAbMAnqr6qJQAysiEVkBJKtqwo2rFpH2wFbgNVU9Jlj3OLBeVR8NknVNVR0UZpzRyKMs9wNbVfWJMGMrDBFpADRQ1TkiUg2YDXQDriTBPpd8ynIJife5CHCAqm4VkYrAV8AA4FZgoqqOFZHngXmq+lwYMSZqDaEdsExVU1V1FzAWuCDkmMokVf0SWJ9j9QXAmOD3Mdgfc
NzLoywJR1V/UdU5we9bgMVAQxLwc8mnLAlHzdbgZcVgUeAM4N1gfaifS6ImhIbAyojXaSTof5KAAp+KyGwR6Rd2MDFQX1V/AfuDBuqFHE9x3SQi3wdNSnHfzBJJRJoCrYFvSfDPJUdZIAE/FxEpLyJzgTXAVOAnYKOqZj0wOdRrWaImBMllXeK1fWU7RVWPB7oCNwZNFy4+PAccCrQCfgGeDDec6IlIVWACcIuqbg47nuLIpSwJ+bmoaoaqtgIaYS0dR+W22b6NKluiJoQ0oHHE60bA6pBiKTZVXR38XAO8h/1HSWS/BW2/WW3Aa0KOp8hU9bfgjzgT+CcJ8tkEbdQTgDdVdWKwOiE/l9zKkqifSxZV3Qj8GzgRqCEiWc+mCfValqgJYRaQFPTOVwJ6AZNDjqlIROSAoLMMETkA6AQsyH+vuDcZuCL4/QpgUoixFEvWBTRwIQnw2QSdly8Bi1X1qYi3Eu5zyassCfq51BWRGsHv+wFnYX0i04GLgs1C/VwScpQRQDDM7GmgPPCyqj4UckhFIiLNsVoB2BPs3kqksojI20AHbNbG34D7gPeB8cAhwP+Ai1U17jtr8yhLB6xZQoEVwHVZ7fDxSkROBf4DzAcyg9V3YW3vCfW55FOW3iTe59IS6zQuj30ZH6+qQ4JrwFigFvBf4K+qujOUGBM1ITjnnIutRG0ycs45F2OeEJxzzgGeEJxzzgU8ITjnnAM8ITjnnAt4QnCuBIlIBxH5IOw4nIuGJwTnnHOAJwTnABCRvwZz1c8VkReCSci2isiTIjJHRD4TkbrBtq1EZGYwsdp7WROrichhIjItmO9+jogcGhy+qoi8KyI/iMibwd23iMijIrIoOE7CTOPsSi9PCK7ME5GjgJ7YJIOtgAzgMuAAYE4w8eAX2J3LAK8Bg1S1JXYHbdb6N4FRqnoccDI26RrYDJ23AC2A5sApIlILm3Lh6OA4Q0u2lM4VzBOCc3Am0AaYFUxNfCZ24c4ExgXbvAGcKiIHAjVU9Ytg/RigfTAfVUNVfQ9AVXeo6vZgm+9UNS2YiG0u0BTYDOwAXhSR7kDWts6FxhOCczad+hhVbRUsR6jq/blsl988L7lNyZ4lcl6aDKBCMP99O2wWz27AJ4WM2bmY84TgHHwGXCQi9eCPZw83wf4+smahvBT4SlU3ARtE5LRg/eXAF8Ec/Wki0i04RmUR2T+vEwbz+x+oqh9hzUmtSqJgzhVGhYI3ca50U9VFInIP9tS6csBu4EZgG3C0iMwGNmH9DGBTFD8fXPBTgauC9ZcDL4jIkOAYF+dz2mrAJBGpgtUuBsa4WM4Vms926lweRGSrqlYNOw7n9hVvMnLOOQd4DcE551zAawjOOecATwjOOecCnhCcc84BnhCcc84FPCE455wDPCE455wL/D9qRMUdLYe36gAAAABJRU5ErkJggg==\n",
"text/plain": [
"<matplotlib.figure.Figure at 0x7fcb4879bf28>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fill_data('Embarked') # id:62,830"
]
},
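{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check on the imputed values, using the PassengerIds noted in the comment above (62 and 830 are the two rows with missing Embarked):\n",
"\n",
"```python\n",
"print(df0.loc[[62, 830], 'Embarked'])  # should now hold predicted port codes (0=S, 1=C, 2=Q)\n",
"```"
]
},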
{
"cell_type": "code",
"execution_count": 18,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Train on 938 samples, validate on 105 samples\n",
"Epoch 1/300\n",
"938/938 [==============================] - 0s 253us/step - loss: 8.9094 - mean_absolute_error: 2.5688 - val_loss: 5.5439 - val_mean_absolute_error: 1.9672\n",
"\n",
"Epoch 00001: val_loss improved from inf to 5.54393, saving model to checkpoint_Fare.hdf5\n",
"Epoch 2/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 4.6612 - mean_absolute_error: 1.8467 - val_loss: 3.4742 - val_mean_absolute_error: 1.4843\n",
"\n",
"Epoch 00002: val_loss improved from 5.54393 to 3.47424, saving model to checkpoint_Fare.hdf5\n",
"Epoch 3/300\n",
"938/938 [==============================] - 0s 72us/step - loss: 2.7566 - mean_absolute_error: 1.3448 - val_loss: 2.4206 - val_mean_absolute_error: 1.1232\n",
"\n",
"Epoch 00003: val_loss improved from 3.47424 to 2.42062, saving model to checkpoint_Fare.hdf5\n",
"Epoch 4/300\n",
"938/938 [==============================] - 0s 80us/step - loss: 1.7961 - mean_absolute_error: 0.9854 - val_loss: 1.8958 - val_mean_absolute_error: 0.8475\n",
"\n",
"Epoch 00004: val_loss improved from 2.42062 to 1.89583, saving model to checkpoint_Fare.hdf5\n",
"Epoch 5/300\n",
"938/938 [==============================] - 0s 91us/step - loss: 1.3640 - mean_absolute_error: 0.7712 - val_loss: 1.7291 - val_mean_absolute_error: 0.7299\n",
"\n",
"Epoch 00005: val_loss improved from 1.89583 to 1.72906, saving model to checkpoint_Fare.hdf5\n",
"Epoch 6/300\n",
"938/938 [==============================] - 0s 96us/step - loss: 1.2310 - mean_absolute_error: 0.6862 - val_loss: 1.6974 - val_mean_absolute_error: 0.7071\n",
"\n",
"Epoch 00006: val_loss improved from 1.72906 to 1.69739, saving model to checkpoint_Fare.hdf5\n",
"Epoch 7/300\n",
"938/938 [==============================] - 0s 90us/step - loss: 1.1849 - mean_absolute_error: 0.6521 - val_loss: 1.6919 - val_mean_absolute_error: 0.7020\n",
"\n",
"Epoch 00007: val_loss improved from 1.69739 to 1.69192, saving model to checkpoint_Fare.hdf5\n",
"Epoch 8/300\n",
"938/938 [==============================] - 0s 90us/step - loss: 1.1712 - mean_absolute_error: 0.6380 - val_loss: 1.6902 - val_mean_absolute_error: 0.6990\n",
"\n",
"Epoch 00008: val_loss improved from 1.69192 to 1.69016, saving model to checkpoint_Fare.hdf5\n",
"Epoch 9/300\n",
"938/938 [==============================] - 0s 72us/step - loss: 1.1354 - mean_absolute_error: 0.6196 - val_loss: 1.6893 - val_mean_absolute_error: 0.6972\n",
"\n",
"Epoch 00009: val_loss improved from 1.69016 to 1.68933, saving model to checkpoint_Fare.hdf5\n",
"Epoch 10/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 1.1181 - mean_absolute_error: 0.6091 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00010: val_loss improved from 1.68933 to 1.68909, saving model to checkpoint_Fare.hdf5\n",
"Epoch 11/300\n",
"938/938 [==============================] - 0s 70us/step - loss: 1.1126 - mean_absolute_error: 0.6037 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00011: val_loss did not improve from 1.68909\n",
"Epoch 12/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 1.1241 - mean_absolute_error: 0.6080 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00012: val_loss did not improve from 1.68909\n",
"Epoch 13/300\n",
"938/938 [==============================] - 0s 76us/step - loss: 1.1258 - mean_absolute_error: 0.6088 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00013: ReduceLROnPlateau reducing learning rate to 0.00020000000949949026.\n",
"\n",
"Epoch 00013: val_loss did not improve from 1.68909\n",
"Epoch 14/300\n",
"938/938 [==============================] - 0s 65us/step - loss: 1.1162 - mean_absolute_error: 0.6043 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00014: val_loss did not improve from 1.68909\n",
"Epoch 15/300\n",
"938/938 [==============================] - 0s 74us/step - loss: 1.1207 - mean_absolute_error: 0.6061 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00015: val_loss did not improve from 1.68909\n",
"Epoch 16/300\n",
"938/938 [==============================] - 0s 76us/step - loss: 1.1153 - mean_absolute_error: 0.6049 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00016: ReduceLROnPlateau reducing learning rate to 4.0000001899898055e-05.\n",
"\n",
"Epoch 00016: val_loss did not improve from 1.68909\n",
"Epoch 17/300\n",
"938/938 [==============================] - 0s 81us/step - loss: 1.1196 - mean_absolute_error: 0.6068 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00017: val_loss did not improve from 1.68909\n",
"Epoch 18/300\n",
"938/938 [==============================] - 0s 88us/step - loss: 1.1197 - mean_absolute_error: 0.6043 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00018: val_loss did not improve from 1.68909\n",
"Epoch 19/300\n",
"938/938 [==============================] - 0s 84us/step - loss: 1.1266 - mean_absolute_error: 0.6088 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00019: ReduceLROnPlateau reducing learning rate to 8.000000525498762e-06.\n",
"\n",
"Epoch 00019: val_loss did not improve from 1.68909\n",
"Epoch 20/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 1.1196 - mean_absolute_error: 0.6043 - val_loss: 1.6891 - val_mean_absolute_error: 0.6967\n",
"\n",
"Epoch 00020: val_loss did not improve from 1.68909\n",
"Epoch 00020: early stopping\n"
]
}
],
"source": [
"fill_data('Fare') # id:1044"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Train on 938 samples, validate on 105 samples\n",
"Epoch 1/300\n",
"938/938 [==============================] - 0s 341us/step - loss: 7.6284 - mean_absolute_error: 2.3602 - val_loss: 4.4499 - val_mean_absolute_error: 1.8288\n",
"\n",
"Epoch 00001: val_loss improved from inf to 4.44989, saving model to checkpoint_Age.hdf5\n",
"Epoch 2/300\n",
"938/938 [==============================] - 0s 82us/step - loss: 3.8868 - mean_absolute_error: 1.6588 - val_loss: 2.4728 - val_mean_absolute_error: 1.3419\n",
"\n",
"Epoch 00002: val_loss improved from 4.44989 to 2.47278, saving model to checkpoint_Age.hdf5\n",
"Epoch 3/300\n",
"938/938 [==============================] - 0s 81us/step - loss: 2.2828 - mean_absolute_error: 1.2684 - val_loss: 1.5693 - val_mean_absolute_error: 1.0532\n",
"\n",
"Epoch 00003: val_loss improved from 2.47278 to 1.56933, saving model to checkpoint_Age.hdf5\n",
"Epoch 4/300\n",
"938/938 [==============================] - 0s 77us/step - loss: 1.5540 - mean_absolute_error: 1.0170 - val_loss: 1.1551 - val_mean_absolute_error: 0.8729\n",
"\n",
"Epoch 00004: val_loss improved from 1.56933 to 1.15514, saving model to checkpoint_Age.hdf5\n",
"Epoch 5/300\n",
"938/938 [==============================] - 0s 80us/step - loss: 1.1987 - mean_absolute_error: 0.8832 - val_loss: 1.0211 - val_mean_absolute_error: 0.8047\n",
"\n",
"Epoch 00005: val_loss improved from 1.15514 to 1.02112, saving model to checkpoint_Age.hdf5\n",
"Epoch 6/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 1.0803 - mean_absolute_error: 0.8229 - val_loss: 0.9843 - val_mean_absolute_error: 0.7832\n",
"\n",
"Epoch 00006: val_loss improved from 1.02112 to 0.98428, saving model to checkpoint_Age.hdf5\n",
"Epoch 7/300\n",
"938/938 [==============================] - 0s 79us/step - loss: 1.0654 - mean_absolute_error: 0.8107 - val_loss: 0.9686 - val_mean_absolute_error: 0.7786\n",
"\n",
"Epoch 00007: val_loss improved from 0.98428 to 0.96857, saving model to checkpoint_Age.hdf5\n",
"Epoch 8/300\n",
"938/938 [==============================] - 0s 82us/step - loss: 1.0416 - mean_absolute_error: 0.8012 - val_loss: 0.9594 - val_mean_absolute_error: 0.7756\n",
"\n",
"Epoch 00008: val_loss improved from 0.96857 to 0.95938, saving model to checkpoint_Age.hdf5\n",
"Epoch 9/300\n",
"938/938 [==============================] - 0s 74us/step - loss: 1.0249 - mean_absolute_error: 0.7952 - val_loss: 0.9534 - val_mean_absolute_error: 0.7736\n",
"\n",
"Epoch 00009: val_loss improved from 0.95938 to 0.95341, saving model to checkpoint_Age.hdf5\n",
"Epoch 10/300\n",
"938/938 [==============================] - 0s 76us/step - loss: 1.0173 - mean_absolute_error: 0.7899 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00010: val_loss improved from 0.95341 to 0.95071, saving model to checkpoint_Age.hdf5\n",
"Epoch 11/300\n",
"938/938 [==============================] - 0s 80us/step - loss: 1.0108 - mean_absolute_error: 0.7877 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00011: val_loss did not improve from 0.95071\n",
"Epoch 12/300\n",
"938/938 [==============================] - 0s 80us/step - loss: 1.0022 - mean_absolute_error: 0.7844 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00012: val_loss did not improve from 0.95071\n",
"Epoch 13/300\n",
"938/938 [==============================] - 0s 75us/step - loss: 1.0092 - mean_absolute_error: 0.7845 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00013: ReduceLROnPlateau reducing learning rate to 0.00020000000949949026.\n",
"\n",
"Epoch 00013: val_loss did not improve from 0.95071\n",
"Epoch 14/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 0.9999 - mean_absolute_error: 0.7827 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00014: val_loss did not improve from 0.95071\n",
"Epoch 15/300\n",
"938/938 [==============================] - 0s 68us/step - loss: 1.0042 - mean_absolute_error: 0.7825 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00015: val_loss did not improve from 0.95071\n",
"Epoch 16/300\n",
"938/938 [==============================] - 0s 70us/step - loss: 1.0074 - mean_absolute_error: 0.7871 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00016: ReduceLROnPlateau reducing learning rate to 4.0000001899898055e-05.\n",
"\n",
"Epoch 00016: val_loss did not improve from 0.95071\n",
"Epoch 17/300\n",
"938/938 [==============================] - 0s 76us/step - loss: 1.0043 - mean_absolute_error: 0.7832 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00017: val_loss did not improve from 0.95071\n",
"Epoch 18/300\n",
"938/938 [==============================] - 0s 73us/step - loss: 1.0017 - mean_absolute_error: 0.7830 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00018: val_loss did not improve from 0.95071\n",
"Epoch 19/300\n",
"938/938 [==============================] - 0s 72us/step - loss: 1.0063 - mean_absolute_error: 0.7838 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00019: ReduceLROnPlateau reducing learning rate to 8.000000525498762e-06.\n",
"\n",
"Epoch 00019: val_loss did not improve from 0.95071\n",
"Epoch 20/300\n",
"938/938 [==============================] - 0s 74us/step - loss: 1.0131 - mean_absolute_error: 0.7872 - val_loss: 0.9507 - val_mean_absolute_error: 0.7725\n",
"\n",
"Epoch 00020: val_loss did not improve from 0.95071\n",
"Epoch 00020: early stopping\n"
]
}
],
"source": [
"fill_data('Age') # id: 6,18,20,27,29,30"
]
},
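  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick sanity check (an illustrative sketch, assuming `fill_data` writes the predicted ages back into the combined frame `df0`): after this step there should be no missing `Age` values left."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative check: count remaining NaNs in the Age column of the combined frame df0\n",
    "df0['Age'].isnull().sum()"
   ]
  },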
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Split the Data back to Train and Test "
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Pclass</th>\n",
" <th>Sex</th>\n",
" <th>Age</th>\n",
" <th>SibSp</th>\n",
" <th>Parch</th>\n",
" <th>Fare</th>\n",
" <th>Cabin</th>\n",
" <th>Embarked</th>\n",
" <th>Title</th>\n",
" </tr>\n",
" <tr>\n",
" <th>PassengerId</th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>-0.546789</td>\n",
" <td>0.481104</td>\n",
" <td>-0.444829</td>\n",
" <td>-0.503210</td>\n",
" <td>0</td>\n",
" <td>0.0</td>\n",
" <td>-0.541826</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0.563282</td>\n",
" <td>0.481104</td>\n",
" <td>-0.444829</td>\n",
" <td>0.733941</td>\n",
" <td>1</td>\n",
" <td>1.0</td>\n",
" <td>0.053183</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>3</td>\n",
" <td>1</td>\n",
" <td>-0.269271</td>\n",
" <td>-0.478904</td>\n",
" <td>-0.444829</td>\n",
" <td>-0.490169</td>\n",
" <td>0</td>\n",
" <td>0.0</td>\n",
" <td>0.648192</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0.355144</td>\n",
" <td>0.481104</td>\n",
" <td>-0.444829</td>\n",
" <td>0.382632</td>\n",
" <td>1</td>\n",
" <td>0.0</td>\n",
" <td>0.053183</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>0.355144</td>\n",
" <td>-0.478904</td>\n",
" <td>-0.444829</td>\n",
" <td>-0.487754</td>\n",
" <td>0</td>\n",
" <td>0.0</td>\n",
" <td>-0.541826</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Pclass Sex Age SibSp Parch Fare Cabin \\\n",
"PassengerId \n",
"1 3 0 -0.546789 0.481104 -0.444829 -0.503210 0 \n",
"2 1 1 0.563282 0.481104 -0.444829 0.733941 1 \n",
"3 3 1 -0.269271 -0.478904 -0.444829 -0.490169 0 \n",
"4 1 1 0.355144 0.481104 -0.444829 0.382632 1 \n",
"5 3 0 0.355144 -0.478904 -0.444829 -0.487754 0 \n",
"\n",
" Embarked Title \n",
"PassengerId \n",
"1 0.0 -0.541826 \n",
"2 1.0 0.053183 \n",
"3 0.0 0.648192 \n",
"4 0.0 0.053183 \n",
"5 0.0 -0.541826 "
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"#df0 = df0.drop(['Title'], axis=1)\n",
"\n",
"train0 = df0[0:891].copy()\n",
"test0 = df0[891:].copy()\n",
"\n",
"train0.head()"
]
},
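  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick shape check (illustrative sketch): the combined frame `df0` should split back into 891 training rows and 418 test rows, each with the same feature columns shown above."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative check: row/column counts of the recovered train and test frames\n",
    "train0.shape, test0.shape"
   ]
  },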
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Model to estimate Survived on Test data for submission"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Train on 712 samples, validate on 179 samples\n",
"Epoch 1/300\n",
"712/712 [==============================] - 1s 915us/step - loss: 0.6920 - acc: 0.6236 - val_loss: 0.4706 - val_acc: 0.7989\n",
"\n",
"Epoch 00001: val_loss improved from inf to 0.47056, saving model to checkpoint_final.hdf5\n",
"Epoch 2/300\n",
"712/712 [==============================] - 0s 466us/step - loss: 0.5920 - acc: 0.7233 - val_loss: 0.4519 - val_acc: 0.8045\n",
"\n",
"Epoch 00002: val_loss improved from 0.47056 to 0.45195, saving model to checkpoint_final.hdf5\n",
"Epoch 3/300\n",
"712/712 [==============================] - 0s 579us/step - loss: 0.5272 - acc: 0.7598 - val_loss: 0.4405 - val_acc: 0.7933\n",
"\n",
"Epoch 00003: val_loss improved from 0.45195 to 0.44048, saving model to checkpoint_final.hdf5\n",
"Epoch 4/300\n",
"712/712 [==============================] - 0s 495us/step - loss: 0.5220 - acc: 0.7612 - val_loss: 0.4308 - val_acc: 0.7877\n",
"\n",
"Epoch 00004: val_loss improved from 0.44048 to 0.43079, saving model to checkpoint_final.hdf5\n",
"Epoch 5/300\n",
"712/712 [==============================] - 0s 446us/step - loss: 0.5136 - acc: 0.7640 - val_loss: 0.4141 - val_acc: 0.8101\n",
"\n",
"Epoch 00005: val_loss improved from 0.43079 to 0.41408, saving model to checkpoint_final.hdf5\n",
"Epoch 6/300\n",
"712/712 [==============================] - 0s 564us/step - loss: 0.5008 - acc: 0.7921 - val_loss: 0.4178 - val_acc: 0.8156\n",
"\n",
"Epoch 00006: val_loss did not improve from 0.41408\n",
"Epoch 7/300\n",
"712/712 [==============================] - 0s 457us/step - loss: 0.4654 - acc: 0.7935 - val_loss: 0.4178 - val_acc: 0.8045\n",
"\n",
"Epoch 00007: val_loss did not improve from 0.41408\n",
"Epoch 8/300\n",
"712/712 [==============================] - 0s 456us/step - loss: 0.4761 - acc: 0.7907 - val_loss: 0.4091 - val_acc: 0.8156\n",
"\n",
"Epoch 00008: val_loss improved from 0.41408 to 0.40911, saving model to checkpoint_final.hdf5\n",
"Epoch 9/300\n",
"712/712 [==============================] - 0s 510us/step - loss: 0.4743 - acc: 0.8048 - val_loss: 0.4105 - val_acc: 0.8101\n",
"\n",
"Epoch 00009: val_loss did not improve from 0.40911\n",
"Epoch 10/300\n",
"712/712 [==============================] - 0s 450us/step - loss: 0.4678 - acc: 0.7949 - val_loss: 0.3936 - val_acc: 0.8101\n",
"\n",
"Epoch 00010: val_loss improved from 0.40911 to 0.39362, saving model to checkpoint_final.hdf5\n",
"Epoch 11/300\n",
"712/712 [==============================] - 0s 470us/step - loss: 0.4531 - acc: 0.8132 - val_loss: 0.4029 - val_acc: 0.8212\n",
"\n",
"Epoch 00011: val_loss did not improve from 0.39362\n",
"Epoch 12/300\n",
"712/712 [==============================] - 0s 483us/step - loss: 0.4576 - acc: 0.8076 - val_loss: 0.3877 - val_acc: 0.8156\n",
"\n",
"Epoch 00012: val_loss improved from 0.39362 to 0.38769, saving model to checkpoint_final.hdf5\n",
"Epoch 13/300\n",
"712/712 [==============================] - 0s 468us/step - loss: 0.4707 - acc: 0.8034 - val_loss: 0.3819 - val_acc: 0.8212\n",
"\n",
"Epoch 00013: val_loss improved from 0.38769 to 0.38185, saving model to checkpoint_final.hdf5\n",
"Epoch 14/300\n",
"712/712 [==============================] - 0s 464us/step - loss: 0.4626 - acc: 0.8034 - val_loss: 0.3824 - val_acc: 0.8212\n",
"\n",
"Epoch 00014: val_loss did not improve from 0.38185\n",
"Epoch 15/300\n",
"712/712 [==============================] - 0s 465us/step - loss: 0.4725 - acc: 0.7935 - val_loss: 0.3826 - val_acc: 0.8212\n",
"\n",
"Epoch 00015: val_loss did not improve from 0.38185\n",
"Epoch 16/300\n",
"712/712 [==============================] - 0s 458us/step - loss: 0.4554 - acc: 0.8104 - val_loss: 0.3769 - val_acc: 0.8268\n",
"\n",
"Epoch 00016: val_loss improved from 0.38185 to 0.37686, saving model to checkpoint_final.hdf5\n",
"Epoch 17/300\n",
"712/712 [==============================] - 0s 461us/step - loss: 0.4565 - acc: 0.8104 - val_loss: 0.3707 - val_acc: 0.8380\n",
"\n",
"Epoch 00017: val_loss improved from 0.37686 to 0.37075, saving model to checkpoint_final.hdf5\n",
"Epoch 18/300\n",
"712/712 [==============================] - 0s 462us/step - loss: 0.4378 - acc: 0.8132 - val_loss: 0.3780 - val_acc: 0.8324\n",
"\n",
"Epoch 00018: val_loss did not improve from 0.37075\n",
"Epoch 19/300\n",
"712/712 [==============================] - 0s 457us/step - loss: 0.4452 - acc: 0.8076 - val_loss: 0.3650 - val_acc: 0.8380\n",
"\n",
"Epoch 00019: val_loss improved from 0.37075 to 0.36499, saving model to checkpoint_final.hdf5\n",
"Epoch 20/300\n",
"712/712 [==============================] - 0s 462us/step - loss: 0.4525 - acc: 0.8076 - val_loss: 0.3754 - val_acc: 0.8436\n",
"\n",
"Epoch 00020: val_loss did not improve from 0.36499\n",
"Epoch 21/300\n",
"712/712 [==============================] - 0s 461us/step - loss: 0.4346 - acc: 0.8174 - val_loss: 0.3658 - val_acc: 0.8380\n",
"\n",
"Epoch 00021: val_loss did not improve from 0.36499\n",
"Epoch 22/300\n",
"712/712 [==============================] - 0s 456us/step - loss: 0.4429 - acc: 0.7963 - val_loss: 0.3620 - val_acc: 0.8324\n",
"\n",
"Epoch 00022: val_loss improved from 0.36499 to 0.36197, saving model to checkpoint_final.hdf5\n",
"Epoch 23/300\n",
"712/712 [==============================] - 0s 451us/step - loss: 0.4420 - acc: 0.8048 - val_loss: 0.3654 - val_acc: 0.8380\n",
"\n",
"Epoch 00023: val_loss did not improve from 0.36197\n",
"Epoch 24/300\n",
"712/712 [==============================] - 0s 459us/step - loss: 0.4237 - acc: 0.8188 - val_loss: 0.3661 - val_acc: 0.8324\n",
"\n",
"Epoch 00024: val_loss did not improve from 0.36197\n",
"Epoch 25/300\n",
"712/712 [==============================] - 0s 461us/step - loss: 0.4378 - acc: 0.8174 - val_loss: 0.3621 - val_acc: 0.8380\n",
"\n",
"Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.00020000000949949026.\n",
"\n",
"Epoch 00025: val_loss did not improve from 0.36197\n",
"Epoch 26/300\n",
"712/712 [==============================] - 0s 447us/step - loss: 0.4346 - acc: 0.8132 - val_loss: 0.3625 - val_acc: 0.8380\n",
"\n",
"Epoch 00026: val_loss did not improve from 0.36197\n",
"Epoch 27/300\n",
"712/712 [==============================] - 0s 474us/step - loss: 0.4328 - acc: 0.8188 - val_loss: 0.3624 - val_acc: 0.8380\n",
"\n",
"Epoch 00027: val_loss did not improve from 0.36197\n",
"Epoch 28/300\n",
"712/712 [==============================] - 0s 460us/step - loss: 0.4285 - acc: 0.8174 - val_loss: 0.3616 - val_acc: 0.8380\n",
"\n",
"Epoch 00028: val_loss improved from 0.36197 to 0.36160, saving model to checkpoint_final.hdf5\n",
"Epoch 29/300\n",
"712/712 [==============================] - 0s 462us/step - loss: 0.4219 - acc: 0.8202 - val_loss: 0.3602 - val_acc: 0.8380\n",
"\n",
"Epoch 00029: val_loss improved from 0.36160 to 0.36017, saving model to checkpoint_final.hdf5\n",
"Epoch 30/300\n",
"712/712 [==============================] - 0s 462us/step - loss: 0.4244 - acc: 0.8188 - val_loss: 0.3642 - val_acc: 0.8324\n",
"\n",
"Epoch 00030: val_loss did not improve from 0.36017\n",
"Epoch 31/300\n",
"712/712 [==============================] - 0s 459us/step - loss: 0.4344 - acc: 0.8034 - val_loss: 0.3621 - val_acc: 0.8380\n",
"\n",
"Epoch 00031: val_loss did not improve from 0.36017\n",
"Epoch 32/300\n",
"712/712 [==============================] - 0s 469us/step - loss: 0.4240 - acc: 0.8174 - val_loss: 0.3607 - val_acc: 0.8380\n",
"\n",
"Epoch 00032: ReduceLROnPlateau reducing learning rate to 4.0000001899898055e-05.\n",
"\n",
"Epoch 00032: val_loss did not improve from 0.36017\n",
"Epoch 33/300\n",
"712/712 [==============================] - 0s 459us/step - loss: 0.4373 - acc: 0.8104 - val_loss: 0.3609 - val_acc: 0.8380\n",
"\n",
"Epoch 00033: val_loss did not improve from 0.36017\n",
"Epoch 34/300\n",
"712/712 [==============================] - 0s 485us/step - loss: 0.4190 - acc: 0.8188 - val_loss: 0.3609 - val_acc: 0.8380\n",
"\n",
"Epoch 00034: val_loss did not improve from 0.36017\n",
"Epoch 35/300\n",
"712/712 [==============================] - 0s 456us/step - loss: 0.4233 - acc: 0.8230 - val_loss: 0.3610 - val_acc: 0.8380\n",
"\n",
"Epoch 00035: ReduceLROnPlateau reducing learning rate to 8.000000525498762e-06.\n",
"\n",
"Epoch 00035: val_loss did not improve from 0.36017\n",
"Epoch 36/300\n",
"712/712 [==============================] - 0s 462us/step - loss: 0.4290 - acc: 0.8244 - val_loss: 0.3610 - val_acc: 0.8380\n",
"\n",
"Epoch 00036: val_loss did not improve from 0.36017\n",
"Epoch 37/300\n",
"712/712 [==============================] - 0s 465us/step - loss: 0.4351 - acc: 0.8146 - val_loss: 0.3610 - val_acc: 0.8380\n",
"\n",
"Epoch 00037: val_loss did not improve from 0.36017\n",
"Epoch 38/300\n",
"712/712 [==============================] - 0s 472us/step - loss: 0.4340 - acc: 0.8174 - val_loss: 0.3611 - val_acc: 0.8380\n",
"\n",
"Epoch 00038: ReduceLROnPlateau reducing learning rate to 1.6000001778593287e-06.\n",
"\n",
"Epoch 00038: val_loss did not improve from 0.36017\n",
"Epoch 39/300\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"712/712 [==============================] - 0s 448us/step - loss: 0.4410 - acc: 0.8076 - val_loss: 0.3611 - val_acc: 0.8380\n",
"\n",
"Epoch 00039: val_loss did not improve from 0.36017\n",
"Epoch 00039: early stopping\n"
]
}
],
"source": [
"df0_cols = len(df0.columns)\n",
"\n",
"model = Sequential()\n",
"model.add(Dense(64, activation='relu', input_shape=(df0_cols,), kernel_initializer=initializer))\n",
"model.add(Dropout(0.5, seed=random_n))\n",
"\n",
"model.add(Dense(2, activation='softmax', kernel_initializer=initializer))\n",
"model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['acc'])\n",
"\n",
"reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.000001,verbose=1)\n",
"checkpointer = ModelCheckpoint(filepath='checkpoint_final.hdf5', verbose=1, save_best_only=True)\n",
"early_stopping = EarlyStopping(patience=10, verbose=1)\n",
"\n",
"epochs = 300\n",
"hist = model.fit(train0, train.Survived, \n",
" epochs=epochs, \n",
" batch_size=5, \n",
" verbose=1,\n",
" validation_split=0.2,\n",
" callbacks=[reduce_lr, early_stopping, checkpointer])\n",
"\n",
"model.load_weights('checkpoint_final.hdf5')\n",
"pred = model.predict(test0)"
]
},
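  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a sketch (not part of the original run): because `ModelCheckpoint(save_best_only=True)` keeps the weights with the lowest `val_loss` and `load_weights` restores them, re-scoring the model on the hold-out slice that `validation_split=0.2` used should roughly reproduce the best `val_loss` (~0.360) logged above. The slice below assumes Keras' behaviour of holding out the last 20% of the rows."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative sketch: re-score the restored best checkpoint on the validation slice\n",
    "# that validation_split=0.2 held out (the last rows, here 179 of 891)\n",
    "split_at = int(len(train0) * (1 - 0.2))\n",
    "x_val = train0.iloc[split_at:]\n",
    "y_val = train.Survived.iloc[split_at:]\n",
    "val_loss, val_acc = model.evaluate(x_val, y_val, verbose=0)\n",
    "print('Restored checkpoint  val_loss: %.4f  val_acc: %.4f' % (val_loss, val_acc))"
   ]
  },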
{
"cell_type": "code",
"execution_count": 22,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAEKCAYAAAD+XoUoAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzt3Xl8FeX1+PHPSdhkEYgEFxYBBSUgqATcKoJWBWtBrVrXorai/sS1bri24PbVAi5YkSotLoiKBaliKSqLG0oQUAggCCiLSGTfhCzn98eZkEvIckNuMje55/16zSu5c5+ZOTOBc5/7zDPPI6qKc865xJAUdgDOOecqjyd955xLIJ70nXMugXjSd865BOJJ3znnEognfeecSyCe9J1zLoF40nfOuQTiSd855xJIjbADKKxJkybaqlWrsMNwzrkqZfbs2T+rampp5eIu6bdq1YqMjIyww3DOuSpFRL6Pppw37zjnXALxpO+ccwnEk75zziUQT/rOOZdAPOk751wC8aTvnHMJxJO+c84lkLjrp++cc2Hbvh3WrYOsLPuZv6SlwW9/CyJhR7j/POk75xJWTg7MmQMffQRTp8LixZbcd+wofpszzoBnn4X27SsvzljypO+cK5ctWyAzE+bPh+xsaNp076VRo/ipGeflwTffWIL/6COYPt3iB+jQAU45BQ4+eO/4U1Pt50EHwSuvwH33QadOcNtt8MAD0KBBuOdUVqKqYcewl/T0dPVhGFxVlJMDyckVk+B27oQ774SlSwuSUFFLs2ZQo4xVuZ074b33YNw42Lhx32QX+XrjRkvu8+fDggX284cfSt5/jRoF+7r4Yhg4cP+uUXY2JCXZNS6rFSvgkUdg/HhYv97WHXkknH66LT16WLKPRlYW3HMPjBoFhx0GQ4bA739f+jnt3m3XIqmC7qSKyGxVTS+1nCd958onNxdeegnuvRd+/Wt4/fXYJv6ffoI+fWDWLDj+eEtaP/1kybqwAw+E7t0LktkxxxSdZHbtgv/9D8aOhYkTYds2S3qHH17Qjr19e/Ex1aoFRx8NHTtaDTn/Z926e7eBR7aJf/stfPwx3HADDB9etuT37bfWlr5lC1x/PVx3HRxySOnbrVplyf6ll+x4F19sf6OePaFFi+iPX5SZM+HGG+Grr2x/zz5r1yAnB777bu8Pxvnz7RwOOwyGDYMLLoh95SDapI+qxtXSpUsXdeHIyws7gopVEef3xReq6emqoHrkkfbziSdit//581UPP1y1bl3V8eP3fm/bNtVly1RnzlSdOFF15EjV665TbdvW4gDVgw5S/d3vVJ97TnXBAtXJk1Wvvlq1YUN7PyVFtX9/1Q8/VM3J2Xf/y5fbOf7nP6ovvaT65puqCxeqZmeX/Vzy8lTvusuOe/nlqrt3R7fd9OkWZ5MmqmefbdvXrGn7mDmz6G1+/FH1lltUa9e2sjfcoLpqVdljLk1Ojurzz6s2bqxao4bqMcfYMfOvv4jqEUeo9u2rOnCgaqdOtv7MM1UXLYptLECGRpFjQ0/yhRdP+uFYskT1sMNUO3ZUffhhe11V5eaqfved6jvvqD76qOpll9l/tlq1VI86SvXBB1UzM8t3jKws1Wuvtf/Uhxyi+uqrdtyLLlJNSlL96KPyn8fkyaoHHqh66KGqGRll2/aHH1RHj1a96irVli0LkhDYPvv1U33//egTb6zk5dnfBCwR7txZcvmXX7akffTRqkuX2rrFi1Vvvlm1QQPbT7duqq+8ovrLL/Z3ufNO1QMOUE1OVv3jH+2Dq6JlZakOGKDaq5fqHXeo/utf9jfbvn3vctnZqs88Y3+DmjVV77nHPlxjwZO+i9ratapt2lit8JRTCpJDly6qTz6p+v33YUdo/1keekj14ouLXy68ULVrV6sVRya5li1VzzlH9bbbVHv2tEQN9kHwyCMFySQa+TW7lBRLKrffrrp5c8H7W7ZYgkpNVV25cv/Pd8QI23+nTpbAyyMvz87xpZdUJ0woPdFWhuHD7W9w+umqW7fu+35enuoDDxSU2bBh3zJbtqg++6x9kINq06aq9evb3/eKK+K74rJ2rX3wgmrz5vYNqrzfRKNN+t6mn+C2brWbWIsWWW+GE06AlSvhzTfhjTesHRng5JPhkkvg3HOhdevKjTE3F66+2npOtG1b8o28Zs0K2pg7drR+1Q0b7l3mxx/tpuXYsfDZZ7YuPd1uxqWlFb/vHTvg8cdh9my7ZsOH27EKW7gQunWz40+fbu3fZTnXu++2m4PnnGMxVrXeIdF65RX7u6anw6RJkJJi63/5Ba65xu6NXHMNPP98ydcwLw+mTIEXXrB7CvfeW/LfMZ589pndF5g71+41PPus3SvZHzFt0wd6AYuBpcA9RbzfEpgKzAG+Bs6JeG9gsN1i4OzSjuU1/cqza5e1LSYnq773XtFlli612nB+WySotmpl7cKvvFIx7aSRcnPtKzqoDh4c+/1//719m+nSZe9vB8Uthx6q+vrrpdfK3nrLyt94Y/SxbNumet55tt2AAfvXbl7V/Pvf1ux2zDHWDr9uXcG3zcceq/73mVTt2+Pw4aqNGql26LD/50ysavoikgx8C5wJrAJmAZeqamZEmZHAHFV9XkTSgEmq2ir4/XWgG3AY8AHQTlVzizue1/T3z8KFcOWV0LUrPPyw9SkuSV6elR8zBv75T7jqqtKPsXix1aimTrVl40Zb366d9RTp2dN6JxRHBI49FurVi+6cVK0W9PzzcP/9MHhwdNvtrxUrrJdJSTp0iD7+O+6wGvsrr8AVVxRfTtV60Nx7r33jGjYMbr456rCrvClT4Lzz7N9OXh6sWQMvvwwXXRR2ZJVr3To792OP3b/tY1bTB04CJke8HggMLFTmBeDuiPKfFVUWmAycVNLxvKZfdh98YL0xGje2WvtBB1lPjtzc4rf5858LalP7IzdX9auvVIcMUf3NbwpuqpW2NGumOnZs6bWZvDzrfQHW46Mq1viys1VPO81uKs6bt+/7eXmqkyYV9P5p29ZuriaiTz+1f8NNm6p+/nnY0VRNxOpGLnAh8GLE6yuB4YXKHAp8g30T2Ah0CdYPB66IKPcScGFJx/OkXzYvvmhdxTp0UF2xQvXrr1W7d7e/bNeuql9+ue82f/ubvX/TTbFLptnZqrNmqU6ZUvzy9tuqxx2ne27OLVhQ9L7y8qwHBljir4oJP9/atdYr6ogjVDduLFj/4YeqJ59c0Fw2alRiNOeUZPVq6wXj9k8sk/5FRST9ZwuVuR34sxbU9DOxETyfKyLp/66IY/QHMoCMli1bVsoFqupyc1Xvvtv+gmefrbppU8F7eXmqr71m7c8i1rUw/z/Tq6/aNhddtG+/7MqQk6P6979b+2WNGvaNY8uWvcvcf7/FeMMNVTvh5/v0UzvX3/5WdcYM1R49Cr71jBhh91acK69YJv1omncWAC0iXi8DmnrzTsXYvt0euAHV668vvoa4ebN1KUxOti6Gd91lyadHj/C77a1bV3CD9tBDVceMsQQ/aJCt+
9OfSm6eqmqeeaagievgg1Wffjr8v4GrXmKZ9GsESbw1UAuYB3QoVOZ94Krg9/bAGkCADkH52sH2y4Dkko6XKEk/J0f1p59Uv/nGHuQZO9bu4L/9tj2pV1wi//FHexhFRHXo0OhqwvPnF9QuO3Xa+1tB2GbOLOg5k5ZmP//wh+qV8FXt7zR4sDWtFX5gx7lYiDbpR9VPX0TOAZ4CkoFRqvqIiAwKDjIx6KXzD6A+oMBdqvq/YNv7gGuAHOBWVX2/pGNV1947u3dD//6QkWF36X/+2ep9xald24ZujRzbpEED6NfPxjMZMwb69o3++KrW46Zz59J79lS23Fx48UXrvXLOOfCvf+3foFrOJTIfcC3ODBliXfh+8xsb6KmoUQxTUmD16n1HMVy5smA/hx4K//kPdOkS3rlUlNxcT/bO7a9ok76Pp18J1q6Fv/7VEv6775Zc9pBD9k3omzfbB8DSpXDmmZb4qyNP+M5VPE/6lWDgQHu0fNiw/du+YUMbBuHkk2Mbl3Mu8fjE6BXsiy+sjfq222zcGOecC5Mn/QqUl2eP0x9yiA0j4JxzYfPmnQr08svw5Zf2s7qOlOicq1q8pl9BNm+2eTRPOgkuvzzsaJxzznhNv4IMHmz98d99t+ImQnbOubLydFQBFi2Cp5+2CSDSSx/o1DnnKo0n/RhThVtvtRl8Hn007Gicc25v3rwTY+++C5Mnw9Ch9pStc87FE6/px9Avv1gtv317GDAg7Gicc25fXtOPoWHDYNky+N//oGbNsKNxzrl9edLfD7/8YvPF5g+Mlj842vLlNtfnmWeGHaFzzhXNk36E3FzYsMG6WuYvWVkFv69daz1zliyxp23BavRHHQUnnAB/+hNcf3245+CccyXxpB9Yvx66drXaemFJSdCkid2YTUuDiy+28e07drTxdLwpxzlXVXjSD4wYYQn/scegdet9x7n3YX+dc9VBVElfRHoBT2MzZ72oqo8Xen8Y0DN4WRdoqqqNgvdygW+C935Q1T6xCDyWdu2C4cPh7LNt6ATnnKuuSk36IpIMPAecCawCZonIRFXNzC+jqrdFlL8JOC5iFztV9djYhRx7Y8ZYe/3LL4cdiXPOVaxo+ul3A5aq6jJV3Q2MBUqanfVS4PVYBFcZVG0qw06d4Ne/Djsa55yrWNEk/WZAxCytrArW7UNEDgdaAx9FrK4jIhkiMlNEztvvSCvI5MnW3fL220Ek7Gicc65iRdOmX1QqLG429UuAcaqaG7GupaquEZE2wEci8o2qfrfXAUT6A/0BWrZsGUVIsTNkiM05e+mllXpY55wLRTQ1/VVAi4jXzYE1xZS9hEJNO6q6Jvi5DJjG3u39+WVGqmq6qqanpqZGEVJszJsHH3xgs1vVqlVph3XOudBEk/RnAW1FpLWI1MIS+8TChUTkKKAx8HnEusYiUjv4vQlwCpBZeNuwDB0K9erBddeFHYlzzlWOUpt3VDVHRAYAk7Eum6NUdYGIDAIyVDX/A+BSYKyqRjb9tAdeEJE87APm8cheP2FaswZef92eoG3cOOxonHOuckTVT19VJwGTCq17sNDrvxSx3WfAMeWIr8I8+6wNu3DrrWFH4pxzlSchh1bets2ewD3/fGjTJuxonHOu8iRk0h81CjZtgj//OexInHOuciVc0s/NhaeegpNPhpNOCjsa55yrXAmX9MePt4HVvJbvnEtECZX0VeFvf4MjjoC+JQ0k4Zxz1VRCDa382WfwxRfWc8eHSnbOJaKEqukPGWJ98q++OuxInHMuHAmT9NevhwkToH9/ewrXOecSUcIk/QULrE2/R4+wI3HOufAkTNLPDAZ/6NAh3Diccy5MCZX069eH5s3DjsQ558KTUEk/Lc0nSnHOJbaESfoLFljSd865RJYQSX/DBpv43JO+cy7RJUTSX7jQfvpNXOdcoosq6YtILxFZLCJLReSeIt4fJiJzg+VbEdkU8V4/EVkSLP1iGXy08nvueE3fOZfoSh2GQUSSgeeAM7H5cmeJyMTIGbBU9baI8jcRzIMrIinAQ0A6Npn67GDbjTE9i1JkZkLdulDJc64751zciaam3w1YqqrLVHU3MBYoabiySymYHP1sYIqqbggS/RSgV3kC3h8LFkD79pCUEI1ZzjlXvGjSYDNgZcTrVcG6fYjI4UBr4KOybluR8rtrOudcoosm6RfVs12LWAdwCTBOVXPLsq2I9BeRDBHJyMrKiiKk6G3eDKtXe9J3zjmILumvAlpEvG4OrCmm7CUUNO1Eva2qjlTVdFVNT01NjSKk6HnPHeecKxBN0p8FtBWR1iJSC0vsEwsXEpGjgMbA5xGrJwNniUhjEWkMnBWsqxiqNh9iBO+545xzBUpN+qqaAwzAkvVC4E1VXSAig0SkT0TRS4GxqqoR224ABmMfHLOAQcG62Fu1ClJS4JVX9lqdmQl16kCrVhVyVOecq1KimjlLVScBkwqte7DQ678Us+0oYNR+xhe9Qw6BnTsLqvaBBQvg6KN9piznnIPq9ERujRpw1FH7JH3vueOccwWqT9IHy+4RSX/rVvjhB7+J65xz+apf0l+xArZvB2DRooLVzjnnqmPSV4XFiwHvueOcc4VVv6QPe7L9ggVQqxa0aRNiTM45F0eqV9I/8ki7oRsk/cxMu7dbI6o+Ss45V/1Vr6Rfsya0a7dX0vemHeecK1C9kj5Yll+wgO3b7Z6u99xxzrkC1TPpL1vGt/N2ouo1feeci1Q9k35eHqunfrvnpXPOOVM9kz6w7ctMatSwe7vOOedM9Uv67dpBUhKyKJN27ezernPOOVP9kn7t2nDkkTRanek3cZ1zrpDql/SB3KPSaLk909vznXOukGqZ9H9umkZbltCx3e6wQ3HOubhSLZP+0todqEEunesuCTsU55yLK1ElfRHpJSKLRWSpiNxTTJmLRSRTRBaIyJiI9bkiMjdY9plmsSLM2WXtOq12ZJZS0jnnEkupo9KISDLwHHAmNtH5LBGZqKqZEWXaAgOBU1R1o4g0jdjFTlU9NsZxl+iTrKP4fwg1l3jSd865SNHU9LsBS1V1maruBsYCfQuVuRZ4TlU3AqjqutiGWTZzFx/AT/Xa7DOLlnPOJbpokn4zYGXE61XBukjtgHYi8qmIzBSRXhHv1RGRjGD9eUUdQET6B2UysrKyynQChe3aBUuXwubD0jzpO+dcIdEkfSlinRZ6XQNoC/QALgVeFJFGwXstVTUduAx4SkSO2GdnqiNVNV1V01NTU6MOvijffgu5uZB3dJpNppKTU679OedcdRJN0l8FtIh43RxYU0SZd1Q1W1WXA4uxDwFUdU3wcxkwDTiunDGXKL9yX79bGmRnw3ffVeThnHOuSokm6c8C2opIaxGpBVwCFO6FMwHoCSAiTbDmnmUi0lhEakesPwWo0DaXzExISoKDe+49i5Zzzrkokr6q5gADgMnAQuBNVV0gIoNEpE9QbDKwXkQyganAnaq6HmgPZIjIvGD945G9fipCZiYccQTU7nx0wQrnnHNAFF02AVR1EjCp0LoHI35X4PZgiSzzGXBM+cOM3p7Z
surXh8MPt4lynXPOAdXsidzsbLuRu2fMnTTvweOcc5GqVdJfssQ66+yV9Bctsu48zjnnqlfSz6/U75X0d+2C5ctDi8k55+JJtUv6InB0cA93T/b3Jh7nnAOqYdJv3Rrq1g1WtG9f8IZzzrnql/T3mjilYUNo1syTvnPOBapN0s/JsVEX9pkty3vwOOfcHtUm6a9dC02aFJP0Fy6EvLxQ4nLOuXgS1cNZVUHz5rB6NWjhoeDS0mDHDvjhB2jVKozQnHMublSbmn4+KTwmqPfgcc65Papd0t+H9+Bxzrk9qn/SP+ggOPhgT/rOOUciJH2ADh086TvnHImS9PO7be5zl9c55xJL4iT9rVute49zziWwqJK+iPQSkcUislRE7immzMUikikiC0RkTMT6fiKyJFj6xSrwMvEePM45B0SR9EUkGXgO6A2kAZeKSFqhMm2BgcApqtoBuDVYnwI8BJwAdAMeEpHGMT2DaHjSd845ILqafjdgqaouU9XdwFigb6Ey1wLPqepGAFVdF6w/G5iiqhuC96YAvWITehmkptrjuj6LlnMuwUWT9JsBKyNerwrWRWoHtBORT0Vkpoj0KsO2lcPH4HHOuaiSfuFnXAEKd4OpAbQFegCXAi+KSKMot0VE+otIhohkZGVlRRHSfvAePM45F1XSXwW0iHjdHFhTRJl3VDVbVZcDi7EPgWi2RVVHqmq6qqanpqaWJf7opaXBpk02MptzziWoaJL+LKCtiLQWkVrAJcDEQmUmAD0BRKQJ1tyzDJgMnCUijYMbuGcF6yqf38x1zrnSk76q5gADsGS9EHhTVReIyCAR6RMUmwysF5FMYCpwp6quV9UNwGDsg2MWMChYV/k6dLCfX38dyuGdcy4eiMZZG3d6erpmZGRUzM7btIFOnWDChIrZv3POhUREZqtqemnlEuOJ3Hw9e8L06ZCbG3YkzjkXisRL+ps2wbx5YUfinHOhSLykDzB1arhxOOdcSBIr6TdrBm3betJ3ziWsxEr6YLX9jz+GnJywI3HOuUqXmEl/yxaYMyfsSJxzrtIlXtLv0cN+ehOPcy4BJV7SP+QQmyzdk75zLgElXtKHgnb97OywI3HOuUqVmEm/Rw/Yvh0q6slf55yLU4mb9MGbeJxzCScxk35qKnTs6EnfOZdwEjPpg7Xrf/op7NoVdiTOOVdpEjvp79wJX34ZdiTOOVdpEjfpn3YaiMC0aWFH4pxzlSaqpC8ivURksYgsFZF7inj/KhHJEpG5wfKniPdyI9YXnnErPCkp0Lmzt+s75xJKjdIKiEgy8BxwJjbn7SwRmaiqhecdfENVBxSxi52qemz5Q60APXvC3/8Ov/wCdeqEHY1zzlW4aGr63YClqrpMVXcDY4G+FRtWJenZ027kzpwZdiTOOVcpokn6zYCVEa9XBesK+52IfC0i40SkRcT6OiKSISIzReS88gQbc927Q1KSN/E45xJGNElfilhXeGLd/wCtVLUT8AEwOuK9lsG8jZcBT4nIEfscQKR/8MGQkZWVFWXoMdCwIRx/vCd951zCiCbprwIia+7NgTWRBVR1varmd3j/B9Al4r01wc9lwDTguMIHUNWRqpququmpqallOoFy69nTmnd27Kjc4zrnXAiiSfqzgLYi0lpEagGXAHv1whGRQyNe9gEWBusbi0jt4PcmwClA4RvA4erRwwZe++yzsCNxzrkKV2rSV9UcYAAwGUvmb6rqAhEZJCJ9gmI3i8gCEZkH3AxcFaxvD2QE66cCjxfR6ydcp54KycnexOOcSwiiWrh5Plzp6emaUdmjX554oiX+Tz+t3OM651yMiMjs4P5piRL3idxIPXvacAzbtoUdiXPOVShP+mBJPyfHa/rOuWrPkz7AKadAzZreru+cq/Y86QPUqwfdunnSd85Ve5708/XsCbNnw5YtYUfinHMVxpN+vp49ITcXnn4aNmwIOxrnnKsQnvTznXyyDbX84IPQtCn8+tfw3HOwenXYkTnnXMx40s9Xpw589RV88QXceSesWgUDBkDz5nDCCfDYY7BoUdhROudcuXjSj5SUZDd08xP8woXw6KOgCvfeC+3bw+mne9dO51yV5Um/JEcfDQMH2oNbK1fCk0/CggXwq19Br14+v65zrsrxpB+t5s3hjjtg2TJ44gnIyLBmnz59YM6csKNzzrmoeNIvq3r1rM1/+XJ4+GH4+GMbk/93v4P588OOzjnnSuRJf381aAD33WfJ/8EHYcoU6NTJvg3k5IQdnXPOFcmTfnk1agR//SusWAHXXQdDhlh3z59+Cjsy55zbhyf9WElJgeefh5dfthu8xx8Pn38edlTOObcXT/qxduWVluzr1IHTTrMHvOJszgLnXOKKKumLSC8RWSwiS0XkniLev0pEskRkbrD8KeK9fiKyJFj6xTL4uNW5s/XuOesse8DrD3/wOXidc3Gh1KQvIsnAc0BvIA24VETSiij6hqoeGywvBtumAA8BJwDdgIdEpHHMoo9njRvDxIkwaBC89hqcdBJ8913YUTnnElw0Nf1uwFJVXaaqu4GxQN8o9382MEVVN6jqRmAK0Gv/Qq2CkpLggQdg0iR7uKtLFxvmwTnnQhJN0m8GrIx4vSpYV9jvRORrERknIi3Ksq2I9BeRDBHJyMrKijL0KqRXLxu2uV49uPvusKNxziWwaJK+FLGu8J3J/wCtVLUT8AEwugzboqojVTVdVdNTU1OjCKkKat3a+vBPn+61fedcaKJJ+quAFhGvmwNrIguo6npV3RW8/AfQJdptE8q111pb///9X9iROOcSVDRJfxbQVkRai0gt4BJgYmQBETk04mUfYGHw+2TgLBFpHNzAPStYl5jq14cbb4QJE2Dx4rCjcc4loFKTvqrmAAOwZL0QeFNVF4jIIBHpExS7WUQWiMg84GbgqmDbDcBg7INjFjAoWJe4broJate2ETudc66SicbZg0Pp6emakZERdhgV68Yb4cUXbdyeww4LOxrnXDUgIrNVNb20cv5Ebhj+/GcblO3pp8OOxDmXYDzph6FNG7j4YhgxAjZvDjsa51wC8aQflrvugi1bLPE751wl8aQfluOOs7F5nnoKfvkl7GiccwnCk36Y7r4b1q6FV14JOxLnXILw3jthUoWuXa2ZZ+FCSE4uvmxWFjz+uH0raNbMlsMOK/j9wANBinoA2jmXCKLtvVOjMoIpr+zsbFatWsUv1bAZpM7TT9P8t7+l5oQJNs9uYarw+utw88324VC/PmzcuG+5evVs8vYzz4T+/eGYYyo+eOdclVMlavrLly+nQYMGHHTQQUg1qs2qKut//pmtEyfS+oUXbEyeyPNbuRJuuAHeew9OOAFeegk6dLCx+desgdWrC36uXm1DN0+eDLt321DO/ftbL6G6dSvrhPb/28aMGTBwIPzmN3DvvbGNy7kEEG1NH1WNq6VLly5aWGZmpubl5e2zvjrIy8vTzC++UAXVjz6ylbm5qiNGqDZooFq3ruqwYao5OdHtMCtLdehQ1aOPtn02bKh6442q8+ZV3Emoqn78sWqzZqqnnqo6ZYpqtH+vNWt
UL7/cYq1Xz34+9ljFxupcNQRkaBQ5tsrcyK1ONfxIImJNNgcfbAOxLVkCp58O118P3brBN9/ArbeW3N4fqUkTuO02yMy02vNvf2tP/3buDCeeCJ98EtsTULUpIXv2tOElli2zJqaTT4b33y9+qsjsbBg2DI46CsaNs3kH1q6Fyy6zGr8/uOZcxYjmk6Eyl+Jq+tVZZmam6qOPWi23Th2rnb/4YvS15dKsX6/61FOqrVqp1qql+tZbsdnvzp2qV19tcZ97rurGjaq//KL6/POqLVva+vR01Xfe2ftcpk1T7dDB3u/dW3XJkoL3srNVL7jA3nvhhdjE6VwCIMqafuhJvvCSsEl/40bVFi1UzztPdfXqijnQ+vWqJ5+sKqL6zDPl29cPP1hCB9UHH7QmqUi7dtkHV5s2VqZzZ9VXX1W97DJ7ffjhqhMmFP3BtmuX6jnnWJyjR5cvTucShCf9CtCBueYcAAAUoUlEQVS3b189/vjjNS0tTV8IaqHvv/++HnfccdqpUyc9/fTTVVV169atetVVV2nHjh31mGOO0XHjxpW43z3nVxn3LXbssA8WUL377n2TdTSmTVNNTbV7DhMmlFw2O9sSd9u2dszatVUfeEB1+/aSt9u5U/WMM1STklTfeKPsMTqXYKJN+lWiy2akW2+FuXNju89jj7UHY0szatQoUlJS2LlzJ127dqVv375ce+21zJgxg9atW7Nhg40aPXjwYBo2bMg333wDwMaiulgWpTLuWxxwgLWh33ST3UNYvdp6BdWqVfq2qvDss3D77XDkkTYvwNFHl7xNjRrwhz/A5ZfDBx9A27Y29lBp6tSBd96B3r1t29q1oW+0UzM754pT5ZJ+mJ555hnGjx8PwMqVKxk5ciTdu3endevWAKSkpADwwQcfMHbs2D3bNW7cuPKDLUlyst18bdYM7r8ffvoJ3n4bGjQouvzGjfDRR/DaazB+PPTpY08RH3hg2Y559tlli7NePXj3XRuu4uKLYeLE4veRnQ3bttnMZM65YkWV9EWkF/A0kAy8qKqPF1PuQuAtoKuqZohIK2zilfxpomaq6vXlCTiaGnlFmDZtGh988AGff/45devWpUePHnTu3JnFRcyAparx39tIBO67zxL/n/4Ep50GkybBIYdYP/+ZM2HKFFtmzYK8PPtQGDzY+tEnVVLHrwMPtF5Ap58O551nD7Bt2WIfRJs22bJxI2zfbuUvvhjGjIm+t5NzCabUpC8iycBzwJnYnLezRGSiqmYWKtcAmzWr8Kzf36nqsTGKNzSbN2+mcePG1K1bl0WLFjFz5kx27drF9OnTWb58+Z7mnZSUFM466yyGDx/OU8En1MaNG+Ovtp/vqqusu+hFF9kDXR07wrRpVmtOSrKHwu6/32rb3bpBzZqVH2Pjxvbhc9FF8Omn0KiRrWvbtuD3Ro1g3ToYPtzO5+mnY99clpVlH0K1a8d2v85Vomhq+t2Apaq6DEBExgJ9gcxC5QYDTwB3xDTCONGrVy9GjBhBp06dOOqoozjxxBNJTU1l5MiRXHDBBeTl5dG0aVOmTJnC/fffz4033kjHjh1JTk7moYce4oILLgj7FIrXuzdMnWpJddEiuPJK62vfs6cl03jQpInFWJpatWDoUDj8cJusJhZmzbJ9vvWWfQhOmVJ5Tzk7F2PRJP1mwMqI16uAEyILiMhxQAtVfVdECif91iIyB9gC3K+qH5cn4LDUrl2b999/v8j3evfuvdfr+vXrM3r06MoIK3a6drXpG+O9Wao0Tz5pw1fccYeNRfT73+/ffnJz4T//sWT/8cdWw7/sMnj1VWtCGj8+nG89zpVTNA2zRWWBPY9ZikgSMAwoqlr1I9BSVY8DbgfGiMg+d/9EpL+IZIhIRlZWVnSRu9ir6gkfrEnq5ZfhV7+yXkMzZpRt++3b4e9/t15J558PP/xgiX/lStvv88/bWEh//KPd53Cuiokm6a8CWkS8bg6siXjdAOgITBORFcCJwEQRSVfVXaq6HkBVZwPfAe0KH0BVR6pquqqmp6am7t+ZOJcvv7tnmzbWzTOzcEtkEX780W5st2xpE9enpMAbb8DSpTasRX5Ppeuus5vZr7wCd95Z/DATzsWpaJp3ZgFtRaQ1sBq4BLgs/01V3Qw0yX8tItOAO4LeO6nABlXNFZE2QFtgWQzjd65oKSnW6+ekk+yexeef2/wDhX39tdXkx4yxyer79rV7AaecUvw3n/vus5vGQ4faTeO77io9nt277UNk7Vr7AMlfGjbc+3XjxtXjG5eLW6UmfVXNEZEBwGSsy+YoVV0gIoOwJ8AmlrB5d2CQiOQAucD1qrohFoE7V6pWrawppnt3G7J5xgzrdqpqQ1APGWIPjNWtazX4W26xh85KI2J9h3/+2WY/a9IErrmm6LLZ2fatYPBgWLGi9H1feKHNn1DDH6FJKDt32t99xw4YMKBijxXNY7uVucTzMAwVpbqfX+jef181OVn1rLNsPKC0NBsS4rDDbBjn9ev3b7+7dtk+k5JsULlI+cNPHHGE7hl4btIk1a1bbWylhQtVv/jChqEeN0511CjV226zstdeWzlDcrjwLV+uetddqikp9rc/9dT9/tvjY+9UHdX9/OLCSy/ZP/f8wd9eftmSdnlt3ararZuNjjp9us178Nprqu3a2bGOO0514sTo/yPfe69t99BDZYvju+9Ujz9e9dBDVX/zGxvfaPx41e+/T4wPkF277Nr97W9hR1K6vDz7sO/b1yoMSUk2suzUqeX6W0Wb9P07ZJTq16/Ptm3bwg7D7a9rrrH28oYN7fmDWLWb169vTUinnmpzFzRrZvMdd+pk3Tr79i3bsR5+2GZD++tf4dBDrdmpNJ9/bsfJyYFzz7XBqf77X+t2CnDQQXD88bYcfbTF2Ly5LcUNvRGGhQvtwbdoxmaKtG6dPWOS31OrRg1rqqtM8+ZZd96VK+3fWf6SkrL3608/tQcIFy2yZsG777a5M1q2rLxYo/lkqMwlXmv69erVq7B9x8P5uXL6/nsbLjotzeYr2J/RS/Pt3m1DSyclWW29JG+8YSOXHnmk6uLFBeu3b1edOVP1739X/eMf7RtHzZoF33bylwMPVG3fXvXMM21uhKFDVefOLV/8ZbVpk+pNN9n51qqlOmhQ9N/CZs+2Icnr1FF95RXV88+3IbnHjq3YmFVVf/xRdcgQ++YIdn3btrURaGvU2Pda5y9duqj+6182kmwM4c07sZWf9PPy8vSOO+7QDh06aMeOHXVs8I9rzZo1euqpp2rnzp21Q4cOOmPGDM3JydF+/frtKTt06NAi9x0P5+diIDs7dk0p27YVNBt9/PG+7+flFUy886tf2TSZpdm1y5qBpk+3JqgnnlC9+WZrWjjhBLvHkZ+YmjRRvegim7bz228rpokoL8/iOPhgS/g33qh6ySV2/LQ01U8+KXn7MWNUDzjAkv7s2bZuxw67HrVqqX74Yexj3rHDPlDOOcfuE4H9nYYPV/35573PbcsWqwzMmWNToY4bp/rllxXW3BZt0q96zTthjq0M/Pvf/2bu3LnMmzePn3
/+ma5du9K9e3fGjBnD2WefzX333Udubi47duxg7ty5rF69mvnz5wOwadOm2Mbt4ksse9zUq2fNRqecYs1Gn34KaWn2Xna2NQmMGmVPCY8aFd14QLVqWdNJSc0nK1faiKoffQQffmhDTwC0aFEw6N2555b/XBcutOchpk61p8Hfew+6dLH3rrwS/t//swfsrrsOHn987+FAcnNtSs0nn7RmtXHjoGlTe++AA2w01lNPtVhnzLD/3+WRmwvTp1vvmrfegs2brWnsrrss1vbt991GxJrOGjSo3KabKFSZOXLjxSeffMKll15KcnIyBx98MKeddhqzZs2ia9eu/POf/+Qvf/kL33zzDQ0aNKBNmzYsW7aMm266if/+978cWJahiJ1r0sTa5uvUgV69YNUqG1W0d29L9A8+aO3IsRwArkUL6NcPRo+2D4DFi+0J5RNOsGEpzj/fxjV66CF7v6x27LBRWjt3hjlz7Annzz8vSPgA55wD8+fbvA3/+Icl1bfesu8gGzda99snn4QbbrAut/kJP1/jxvaMRsOGdq2i6SpbmKqNuXTbbXZNzjjDkn6fPnbMFSvg0UeLTvjxLpqvA5W5xHvzzi233KIvvfTSnvVXXHGFvhN011u9erWOHDlSO3bsqKODaf62bt2q48aN03PPPVevvvrqIvcdD+fn4ticOTZLWYcO1v5es2Y400hmZ9tMab17W7t5UpJqnz6q771nvZaKkpenunat6owZqs89Z/c9QLVfP9Wffir9mBkZ1isJrFfSkUfa+Y8cWfq28+erNmpkPamiaf5Sta60DzxgxwFrJurb1+6dlDbbW8jwNv3Yyk/6b7/9tp511lmak5Oj69at05YtW+qPP/6oK1as0OzsbFVVHTZsmN5yyy2alZWlmzdvVlXVOXPmaOfOnYvcdzycn4tzH35oya5RI+vaF7Zly1QHDlRt2lT3zHn8yCOWHAcNUr3iCtWuXVUbNtS9bmJ26GD3FMoiO9tumNata+3/pbX1R/r4Y7svcsIJdp+ksE2b7IPsppsKnt8Qsak6X3xRdcOGssUaomiTvljZ+JGenq4ZGRl7rVu4cCHtQ/4ald9lU1W56667eP/99xER7r//fn7/+98zevRonnzySWrWrEn9+vV5+eWX2bJlC1dffTV5wcBcjz322D4jckJ8nJ+rAubMse6X8dRGvHu3jXM0YoTdB8jXogW0awdHHWVL/u+HH77/E/D89JONbBrMUBe1CRNs8p1evWwojC++sPsVH34IGRk2cN4BB9h9gN69bRTVoobsiHMiMltV00st50k/fNX9/FyCWLbMZjVr29ZuRMeTESPsHkBSkiX55GS7T3HGGbaceGKVnxwn2qRf9XrvOOfiU1kfqqpM1weztC5ZYr2QunePrwfTKpEnfedcYri+XNNzVxveZdM55xJIlUn68XbvIVaq63k55+JTlUj6derUYf369dUuQaoq69evp06dOmGH4pxLEFWiTb958+asWrWK6jh/bp06dWjevHnYYTjnEkRUSV9EegFPYzNnvaiqjxdT7kLgLaCrqmYE6wYCf8RmzrpZVSeXNciaNWvSunXrsm7mnHOukFKTvogkA88BZ2KTpM8SkYmqmlmoXAPgZuCLiHVp2Jy6HYDDgA9EpJ2q5sbuFJxzzkUrmjb9bsBSVV2mqruBsUDfIsoNBp4AfolY1xcYq6q7VHU5sDTYn3POuRBEk/SbAZHD6a0K1u0hIscBLVT13bJuG2zfX0QyRCSjOrbbO+dcvIimTb+oud72dKMRkSRgGHBVWbfds0J1JDAy2F+WiHwfRVzFaQL8XI7tK5rHVz4eX/l4fOUTz/EdHk2haJL+KqBFxOvmwJqI1w2AjsA0sblADwEmikifKLbdh6qmRhFTsUQkI5rxJ8Li8ZWPx1c+Hl/5xHt80YimeWcW0FZEWotILezG7MT8N1V1s6o2UdVWqtoKmAn0CXrvTAQuEZHaItIaaAt8GfOzcM45F5VSa/qqmiMiA4DJWJfNUaq6QEQGYeM3Tyxh2wUi8iaQCeQAN3rPHeecC09U/fRVdRIwqdC6B4sp26PQ60eAR/Yzvv0xshKPtT88vvLx+MrH4yufeI+vVHE3nr5zzrmKUyXG3nHOORcb1Sbpi0gvEVksIktF5J6w4ylMRFaIyDciMldEMkrfouKJyCgRWSci8yPWpYjIFBFZEvxsHGfx/UVEVgfXca6InBNSbC1EZKqILBSRBSJyS7A+Lq5fCfHFy/WrIyJfisi8IL6/Butbi8gXwfV7I+g8Ek/x/UtElkdcv2PDiK9coplIN94X7Abzd0AboBYwD0gLO65CMa4AmoQdR6GYugPHA/Mj1j0B3BP8fg/wf3EW31+AO+Lg2h0KHB/83gD4FkiLl+tXQnzxcv0EqB/8XhMbvuVE4E3gkmD9COCGOIvvX8CFYV+/8izVpaYf7VARLoKqzgA2FFrdFxgd/D4aOK9Sg4pQTHxxQVV/VNWvgt+3Aguxp83j4vqVEF9cULMteFkzWBQ4HRgXrA/z+hUXX5VXXZJ+VMM9hEyB/4nIbBHpH3YwJThYVX8ESxxA05DjKcoAEfk6aP4Jrfkpn4i0Ao7DaoNxd/0KxQdxcv1EJFlE5gLrgCnYt/VNqpoTFAn1/3Hh+FQ1//o9Ely/YSJS5WZTry5JP6rhHkJ2iqoeD/QGbhSR7mEHVEU9DxwBHAv8CAwJMxgRqQ+8DdyqqlvCjKUoRcQXN9dPVXNV9VjsSf1uQPuiilVuVBEHLhSfiHQEBgJHA12BFODusOLbX9Ul6Zd5uIfKpqprgp/rgPHE72ijP4nIoQDBz3Uhx7MXVf0p+M+YB/yDEK+jiNTEEuprqvrvYHXcXL+i4oun65dPVTcB07A280Yikv/8UFz8P46Ir1fQbKaqugv4J3Fw/cqquiT9EoeKCJuI1AvmG0BE6gFnAfNL3io0E4F+we/9gHdCjGUf+Qk1cD4hXUexgaZeAhaq6tCIt+Li+hUXXxxdv1QRaRT8fgDwa+y+w1TgwqBYmNevqPgWRXygC3a/IV7/Hxer2jycFXQ9e4qCoSIq8yngEolIG6x2D/YU9Jh4iE9EXgd6YCMH/gQ8BEzAelC0BH4ALlLVUG6mFhNfD6xpQrEeUdflt6FXcmy/Aj4GvgHygtX3Yu3moV+/EuK7lPi4fp2wG7XJWOXzTVUdFPxfGYs1ncwBrghq1fES30dAKtakPBe4PuKGb5VQbZK+c8650lWX5h3nnHNR8KTvnHMJxJO+c84lEE/6zjmXQDzpO+dcAvGk71wMiEgPEXk37DicK40nfeecSyCe9F1CEZErgnHS54rIC8GgWttEZIiIfCUiH4pIalD2WBGZGQyuNT5/cDIROVJEPgjGWv9KRI4Idl9fRMaJyCIReS14ahMReVxEMoP9/C2kU3cO8KTvEoiItAd+jw1+dyyQC1wO1AO+CgbEm449+QvwMnC3qnbCnmzNX/8a8JyqdgZOxgYuAxvJ8lZs3Po2wCkikoINd9Ah2M/DFXuWzpXMk75LJGcAXYBZwZC5Z2DJOQ94IyjzK
vArEWkINFLV6cH60UD3YAylZqo6HkBVf1HVHUGZL1V1VTCY2VygFbAF+AV4UUQuAPLLOhcKT/oukQgwWlWPDZajVPUvRZQraWySoobxzhc5RkwuUCMYG74bNtrlecB/yxizczHlSd8lkg+BC0WkKeyZz/Zw7P9B/siOlwGfqOpmYKOInBqsvxKYHoxJv0pEzgv2UVtE6hZ3wGA8+4aqOglr+ql6c6q6aqVG6UWcqx5UNVNE7sdmMEsCsoEbge1ABxGZDWzG2v3BhvYdEST1ZcDVwforgRdEZFCwj4tKOGwD4B0RqYN9S7gtxqflXJn4KJsu4YnINlWtH3YczlUGb95xzrkE4jV955xLIF7Td865BOJJ3znnEognfeecSyCe9J1zLoF40nfOuQTiSd855xLI/wc+b+a+8JwUJwAAAABJRU5ErkJggg==\n",
"text/plain": [
"<matplotlib.figure.Figure at 0x7fc9b43e4828>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# print(model.metrics_names)\n",
"plt.plot(hist.history['acc'], 'b-', label='acc' )\n",
"plt.plot(hist.history['loss'], 'r-', label='loss' )\n",
"plt.xlabel('epochs')\n",
"plt.legend()\n",
"plt.show()"
]
},
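  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The same `hist.history` dict also records the validation metrics (`val_loss` and `val_acc`, matching the epoch log above); a small sketch plotting them alongside the training curves:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative sketch: validation curves from the same training history\n",
    "plt.plot(hist.history['val_acc'], 'b--', label='val_acc')\n",
    "plt.plot(hist.history['val_loss'], 'r--', label='val_loss')\n",
    "plt.xlabel('epochs')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },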
{
"cell_type": "code",
"execution_count": 23,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"result = pred.argmax(axis=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Submission file:"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Diff: 0\n",
"Survived: 160\n"
]
}
],
"source": [
"# compare to the previous result\n",
"prev = pd.read_csv('submission.csv', index_col=0)\n",
"print('Diff: ', np.sum(prev.Survived.values != result))\n",
"print('Survived: ', result.sum())\n",
"\n",
"# submission data to csv file \n",
"submission = pd.DataFrame({'PassengerId': test.index, 'Survived': result})\n",
"submission.to_csv('submission.csv', index=False)"
]
},
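  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick check (illustrative) that the written file has the two columns Kaggle expects, `PassengerId` and `Survived`:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative check: preview the submission file that was just written\n",
    "pd.read_csv('submission.csv').head()"
   ]
  },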
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1,\n",
" 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1,\n",
" 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1,\n",
" 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1,\n",
" 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0,\n",
" 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1,\n",
" 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1,\n",
" 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1,\n",
" 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1,\n",
" 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0,\n",
" 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,\n",
" 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,\n",
" 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0,\n",
" 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1,\n",
" 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0,\n",
" 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0,\n",
" 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0,\n",
" 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1,\n",
" 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0])"
]
},
"execution_count": 25,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"result"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "py36",
"language": "python",
"name": "py36"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}