Skip to content

Instantly share code, notes, and snippets.

@Eligijus112
Created September 25, 2022 11:09
Show Gist options
  • Save Eligijus112/5309d9dd037236b1a95a7b906f2406cf to your computer and use it in GitHub Desktop.
Save Eligijus112/5309d9dd037236b1a95a7b906f2406cf to your computer and use it in GitHub Desktop.
Regularization in Python
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Importing packages "
]
},
{
"cell_type": "code",
"execution_count": 122,
"metadata": {},
"outputs": [],
"source": [
"# Data wrangling \n",
"import pandas as pd \n",
"\n",
"# Sklearn elastic net implementation \n",
"from sklearn.linear_model import ElasticNet\n",
"\n",
"# Tensorflow \n",
"import tensorflow as tf\n",
"\n",
"# Keras\n",
"from keras.models import Sequential, Model\n",
"from keras.layers import Dense, Input\n",
"\n",
"# Regularizer\n",
"from keras.regularizers import L1L2\n",
"\n",
"# Distance between points calculation \n",
"import haversine as hs\n",
"\n",
"# Ploting \n",
"import matplotlib.pyplot as plt\n",
"\n",
"# Training on cpu \n",
"import os\n",
"os.environ['CUDA_VISIBLE_DEVICES'] = '-1'\n",
"\n",
"# Array math \n",
"import numpy as np "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Reading the data "
]
},
{
"cell_type": "code",
"execution_count": 123,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Shape of data: (21613, 21)\n",
"Features in data: ['id' 'date' 'price' 'bedrooms' 'bathrooms' 'sqft_living' 'sqft_lot'\n",
" 'floors' 'waterfront' 'view' 'condition' 'grade' 'sqft_above'\n",
" 'sqft_basement' 'yr_built' 'yr_renovated' 'zipcode' 'lat' 'long'\n",
" 'sqft_living15' 'sqft_lot15']\n"
]
}
],
"source": [
"d = pd.read_csv('data/kc_house_data.csv')\n",
"\n",
"print(f\"Shape of data: {d.shape}\")\n",
"print(f\"Features in data: {d.columns.values}\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Feature engineering \n",
"\n",
"## Distance from city center "
]
},
{
"cell_type": "code",
"execution_count": 124,
"metadata": {},
"outputs": [],
"source": [
"# Defining the Seattle coordinates \n",
"lat_center, long_center = 47.6062, -122.3321\n",
"\n",
"# Calculating the distance from the center of Seattle (in meters)\n",
"d['distance_from_center'] = d.apply(lambda x: hs.haversine((x.lat, x.long), (lat_center, long_center)) * 1000, axis=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Flag for renovation event "
]
},
{
"cell_type": "code",
"execution_count": 125,
"metadata": {},
"outputs": [],
"source": [
"d['is_renovated'] = d.apply(lambda x: 1 if x.yr_renovated > 0 else 0, axis=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Age\n",
"\n",
"We will hold that the year 2015 is the last year in the dataset."
]
},
{
"cell_type": "code",
"execution_count": 126,
"metadata": {},
"outputs": [],
"source": [
"# Converting date column to datetime\n",
"d['date'] = pd.to_datetime(d['date'])\n",
"\n",
"# Getting the age of the house \n",
"d['age'] = d.apply(lambda x: x.date.year - x.yr_built, axis=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Defining the final feature list "
]
},
{
"cell_type": "code",
"execution_count": 127,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>age</th>\n",
" <th>distance_from_center</th>\n",
" <th>floors</th>\n",
" <th>sqft_living</th>\n",
" <th>sqft_lot</th>\n",
" <th>sqft_above</th>\n",
" <th>price</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>59</td>\n",
" <td>11972.703646</td>\n",
" <td>1.0</td>\n",
" <td>1180</td>\n",
" <td>5650</td>\n",
" <td>1180</td>\n",
" <td>221900.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>63</td>\n",
" <td>12802.836812</td>\n",
" <td>2.0</td>\n",
" <td>2570</td>\n",
" <td>7242</td>\n",
" <td>2170</td>\n",
" <td>538000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>82</td>\n",
" <td>16416.983068</td>\n",
" <td>1.0</td>\n",
" <td>770</td>\n",
" <td>10000</td>\n",
" <td>770</td>\n",
" <td>180000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>49</td>\n",
" <td>10538.247653</td>\n",
" <td>1.0</td>\n",
" <td>1960</td>\n",
" <td>5000</td>\n",
" <td>1050</td>\n",
" <td>604000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>28</td>\n",
" <td>21554.008860</td>\n",
" <td>1.0</td>\n",
" <td>1680</td>\n",
" <td>8080</td>\n",
" <td>1680</td>\n",
" <td>510000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>13</td>\n",
" <td>25131.222621</td>\n",
" <td>1.0</td>\n",
" <td>5420</td>\n",
" <td>101930</td>\n",
" <td>3890</td>\n",
" <td>1225000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>19</td>\n",
" <td>32971.570823</td>\n",
" <td>2.0</td>\n",
" <td>1715</td>\n",
" <td>6819</td>\n",
" <td>1715</td>\n",
" <td>257500.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>52</td>\n",
" <td>21909.751716</td>\n",
" <td>1.0</td>\n",
" <td>1060</td>\n",
" <td>9711</td>\n",
" <td>1060</td>\n",
" <td>291850.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>55</td>\n",
" <td>10447.689945</td>\n",
" <td>1.0</td>\n",
" <td>1780</td>\n",
" <td>7470</td>\n",
" <td>1050</td>\n",
" <td>229500.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9</th>\n",
" <td>12</td>\n",
" <td>34800.386344</td>\n",
" <td>2.0</td>\n",
" <td>1890</td>\n",
" <td>6560</td>\n",
" <td>1890</td>\n",
" <td>323000.0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" age distance_from_center floors sqft_living sqft_lot sqft_above \\\n",
"0 59 11972.703646 1.0 1180 5650 1180 \n",
"1 63 12802.836812 2.0 2570 7242 2170 \n",
"2 82 16416.983068 1.0 770 10000 770 \n",
"3 49 10538.247653 1.0 1960 5000 1050 \n",
"4 28 21554.008860 1.0 1680 8080 1680 \n",
"5 13 25131.222621 1.0 5420 101930 3890 \n",
"6 19 32971.570823 2.0 1715 6819 1715 \n",
"7 52 21909.751716 1.0 1060 9711 1060 \n",
"8 55 10447.689945 1.0 1780 7470 1050 \n",
"9 12 34800.386344 2.0 1890 6560 1890 \n",
"\n",
" price \n",
"0 221900.0 \n",
"1 538000.0 \n",
"2 180000.0 \n",
"3 604000.0 \n",
"4 510000.0 \n",
"5 1225000.0 \n",
"6 257500.0 \n",
"7 291850.0 \n",
"8 229500.0 \n",
"9 323000.0 "
]
},
"execution_count": 127,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"features = [\n",
" 'age',\n",
" 'distance_from_center',\n",
" 'floors',\n",
" 'sqft_living',\n",
" 'sqft_lot',\n",
" 'sqft_above',\n",
"]\n",
"\n",
"y_var = ['price']\n",
"\n",
"d = d[features + y_var].copy()\n",
"\n",
"d.head(10)"
]
},
{
"cell_type": "code",
"execution_count": 128,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>age</th>\n",
" <th>distance_from_center</th>\n",
" <th>floors</th>\n",
" <th>sqft_living</th>\n",
" <th>sqft_lot</th>\n",
" <th>sqft_above</th>\n",
" <th>price</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.533853</td>\n",
" <td>-0.609459</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.979812</td>\n",
" <td>-0.228316</td>\n",
" <td>-0.734691</td>\n",
" <td>221900.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>0.670020</td>\n",
" <td>-0.531453</td>\n",
" <td>0.936484</td>\n",
" <td>0.533622</td>\n",
" <td>-0.189881</td>\n",
" <td>0.460830</td>\n",
" <td>538000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>1.316818</td>\n",
" <td>-0.191836</td>\n",
" <td>-0.915406</td>\n",
" <td>-1.426221</td>\n",
" <td>-0.123296</td>\n",
" <td>-1.229805</td>\n",
" <td>180000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>0.193433</td>\n",
" <td>-0.744253</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.130547</td>\n",
" <td>-0.244009</td>\n",
" <td>-0.891678</td>\n",
" <td>604000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>-0.521449</td>\n",
" <td>0.290883</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.435412</td>\n",
" <td>-0.169649</td>\n",
" <td>-0.130892</td>\n",
" <td>510000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>-1.032079</td>\n",
" <td>0.627029</td>\n",
" <td>-0.915406</td>\n",
" <td>3.636707</td>\n",
" <td>2.096136</td>\n",
" <td>2.537897</td>\n",
" <td>1225000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>-0.827827</td>\n",
" <td>1.363775</td>\n",
" <td>0.936484</td>\n",
" <td>-0.397303</td>\n",
" <td>-0.200093</td>\n",
" <td>-0.088626</td>\n",
" <td>257500.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>0.295559</td>\n",
" <td>0.324311</td>\n",
" <td>-0.915406</td>\n",
" <td>-1.110469</td>\n",
" <td>-0.130273</td>\n",
" <td>-0.879602</td>\n",
" <td>291850.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>0.397685</td>\n",
" <td>-0.752763</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.326531</td>\n",
" <td>-0.184376</td>\n",
" <td>-0.891678</td>\n",
" <td>229500.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9</th>\n",
" <td>-1.066121</td>\n",
" <td>1.535626</td>\n",
" <td>0.936484</td>\n",
" <td>-0.206763</td>\n",
" <td>-0.206346</td>\n",
" <td>0.122703</td>\n",
" <td>323000.0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" age distance_from_center floors sqft_living sqft_lot \\\n",
"0 0.533853 -0.609459 -0.915406 -0.979812 -0.228316 \n",
"1 0.670020 -0.531453 0.936484 0.533622 -0.189881 \n",
"2 1.316818 -0.191836 -0.915406 -1.426221 -0.123296 \n",
"3 0.193433 -0.744253 -0.915406 -0.130547 -0.244009 \n",
"4 -0.521449 0.290883 -0.915406 -0.435412 -0.169649 \n",
"5 -1.032079 0.627029 -0.915406 3.636707 2.096136 \n",
"6 -0.827827 1.363775 0.936484 -0.397303 -0.200093 \n",
"7 0.295559 0.324311 -0.915406 -1.110469 -0.130273 \n",
"8 0.397685 -0.752763 -0.915406 -0.326531 -0.184376 \n",
"9 -1.066121 1.535626 0.936484 -0.206763 -0.206346 \n",
"\n",
" sqft_above price \n",
"0 -0.734691 221900.0 \n",
"1 0.460830 538000.0 \n",
"2 -1.229805 180000.0 \n",
"3 -0.891678 604000.0 \n",
"4 -0.130892 510000.0 \n",
"5 2.537897 1225000.0 \n",
"6 -0.088626 257500.0 \n",
"7 -0.879602 291850.0 \n",
"8 -0.891678 229500.0 \n",
"9 0.122703 323000.0 "
]
},
"execution_count": 128,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Standart scaling all the numeric features\n",
"for feature in features:\n",
" d[feature] = (d[feature] - d[feature].mean()) / d[feature].std()\n",
"\n",
"# Dropping missing values \n",
"d = d.dropna()\n",
"\n",
"d.head(10)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# EDA "
]
},
{
"cell_type": "code",
"execution_count": 129,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>age</th>\n",
" <th>distance_from_center</th>\n",
" <th>floors</th>\n",
" <th>sqft_living</th>\n",
" <th>sqft_lot</th>\n",
" <th>sqft_above</th>\n",
" <th>price</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>count</th>\n",
" <td>2.161300e+04</td>\n",
" <td>2.161300e+04</td>\n",
" <td>2.161300e+04</td>\n",
" <td>2.161300e+04</td>\n",
" <td>2.161300e+04</td>\n",
" <td>2.161300e+04</td>\n",
" <td>2.161300e+04</td>\n",
" </tr>\n",
" <tr>\n",
" <th>mean</th>\n",
" <td>1.262427e-16</td>\n",
" <td>-2.511704e-16</td>\n",
" <td>-4.208091e-17</td>\n",
" <td>2.393352e-16</td>\n",
" <td>1.315028e-17</td>\n",
" <td>1.209826e-16</td>\n",
" <td>5.400881e+05</td>\n",
" </tr>\n",
" <tr>\n",
" <th>std</th>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>1.000000e+00</td>\n",
" <td>3.671272e+05</td>\n",
" </tr>\n",
" <tr>\n",
" <th>min</th>\n",
" <td>-1.508666e+00</td>\n",
" <td>-1.642135e+00</td>\n",
" <td>-9.154058e-01</td>\n",
" <td>-1.948846e+00</td>\n",
" <td>-3.521677e-01</td>\n",
" <td>-1.809452e+00</td>\n",
" <td>7.500000e+04</td>\n",
" </tr>\n",
" <tr>\n",
" <th>25%</th>\n",
" <td>-8.618687e-01</td>\n",
" <td>-8.151646e-01</td>\n",
" <td>-9.154058e-01</td>\n",
" <td>-7.108783e-01</td>\n",
" <td>-2.430431e-01</td>\n",
" <td>-7.226147e-01</td>\n",
" <td>3.219500e+05</td>\n",
" </tr>\n",
" <tr>\n",
" <th>50%</th>\n",
" <td>-1.129451e-01</td>\n",
" <td>-1.831002e-01</td>\n",
" <td>1.053914e-02</td>\n",
" <td>-1.849871e-01</td>\n",
" <td>-1.808034e-01</td>\n",
" <td>-2.758039e-01</td>\n",
" <td>4.500000e+05</td>\n",
" </tr>\n",
" <tr>\n",
" <th>75%</th>\n",
" <td>6.700205e-01</td>\n",
" <td>6.397055e-01</td>\n",
" <td>9.364841e-01</td>\n",
" <td>5.118460e-01</td>\n",
" <td>-1.066855e-01</td>\n",
" <td>5.091340e-01</td>\n",
" <td>6.450000e+05</td>\n",
" </tr>\n",
" <tr>\n",
" <th>max</th>\n",
" <td>2.440204e+00</td>\n",
" <td>5.509889e+00</td>\n",
" <td>3.714319e+00</td>\n",
" <td>1.247778e+01</td>\n",
" <td>3.950342e+01</td>\n",
" <td>9.203831e+00</td>\n",
" <td>7.700000e+06</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" age distance_from_center floors sqft_living \\\n",
"count 2.161300e+04 2.161300e+04 2.161300e+04 2.161300e+04 \n",
"mean 1.262427e-16 -2.511704e-16 -4.208091e-17 2.393352e-16 \n",
"std 1.000000e+00 1.000000e+00 1.000000e+00 1.000000e+00 \n",
"min -1.508666e+00 -1.642135e+00 -9.154058e-01 -1.948846e+00 \n",
"25% -8.618687e-01 -8.151646e-01 -9.154058e-01 -7.108783e-01 \n",
"50% -1.129451e-01 -1.831002e-01 1.053914e-02 -1.849871e-01 \n",
"75% 6.700205e-01 6.397055e-01 9.364841e-01 5.118460e-01 \n",
"max 2.440204e+00 5.509889e+00 3.714319e+00 1.247778e+01 \n",
"\n",
" sqft_lot sqft_above price \n",
"count 2.161300e+04 2.161300e+04 2.161300e+04 \n",
"mean 1.315028e-17 1.209826e-16 5.400881e+05 \n",
"std 1.000000e+00 1.000000e+00 3.671272e+05 \n",
"min -3.521677e-01 -1.809452e+00 7.500000e+04 \n",
"25% -2.430431e-01 -7.226147e-01 3.219500e+05 \n",
"50% -1.808034e-01 -2.758039e-01 4.500000e+05 \n",
"75% -1.066855e-01 5.091340e-01 6.450000e+05 \n",
"max 3.950342e+01 9.203831e+00 7.700000e+06 "
]
},
"execution_count": 129,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"d.describe()"
]
},
{
"cell_type": "code",
"execution_count": 130,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>age</th>\n",
" <th>distance_from_center</th>\n",
" <th>floors</th>\n",
" <th>sqft_living</th>\n",
" <th>sqft_lot</th>\n",
" <th>sqft_above</th>\n",
" <th>price</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.533853</td>\n",
" <td>-0.609459</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.979812</td>\n",
" <td>-0.228316</td>\n",
" <td>-0.734691</td>\n",
" <td>221900.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>0.670020</td>\n",
" <td>-0.531453</td>\n",
" <td>0.936484</td>\n",
" <td>0.533622</td>\n",
" <td>-0.189881</td>\n",
" <td>0.460830</td>\n",
" <td>538000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>1.316818</td>\n",
" <td>-0.191836</td>\n",
" <td>-0.915406</td>\n",
" <td>-1.426221</td>\n",
" <td>-0.123296</td>\n",
" <td>-1.229805</td>\n",
" <td>180000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>0.193433</td>\n",
" <td>-0.744253</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.130547</td>\n",
" <td>-0.244009</td>\n",
" <td>-0.891678</td>\n",
" <td>604000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>-0.521449</td>\n",
" <td>0.290883</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.435412</td>\n",
" <td>-0.169649</td>\n",
" <td>-0.130892</td>\n",
" <td>510000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>-1.032079</td>\n",
" <td>0.627029</td>\n",
" <td>-0.915406</td>\n",
" <td>3.636707</td>\n",
" <td>2.096136</td>\n",
" <td>2.537897</td>\n",
" <td>1225000.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>-0.827827</td>\n",
" <td>1.363775</td>\n",
" <td>0.936484</td>\n",
" <td>-0.397303</td>\n",
" <td>-0.200093</td>\n",
" <td>-0.088626</td>\n",
" <td>257500.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>0.295559</td>\n",
" <td>0.324311</td>\n",
" <td>-0.915406</td>\n",
" <td>-1.110469</td>\n",
" <td>-0.130273</td>\n",
" <td>-0.879602</td>\n",
" <td>291850.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>0.397685</td>\n",
" <td>-0.752763</td>\n",
" <td>-0.915406</td>\n",
" <td>-0.326531</td>\n",
" <td>-0.184376</td>\n",
" <td>-0.891678</td>\n",
" <td>229500.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9</th>\n",
" <td>-1.066121</td>\n",
" <td>1.535626</td>\n",
" <td>0.936484</td>\n",
" <td>-0.206763</td>\n",
" <td>-0.206346</td>\n",
" <td>0.122703</td>\n",
" <td>323000.0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" age distance_from_center floors sqft_living sqft_lot \\\n",
"0 0.533853 -0.609459 -0.915406 -0.979812 -0.228316 \n",
"1 0.670020 -0.531453 0.936484 0.533622 -0.189881 \n",
"2 1.316818 -0.191836 -0.915406 -1.426221 -0.123296 \n",
"3 0.193433 -0.744253 -0.915406 -0.130547 -0.244009 \n",
"4 -0.521449 0.290883 -0.915406 -0.435412 -0.169649 \n",
"5 -1.032079 0.627029 -0.915406 3.636707 2.096136 \n",
"6 -0.827827 1.363775 0.936484 -0.397303 -0.200093 \n",
"7 0.295559 0.324311 -0.915406 -1.110469 -0.130273 \n",
"8 0.397685 -0.752763 -0.915406 -0.326531 -0.184376 \n",
"9 -1.066121 1.535626 0.936484 -0.206763 -0.206346 \n",
"\n",
" sqft_above price \n",
"0 -0.734691 221900.0 \n",
"1 0.460830 538000.0 \n",
"2 -1.229805 180000.0 \n",
"3 -0.891678 604000.0 \n",
"4 -0.130892 510000.0 \n",
"5 2.537897 1225000.0 \n",
"6 -0.088626 257500.0 \n",
"7 -0.879602 291850.0 \n",
"8 -0.891678 229500.0 \n",
"9 0.122703 323000.0 "
]
},
"execution_count": 130,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"d.head(10)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Elastic net \n",
"\n",
"We will use $\\alpha = 1$ and $\\lambda=0.4$. The equation in sklearn is: \n",
"\n",
"$$ MSE = \\dfrac{1}{2} \\alpha (1 - \\lambda) \\sum_{i=1}^p (\\beta_i)^{2} + \\alpha \\lambda \\sum_{i=1}^p |\\beta_i|+ \\frac{1}{2N} ||y - X \\beta||^2_2$$\n",
"\n",
"## Scikit learn "
]
},
{
"cell_type": "code",
"execution_count": 131,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/tmp/ipykernel_21610/4269339002.py:16: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n",
" coefs_df = coefs_df.append({'feature': 'intercept', 'coef_sk': el.intercept_[0]}, ignore_index=True)\n"
]
}
],
"source": [
"# Defining the params \n",
"alpha = 1.0 \n",
"l1_ratio = 0.02\n",
"\n",
"# Fitting the model to data using sklearn\n",
"el = ElasticNet(alpha=alpha, l1_ratio=l1_ratio)\n",
"el.fit(d[features], d[y_var])\n",
"\n",
"# Extracting the coefs \n",
"coefs = el.coef_\n",
"\n",
"# Creating a dataframe with the coefs\n",
"coefs_df = pd.DataFrame({'feature': features, 'coef_sk': coefs})\n",
"\n",
"# Appending the intercept\n",
"coefs_df = coefs_df.append({'feature': 'intercept', 'coef_sk': el.intercept_[0]}, ignore_index=True)"
]
},
{
"cell_type": "code",
"execution_count": 132,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>feature</th>\n",
" <th>coef_sk</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>age</td>\n",
" <td>12999.031247</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>distance_from_center</td>\n",
" <td>-62430.341480</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>floors</td>\n",
" <td>14022.123588</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>sqft_living</td>\n",
" <td>99113.890140</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>sqft_lot</td>\n",
" <td>9282.895480</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>sqft_above</td>\n",
" <td>72578.847965</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>intercept</td>\n",
" <td>540088.141767</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" feature coef_sk\n",
"0 age 12999.031247\n",
"1 distance_from_center -62430.341480\n",
"2 floors 14022.123588\n",
"3 sqft_living 99113.890140\n",
"4 sqft_lot 9282.895480\n",
"5 sqft_above 72578.847965\n",
"6 intercept 540088.141767"
]
},
"execution_count": 132,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"coefs_df"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Tensorflow implementation\n",
"\n",
"Default equation:\n",
"\n",
"$$ MSE = \\lambda_{1} \\sum_{i=1}^p (\\beta_i)^{2} + \\lambda_{2} \\sum_{i=1}^p |\\beta_i|+ \\frac{1}{N} ||y - X \\beta||^2_2$$\n",
"\n",
"Our custom equation: \n",
"\n",
"$$ MSE = \\lambda_{1} \\sum_{i=1}^p (\\beta_i)^{2} + \\lambda_{2} \\sum_{i=1}^p |\\beta_i|+ \\frac{1}{2N} ||y - X \\beta||^2_2$$"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### From sklear to tensorflow \n",
"\n",
"$$ \\alpha \\lambda \\sum_{i=1}^p |\\beta_i| + 0.5 \\alpha (1 - \\lambda) \\sum_{i=1}^p (\\beta_i)^{2} = \\lambda_{1} \\sum_{i=1}^p |\\beta_i| + \\lambda_{2} \\sum_{i=1}^p (\\beta_i)^{2}$$\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"$$ \\rightarrow $$\n",
"\n",
"$$\\lambda_{1} = \\alpha \\lambda$$\n",
"\n",
"$$\\lambda_{2} = \\dfrac{\\alpha}{2} (1 - \\lambda) $$\n"
]
},
{
"cell_type": "code",
"execution_count": 133,
"metadata": {},
"outputs": [],
"source": [
"class NMSE(tf.keras.losses.Loss):\n",
" def __init__(self):\n",
" super().__init__()\n",
"\n",
" def call(self, y_true, y_pred):\n",
" # Calculating the mse; \n",
" # Adding an additional division by 2 to mimic sklearn\n",
" return tf.reduce_mean(tf.square(y_true - y_pred)) / 2"
]
},
{
"cell_type": "code",
"execution_count": 134,
"metadata": {},
"outputs": [],
"source": [
"def elastic_net_to_keras(alpha: float, l1_ratio: float):\n",
" \"\"\"\n",
" Converts ElasticNet parameters from sklearn to Keras regularizers.\n",
" \n",
" Arguments\n",
" ---------\n",
" alpha: float\n",
" The regularization strength of the model.\n",
" l1_ratio: float\n",
" The l1 regularization ratio of the model.\n",
" \n",
" Returns\n",
" -------\n",
" l1: float\n",
" The l1 regularization strength of the model in tensorflow\n",
" l2: float\n",
" The l2 regularization strength of the model in tensorflow\n",
" \"\"\"\n",
" l1 = alpha * l1_ratio\n",
" l2 = alpha * (1 - l1_ratio) / 2\n",
" return l1, l2"
]
},
{
"cell_type": "code",
"execution_count": 135,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(0.02, 0.49)"
]
},
"execution_count": 135,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"l1, l2 "
]
},
{
"cell_type": "code",
"execution_count": 136,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model_12\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
" input_13 (InputLayer) [(None, 6)] 0 \n",
" \n",
" dense_12 (Dense) (None, 1) 7 \n",
" \n",
"=================================================================\n",
"Total params: 7\n",
"Trainable params: 7\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n",
"Epoch 1/200\n",
"338/338 [==============================] - 0s 394us/step - loss: 206274887680.0000 - mse: 412501671936.0000\n",
"Epoch 2/200\n",
"338/338 [==============================] - 0s 345us/step - loss: 193244495872.0000 - mse: 386186182656.0000\n",
"Epoch 3/200\n",
"338/338 [==============================] - 0s 375us/step - loss: 181496119296.0000 - mse: 362235985920.0000\n",
"Epoch 4/200\n",
"338/338 [==============================] - 0s 393us/step - loss: 170868310016.0000 - mse: 340395819008.0000\n",
"Epoch 5/200\n",
"338/338 [==============================] - 0s 360us/step - loss: 161215430656.0000 - mse: 320397082624.0000\n",
"Epoch 6/200\n",
"338/338 [==============================] - 0s 368us/step - loss: 152422465536.0000 - mse: 302102052864.0000\n",
"Epoch 7/200\n",
"338/338 [==============================] - 0s 389us/step - loss: 144385933312.0000 - mse: 285247307776.0000\n",
"Epoch 8/200\n",
"338/338 [==============================] - 0s 354us/step - loss: 137028460544.0000 - mse: 269710311424.0000\n",
"Epoch 9/200\n",
"338/338 [==============================] - 0s 344us/step - loss: 130274344960.0000 - mse: 255431737344.0000\n",
"Epoch 10/200\n",
"338/338 [==============================] - 0s 367us/step - loss: 124059074560.0000 - mse: 242216435712.0000\n",
"Epoch 11/200\n",
"338/338 [==============================] - 0s 372us/step - loss: 118332104704.0000 - mse: 230005473280.0000\n",
"Epoch 12/200\n",
"338/338 [==============================] - 0s 359us/step - loss: 113047715840.0000 - mse: 218690551808.0000\n",
"Epoch 13/200\n",
"338/338 [==============================] - 0s 395us/step - loss: 108164202496.0000 - mse: 208248373248.0000\n",
"Epoch 14/200\n",
"338/338 [==============================] - 0s 369us/step - loss: 103643545600.0000 - mse: 198523518976.0000\n",
"Epoch 15/200\n",
"338/338 [==============================] - 0s 407us/step - loss: 99455606784.0000 - mse: 189497753600.0000\n",
"Epoch 16/200\n",
"338/338 [==============================] - 0s 413us/step - loss: 95573467136.0000 - mse: 181111865344.0000\n",
"Epoch 17/200\n",
"338/338 [==============================] - 0s 369us/step - loss: 91970748416.0000 - mse: 173352239104.0000\n",
"Epoch 18/200\n",
"338/338 [==============================] - 0s 366us/step - loss: 88621850624.0000 - mse: 166103203840.0000\n",
"Epoch 19/200\n",
"338/338 [==============================] - 0s 351us/step - loss: 85512527872.0000 - mse: 159400067072.0000\n",
"Epoch 20/200\n",
"338/338 [==============================] - 0s 371us/step - loss: 82620383232.0000 - mse: 153112035328.0000\n",
"Epoch 21/200\n",
"338/338 [==============================] - 0s 358us/step - loss: 79930638336.0000 - mse: 147299631104.0000\n",
"Epoch 22/200\n",
"338/338 [==============================] - 0s 368us/step - loss: 77426401280.0000 - mse: 141875871744.0000\n",
"Epoch 23/200\n",
"338/338 [==============================] - 0s 395us/step - loss: 75096211456.0000 - mse: 136811773952.0000\n",
"Epoch 24/200\n",
"338/338 [==============================] - 0s 377us/step - loss: 72926289920.0000 - mse: 132045127680.0000\n",
"Epoch 25/200\n",
"338/338 [==============================] - 0s 353us/step - loss: 70904840192.0000 - mse: 127649497088.0000\n",
"Epoch 26/200\n",
"338/338 [==============================] - 0s 337us/step - loss: 69020778496.0000 - mse: 123567669248.0000\n",
"Epoch 27/200\n",
"338/338 [==============================] - 0s 340us/step - loss: 67262943232.0000 - mse: 119739949056.0000\n",
"Epoch 28/200\n",
"338/338 [==============================] - 0s 348us/step - loss: 65624674304.0000 - mse: 116201660416.0000\n",
"Epoch 29/200\n",
"338/338 [==============================] - 0s 397us/step - loss: 64097054720.0000 - mse: 112872407040.0000\n",
"Epoch 30/200\n",
"338/338 [==============================] - 0s 384us/step - loss: 62670221312.0000 - mse: 109811343360.0000\n",
"Epoch 31/200\n",
"338/338 [==============================] - 0s 395us/step - loss: 61341265920.0000 - mse: 106886447104.0000\n",
"Epoch 32/200\n",
"338/338 [==============================] - 0s 414us/step - loss: 60101472256.0000 - mse: 104174436352.0000\n",
"Epoch 33/200\n",
"338/338 [==============================] - 0s 414us/step - loss: 58944765952.0000 - mse: 101672935424.0000\n",
"Epoch 34/200\n",
"338/338 [==============================] - 0s 382us/step - loss: 57864351744.0000 - mse: 99319455744.0000\n",
"Epoch 35/200\n",
"338/338 [==============================] - 0s 399us/step - loss: 56856948736.0000 - mse: 97047609344.0000\n",
"Epoch 36/200\n",
"338/338 [==============================] - 0s 372us/step - loss: 55916310528.0000 - mse: 95060148224.0000\n",
"Epoch 37/200\n",
"338/338 [==============================] - 0s 367us/step - loss: 55038193664.0000 - mse: 93173899264.0000\n",
"Epoch 38/200\n",
"338/338 [==============================] - 0s 353us/step - loss: 54217347072.0000 - mse: 91413626880.0000\n",
"Epoch 39/200\n",
"338/338 [==============================] - 0s 396us/step - loss: 53451636736.0000 - mse: 89735749632.0000\n",
"Epoch 40/200\n",
"338/338 [==============================] - 0s 381us/step - loss: 52736319488.0000 - mse: 88201805824.0000\n",
"Epoch 41/200\n",
"338/338 [==============================] - 0s 410us/step - loss: 52068491264.0000 - mse: 86733119488.0000\n",
"Epoch 42/200\n",
"338/338 [==============================] - 0s 399us/step - loss: 51444543488.0000 - mse: 85351202816.0000\n",
"Epoch 43/200\n",
"338/338 [==============================] - 0s 360us/step - loss: 50861912064.0000 - mse: 84086382592.0000\n",
"Epoch 44/200\n",
"338/338 [==============================] - 0s 355us/step - loss: 50317934592.0000 - mse: 82952634368.0000\n",
"Epoch 45/200\n",
"338/338 [==============================] - 0s 363us/step - loss: 49809772544.0000 - mse: 81794867200.0000\n",
"Epoch 46/200\n",
"338/338 [==============================] - 0s 386us/step - loss: 49334956032.0000 - mse: 80817192960.0000\n",
"Epoch 47/200\n",
"338/338 [==============================] - 0s 363us/step - loss: 48891490304.0000 - mse: 79800524800.0000\n",
"Epoch 48/200\n",
"338/338 [==============================] - 0s 378us/step - loss: 48477274112.0000 - mse: 78897700864.0000\n",
"Epoch 49/200\n",
"338/338 [==============================] - 0s 411us/step - loss: 48090263552.0000 - mse: 78076960768.0000\n",
"Epoch 50/200\n",
"338/338 [==============================] - 0s 351us/step - loss: 47728848896.0000 - mse: 77270269952.0000\n",
"Epoch 51/200\n",
"338/338 [==============================] - 0s 356us/step - loss: 47391395840.0000 - mse: 76552691712.0000\n",
"Epoch 52/200\n",
"338/338 [==============================] - 0s 343us/step - loss: 47076229120.0000 - mse: 75875401728.0000\n",
"Epoch 53/200\n",
"338/338 [==============================] - 0s 375us/step - loss: 46780915712.0000 - mse: 75237933056.0000\n",
"Epoch 54/200\n",
"338/338 [==============================] - 0s 398us/step - loss: 46505439232.0000 - mse: 74636312576.0000\n",
"Epoch 55/200\n",
"338/338 [==============================] - 0s 382us/step - loss: 46248009728.0000 - mse: 74075267072.0000\n",
"Epoch 56/200\n",
"338/338 [==============================] - 0s 359us/step - loss: 46007173120.0000 - mse: 73550430208.0000\n",
"Epoch 57/200\n",
"338/338 [==============================] - 0s 367us/step - loss: 45782642688.0000 - mse: 73065971712.0000\n",
"Epoch 58/200\n",
"338/338 [==============================] - 0s 342us/step - loss: 45572628480.0000 - mse: 72615288832.0000\n",
"Epoch 59/200\n",
"338/338 [==============================] - 0s 377us/step - loss: 45376401408.0000 - mse: 72177319936.0000\n",
"Epoch 60/200\n",
"338/338 [==============================] - 0s 360us/step - loss: 45193064448.0000 - mse: 71787954176.0000\n",
"Epoch 61/200\n",
"338/338 [==============================] - 0s 417us/step - loss: 45021675520.0000 - mse: 71414800384.0000\n",
"Epoch 62/200\n",
"338/338 [==============================] - 0s 364us/step - loss: 44861636608.0000 - mse: 71079583744.0000\n",
"Epoch 63/200\n",
"338/338 [==============================] - 0s 381us/step - loss: 44711817216.0000 - mse: 70731726848.0000\n",
"Epoch 64/200\n",
"338/338 [==============================] - 0s 359us/step - loss: 44572012544.0000 - mse: 70423928832.0000\n",
"Epoch 65/200\n",
"338/338 [==============================] - 0s 374us/step - loss: 44441284608.0000 - mse: 70137282560.0000\n",
"Epoch 66/200\n",
"338/338 [==============================] - 0s 377us/step - loss: 44319338496.0000 - mse: 69887574016.0000\n",
"Epoch 67/200\n",
"338/338 [==============================] - 0s 374us/step - loss: 44205203456.0000 - mse: 69650989056.0000\n",
"Epoch 68/200\n",
"338/338 [==============================] - 0s 356us/step - loss: 44098433024.0000 - mse: 69435883520.0000\n",
"Epoch 69/200\n",
"338/338 [==============================] - 0s 388us/step - loss: 43998797824.0000 - mse: 69203419136.0000\n",
"Epoch 70/200\n",
"338/338 [==============================] - 0s 353us/step - loss: 43905495040.0000 - mse: 68959895552.0000\n",
"Epoch 71/200\n",
"338/338 [==============================] - 0s 360us/step - loss: 43818659840.0000 - mse: 68808581120.0000\n",
"Epoch 72/200\n",
"338/338 [==============================] - 0s 369us/step - loss: 43737378816.0000 - mse: 68607901696.0000\n",
"Epoch 73/200\n",
"338/338 [==============================] - 0s 366us/step - loss: 43661410304.0000 - mse: 68440006656.0000\n",
"Epoch 74/200\n",
"338/338 [==============================] - 0s 371us/step - loss: 43590406144.0000 - mse: 68296835072.0000\n",
"Epoch 75/200\n",
"338/338 [==============================] - 0s 388us/step - loss: 43524157440.0000 - mse: 68159770624.0000\n",
"Epoch 76/200\n",
"338/338 [==============================] - 0s 407us/step - loss: 43462270976.0000 - mse: 68039700480.0000\n",
"Epoch 77/200\n",
"338/338 [==============================] - 0s 404us/step - loss: 43404161024.0000 - mse: 67903074304.0000\n",
"Epoch 78/200\n",
"338/338 [==============================] - 0s 378us/step - loss: 43349868544.0000 - mse: 67784548352.0000\n",
"Epoch 79/200\n",
"338/338 [==============================] - 0s 416us/step - loss: 43299160064.0000 - mse: 67685990400.0000\n",
"Epoch 80/200\n",
"338/338 [==============================] - 0s 356us/step - loss: 43251802112.0000 - mse: 67560267776.0000\n",
"Epoch 81/200\n",
"338/338 [==============================] - 0s 387us/step - loss: 43207610368.0000 - mse: 67455328256.0000\n",
"Epoch 82/200\n",
"338/338 [==============================] - 0s 360us/step - loss: 43166228480.0000 - mse: 67385950208.0000\n",
"Epoch 83/200\n",
"338/338 [==============================] - 0s 375us/step - loss: 43127775232.0000 - mse: 67303653376.0000\n",
"Epoch 84/200\n",
"338/338 [==============================] - 0s 378us/step - loss: 43091623936.0000 - mse: 67233656832.0000\n",
"Epoch 85/200\n",
"338/338 [==============================] - 0s 358us/step - loss: 43057770496.0000 - mse: 67152650240.0000\n",
"Epoch 86/200\n",
"338/338 [==============================] - 0s 348us/step - loss: 43026161664.0000 - mse: 67078746112.0000\n",
"Epoch 87/200\n",
"338/338 [==============================] - 0s 398us/step - loss: 42996768768.0000 - mse: 67029282816.0000\n",
"Epoch 88/200\n",
"338/338 [==============================] - 0s 334us/step - loss: 42969206784.0000 - mse: 66998095872.0000\n",
"Epoch 89/200\n",
"338/338 [==============================] - 0s 382us/step - loss: 42943467520.0000 - mse: 66882469888.0000\n",
"Epoch 90/200\n",
"338/338 [==============================] - 0s 369us/step - loss: 42919522304.0000 - mse: 66820456448.0000\n",
"Epoch 91/200\n",
"338/338 [==============================] - 0s 412us/step - loss: 42896994304.0000 - mse: 66811310080.0000\n",
"Epoch 92/200\n",
"338/338 [==============================] - 0s 368us/step - loss: 42876010496.0000 - mse: 66776371200.0000\n",
"Epoch 93/200\n",
"338/338 [==============================] - 0s 407us/step - loss: 42856435712.0000 - mse: 66713563136.0000\n",
"Epoch 94/200\n",
"338/338 [==============================] - 0s 335us/step - loss: 42837970944.0000 - mse: 66683179008.0000\n",
"Epoch 95/200\n",
"338/338 [==============================] - 0s 358us/step - loss: 42820812800.0000 - mse: 66634297344.0000\n",
"Epoch 96/200\n",
"338/338 [==============================] - 0s 392us/step - loss: 42804662272.0000 - mse: 66603941888.0000\n",
"Epoch 97/200\n",
"338/338 [==============================] - 0s 389us/step - loss: 42789752832.0000 - mse: 66577432576.0000\n",
"Epoch 98/200\n",
"338/338 [==============================] - 0s 397us/step - loss: 42775777280.0000 - mse: 66533130240.0000\n",
"Epoch 99/200\n",
"338/338 [==============================] - 0s 372us/step - loss: 42762645504.0000 - mse: 66513850368.0000\n",
"Epoch 100/200\n",
"338/338 [==============================] - 0s 358us/step - loss: 42750521344.0000 - mse: 66486448128.0000\n",
"Epoch 101/200\n",
"338/338 [==============================] - 0s 385us/step - loss: 42739052544.0000 - mse: 66484039680.0000\n",
"Epoch 102/200\n",
"338/338 [==============================] - 0s 370us/step - loss: 42728382464.0000 - mse: 66457776128.0000\n",
"Epoch 103/200\n",
"338/338 [==============================] - 0s 406us/step - loss: 42718224384.0000 - mse: 66418016256.0000\n",
"Epoch 104/200\n",
"338/338 [==============================] - 0s 391us/step - loss: 42708938752.0000 - mse: 66383454208.0000\n",
"Epoch 105/200\n",
"338/338 [==============================] - 0s 371us/step - loss: 42700156928.0000 - mse: 66390908928.0000\n",
"Epoch 106/200\n",
"338/338 [==============================] - 0s 386us/step - loss: 42691907584.0000 - mse: 66403315712.0000\n",
"Epoch 107/200\n",
"338/338 [==============================] - 0s 410us/step - loss: 42684366848.0000 - mse: 66340433920.0000\n",
"Epoch 108/200\n",
"338/338 [==============================] - 0s 348us/step - loss: 42677317632.0000 - mse: 66318635008.0000\n",
"Epoch 109/200\n",
"338/338 [==============================] - 0s 360us/step - loss: 42670653440.0000 - mse: 66335055872.0000\n",
"Epoch 110/200\n",
"338/338 [==============================] - 0s 411us/step - loss: 42664448000.0000 - mse: 66309390336.0000\n",
"Epoch 111/200\n",
"338/338 [==============================] - 0s 396us/step - loss: 42658668544.0000 - mse: 66281103360.0000\n",
"Epoch 112/200\n",
"338/338 [==============================] - 0s 375us/step - loss: 42653253632.0000 - mse: 66303295488.0000\n",
"Epoch 113/200\n",
"338/338 [==============================] - 0s 393us/step - loss: 42648092672.0000 - mse: 66238840832.0000\n",
"Epoch 114/200\n",
"338/338 [==============================] - 0s 346us/step - loss: 42643361792.0000 - mse: 66240102400.0000\n",
"Epoch 115/200\n",
"338/338 [==============================] - 0s 354us/step - loss: 42638954496.0000 - mse: 66269110272.0000\n",
"Epoch 116/200\n",
"338/338 [==============================] - 0s 349us/step - loss: 42634756096.0000 - mse: 66227503104.0000\n",
"Epoch 117/200\n",
"338/338 [==============================] - 0s 367us/step - loss: 42630963200.0000 - mse: 66240458752.0000\n",
"Epoch 118/200\n",
"338/338 [==============================] - 0s 353us/step - loss: 42627301376.0000 - mse: 66197602304.0000\n",
"Epoch 119/200\n",
"338/338 [==============================] - 0s 377us/step - loss: 42623844352.0000 - mse: 66232041472.0000\n",
"Epoch 120/200\n",
"338/338 [==============================] - 0s 359us/step - loss: 42620755968.0000 - mse: 66172043264.0000\n",
"Epoch 121/200\n",
"338/338 [==============================] - 0s 378us/step - loss: 42617692160.0000 - mse: 66206277632.0000\n",
"Epoch 122/200\n",
"338/338 [==============================] - 0s 409us/step - loss: 42614947840.0000 - mse: 66195410944.0000\n",
"Epoch 123/200\n",
"338/338 [==============================] - 0s 406us/step - loss: 42612305920.0000 - mse: 66158772224.0000\n",
"Epoch 124/200\n",
"338/338 [==============================] - 0s 375us/step - loss: 42610036736.0000 - mse: 66218995712.0000\n",
"Epoch 125/200\n",
"338/338 [==============================] - 0s 349us/step - loss: 42607689728.0000 - mse: 66123763712.0000\n",
"Epoch 126/200\n",
"338/338 [==============================] - 0s 400us/step - loss: 42605457408.0000 - mse: 66179592192.0000\n",
"Epoch 127/200\n",
"338/338 [==============================] - 0s 348us/step - loss: 42603565056.0000 - mse: 66121043968.0000\n",
"Epoch 128/200\n",
"338/338 [==============================] - 0s 397us/step - loss: 42601689088.0000 - mse: 66186555392.0000\n",
"Epoch 129/200\n",
"338/338 [==============================] - 0s 390us/step - loss: 42600181760.0000 - mse: 66168799232.0000\n",
"Epoch 130/200\n",
"338/338 [==============================] - 0s 399us/step - loss: 42598371328.0000 - mse: 66139451392.0000\n",
"Epoch 131/200\n",
"338/338 [==============================] - 0s 391us/step - loss: 42596941824.0000 - mse: 66123403264.0000\n",
"Epoch 132/200\n",
"338/338 [==============================] - 0s 377us/step - loss: 42595610624.0000 - mse: 66126913536.0000\n",
"Epoch 133/200\n",
"338/338 [==============================] - 0s 397us/step - loss: 42594254848.0000 - mse: 66110132224.0000\n",
"Epoch 134/200\n",
"338/338 [==============================] - 0s 390us/step - loss: 42593005568.0000 - mse: 66122592256.0000\n",
"Epoch 135/200\n",
"338/338 [==============================] - 0s 369us/step - loss: 42591793152.0000 - mse: 66119503872.0000\n",
"Epoch 136/200\n",
"338/338 [==============================] - 0s 364us/step - loss: 42590883840.0000 - mse: 66126974976.0000\n",
"Epoch 137/200\n",
"338/338 [==============================] - 0s 379us/step - loss: 42589782016.0000 - mse: 66113064960.0000\n",
"Epoch 138/200\n",
"338/338 [==============================] - 0s 393us/step - loss: 42588897280.0000 - mse: 66118746112.0000\n",
"Epoch 139/200\n",
"338/338 [==============================] - 0s 382us/step - loss: 42587951104.0000 - mse: 66126794752.0000\n",
"Epoch 140/200\n",
"338/338 [==============================] - 0s 388us/step - loss: 42587115520.0000 - mse: 66132807680.0000\n",
"Epoch 141/200\n",
"338/338 [==============================] - 0s 388us/step - loss: 42586288128.0000 - mse: 66135941120.0000\n",
"Epoch 142/200\n",
"338/338 [==============================] - 0s 363us/step - loss: 42585710592.0000 - mse: 66115424256.0000\n",
"Epoch 143/200\n",
"338/338 [==============================] - 0s 374us/step - loss: 42585042944.0000 - mse: 66122256384.0000\n",
"Epoch 144/200\n",
"338/338 [==============================] - 0s 401us/step - loss: 42584276992.0000 - mse: 66106929152.0000\n",
"Epoch 145/200\n",
"338/338 [==============================] - 0s 403us/step - loss: 42583920640.0000 - mse: 66107371520.0000\n",
"Epoch 146/200\n",
"338/338 [==============================] - 0s 406us/step - loss: 42583232512.0000 - mse: 66120310784.0000\n",
"Epoch 147/200\n",
"338/338 [==============================] - 0s 457us/step - loss: 42582687744.0000 - mse: 66099290112.0000\n",
"Epoch 148/200\n",
"338/338 [==============================] - 0s 421us/step - loss: 42582396928.0000 - mse: 66107858944.0000\n",
"Epoch 149/200\n",
"338/338 [==============================] - 0s 401us/step - loss: 42581934080.0000 - mse: 66115375104.0000\n",
"Epoch 150/200\n",
"338/338 [==============================] - 0s 400us/step - loss: 42581422080.0000 - mse: 66123862016.0000\n",
"Epoch 151/200\n",
"338/338 [==============================] - 0s 395us/step - loss: 42581086208.0000 - mse: 66113667072.0000\n",
"Epoch 152/200\n",
"338/338 [==============================] - 0s 381us/step - loss: 42580639744.0000 - mse: 66120757248.0000\n",
"Epoch 153/200\n",
"338/338 [==============================] - 0s 400us/step - loss: 42580205568.0000 - mse: 66109890560.0000\n",
"Epoch 154/200\n",
"338/338 [==============================] - 0s 368us/step - loss: 42579927040.0000 - mse: 66111180800.0000\n",
"Epoch 155/200\n",
"338/338 [==============================] - 0s 386us/step - loss: 42579632128.0000 - mse: 66126225408.0000\n",
"Epoch 156/200\n",
"338/338 [==============================] - 0s 388us/step - loss: 42579537920.0000 - mse: 66132385792.0000\n",
"Epoch 157/200\n",
"338/338 [==============================] - 0s 398us/step - loss: 42579177472.0000 - mse: 66137452544.0000\n",
"Epoch 158/200\n",
"338/338 [==============================] - 0s 452us/step - loss: 42578829312.0000 - mse: 66092154880.0000\n",
"Epoch 159/200\n",
"338/338 [==============================] - 0s 439us/step - loss: 42578571264.0000 - mse: 66107195392.0000\n",
"Epoch 160/200\n",
"338/338 [==============================] - 0s 460us/step - loss: 42578358272.0000 - mse: 66070446080.0000\n",
"Epoch 161/200\n",
"338/338 [==============================] - 0s 422us/step - loss: 42578399232.0000 - mse: 66081959936.0000\n",
"Epoch 162/200\n",
"338/338 [==============================] - 0s 402us/step - loss: 42578108416.0000 - mse: 66109128704.0000\n",
"Epoch 163/200\n",
"338/338 [==============================] - 0s 407us/step - loss: 42577891328.0000 - mse: 66150678528.0000\n",
"Epoch 164/200\n",
"338/338 [==============================] - 0s 439us/step - loss: 42577649664.0000 - mse: 66116964352.0000\n",
"Epoch 165/200\n",
"338/338 [==============================] - 0s 435us/step - loss: 42577588224.0000 - mse: 66096029696.0000\n",
"Epoch 166/200\n",
"338/338 [==============================] - 0s 417us/step - loss: 42577580032.0000 - mse: 66102984704.0000\n",
"Epoch 167/200\n",
"338/338 [==============================] - 0s 408us/step - loss: 42577326080.0000 - mse: 66111381504.0000\n",
"Epoch 168/200\n",
"338/338 [==============================] - 0s 414us/step - loss: 42577203200.0000 - mse: 66116399104.0000\n",
"Epoch 169/200\n",
"338/338 [==============================] - 0s 415us/step - loss: 42577170432.0000 - mse: 66090430464.0000\n",
"Epoch 170/200\n",
"338/338 [==============================] - 0s 406us/step - loss: 42576949248.0000 - mse: 66129068032.0000\n",
"Epoch 171/200\n",
"338/338 [==============================] - 0s 420us/step - loss: 42576850944.0000 - mse: 66073636864.0000\n",
"Epoch 172/200\n",
"338/338 [==============================] - 0s 402us/step - loss: 42576728064.0000 - mse: 66076454912.0000\n",
"Epoch 173/200\n",
"338/338 [==============================] - 0s 437us/step - loss: 42576691200.0000 - mse: 66124963840.0000\n",
"Epoch 174/200\n",
"338/338 [==============================] - 0s 465us/step - loss: 42576551936.0000 - mse: 66121678848.0000\n",
"Epoch 175/200\n",
"338/338 [==============================] - 0s 507us/step - loss: 42576510976.0000 - mse: 66118803456.0000\n",
"Epoch 176/200\n",
"338/338 [==============================] - 0s 480us/step - loss: 42576482304.0000 - mse: 66106109952.0000\n",
"Epoch 177/200\n",
"338/338 [==============================] - 0s 460us/step - loss: 42576306176.0000 - mse: 66105536512.0000\n",
"Epoch 178/200\n",
"338/338 [==============================] - 0s 476us/step - loss: 42576461824.0000 - mse: 66083454976.0000\n",
"Epoch 179/200\n",
"338/338 [==============================] - 0s 469us/step - loss: 42576179200.0000 - mse: 66048770048.0000\n",
"Epoch 180/200\n",
"338/338 [==============================] - 0s 481us/step - loss: 42576285696.0000 - mse: 66084417536.0000\n",
"Epoch 181/200\n",
"338/338 [==============================] - 0s 388us/step - loss: 42576216064.0000 - mse: 66122788864.0000\n",
"Epoch 182/200\n",
"338/338 [==============================] - 0s 394us/step - loss: 42576035840.0000 - mse: 66099953664.0000\n",
"Epoch 183/200\n",
"338/338 [==============================] - 0s 445us/step - loss: 42576080896.0000 - mse: 66106101760.0000\n",
"Epoch 184/200\n",
"338/338 [==============================] - 0s 397us/step - loss: 42575962112.0000 - mse: 66115301376.0000\n",
"Epoch 185/200\n",
"338/338 [==============================] - 0s 403us/step - loss: 42575974400.0000 - mse: 66110218240.0000\n",
"Epoch 186/200\n",
"338/338 [==============================] - 0s 407us/step - loss: 42576007168.0000 - mse: 66087485440.0000\n",
"Epoch 187/200\n",
"338/338 [==============================] - 0s 474us/step - loss: 42575888384.0000 - mse: 66112311296.0000\n",
"Epoch 188/200\n",
"338/338 [==============================] - 0s 460us/step - loss: 42575900672.0000 - mse: 66080944128.0000\n",
"Epoch 189/200\n",
"338/338 [==============================] - 0s 403us/step - loss: 42575773696.0000 - mse: 66100109312.0000\n",
"Epoch 190/200\n",
"338/338 [==============================] - 0s 421us/step - loss: 42575839232.0000 - mse: 66122510336.0000\n",
"Epoch 191/200\n",
"338/338 [==============================] - 0s 397us/step - loss: 42575761408.0000 - mse: 66094694400.0000\n",
"Epoch 192/200\n",
"338/338 [==============================] - 0s 419us/step - loss: 42575806464.0000 - mse: 66108387328.0000\n",
"Epoch 193/200\n",
"338/338 [==============================] - 0s 408us/step - loss: 42575749120.0000 - mse: 66084438016.0000\n",
"Epoch 194/200\n",
"338/338 [==============================] - 0s 424us/step - loss: 42575691776.0000 - mse: 66112634880.0000\n",
"Epoch 195/200\n",
"338/338 [==============================] - 0s 428us/step - loss: 42575757312.0000 - mse: 66082951168.0000\n",
"Epoch 196/200\n",
"338/338 [==============================] - 0s 395us/step - loss: 42575679488.0000 - mse: 66130681856.0000\n",
"Epoch 197/200\n",
"338/338 [==============================] - 0s 369us/step - loss: 42575716352.0000 - mse: 66101477376.0000\n",
"Epoch 198/200\n",
"338/338 [==============================] - 0s 373us/step - loss: 42575585280.0000 - mse: 66107961344.0000\n",
"Epoch 199/200\n",
"338/338 [==============================] - 0s 388us/step - loss: 42575622144.0000 - mse: 66102431744.0000\n",
"Epoch 200/200\n",
"338/338 [==============================] - 0s 370us/step - loss: 42575634432.0000 - mse: 66091057152.0000\n"
]
}
],
"source": [
"# Inferring the l1 and l2 params \n",
"# NOTE(review): elastic_net_to_keras appears to be defined in an earlier cell;\n",
"# presumably it maps sklearn's (alpha, l1_ratio) parametrization onto Keras'\n",
"# separate L1 and L2 penalty coefficients — confirm against that cell.\n",
"l1, l2 = elastic_net_to_keras(alpha, l1_ratio)\n",
"\n",
"# Defining a simple regression neural net \n",
"# A single Dense(1) unit with a linear activation is equivalent to a linear\n",
"# regression; adding the L1L2 kernel regularizer makes it the neural-net\n",
"# analogue of an elastic net.\n",
"numeric_input = Input(shape=(len(features), ))\n",
"output = Dense(1, activation='linear', kernel_regularizer=L1L2(l1, l2))(numeric_input)\n",
"\n",
"model = Model(inputs=numeric_input, outputs=output)\n",
"# Plain SGD with a small learning rate; NOTE(review): presumably kept small\n",
"# because the inputs are raw (unscaled) feature values — verify upstream.\n",
"optimizer = tf.keras.optimizers.SGD(learning_rate=0.0001)\n",
"\n",
"# Compiling the model \n",
"# NMSE is not imported above, so it is presumably a custom loss class defined\n",
"# in an earlier cell; plain 'mse' is tracked alongside it as a metric.\n",
"model.compile(optimizer=optimizer, loss=NMSE(), metrics=['mse'])\n",
"\n",
"model.summary()\n",
"\n",
"# Fitting the model to data using keras\n",
"# d, features and y_var come from earlier cells (d is the house-price frame).\n",
"history = model.fit(d[features].values, d[y_var].values, epochs=200, batch_size=64)"
]
},
{
"cell_type": "code",
"execution_count": 137,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAmcAAAFNCAYAAABFbcjcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAAuRklEQVR4nO3deZxddX3/8ddn1uz7ZN9D2CEsKYsgIiqCVbBVW7AVtLZUW7da+1PbPur6+9lFrWtVVERbhbYqSlsVUKmggJJggARZQghkJRtJyJ6Z+fz+uCcwhEwyycydc2fm9Xw87uPe8z3n3Ps5c2Z5z/d8zzmRmUiSJKk21JVdgCRJkp5lOJMkSaohhjNJkqQaYjiTJEmqIYYzSZKkGmI4kyRJqiGGM0kDVkTMjIiMiIYuLPvGiPh5d99Hkg7FcCapT4iI5RGxJyLG7df+6yIYzSypNEnqUYYzSX3JY8Dl+yYi4iRgSHnlSFLPM5xJ6kv+Fbiiw/SVwDc6LhARIyPiGxGxPiIej4i/jYi6Yl59RHw8IjZExDLgtw+w7lcjYk1ErIqIj0ZE/eEWGRGTI+LGiNgUEUsj4k86zDsjIhZExNaIeDIiPlm0D4qIf4uIjRGxOSLujogJh/vZkvo+w5mkvuQuYEREHFeEpsuAf9tvmc8CI4HZwIuohLk3FfP+BHglcCowH3jtfuteC7QCRxXLXAj88RHUeT2wEphcfMb/i4gLinmfBj6dmSOAOcB/FO1XFnVPA8YCbwF2HsFnS+rj+l04i4hrImJdRCzuwrLnRcQ9EdEaEa/db96Piv9e/7t61Uo6Avt6z14G/AZYtW9Gh8D2/sx8OjOXA58A3lAs8nvApzJzRWZuAj7WYd0JwCuAd2Xm9sxcB/xz8X5dFhHTgHOA92bmrsxcBHyFZ3v89gJHRcS4zNyWmXd1aB8LHJWZbZm5MDO3Hs5nS+of+l04o/Kf70VdXPYJ4I3Atw4w75949he6pNrxr8DrqfzsfmO/eeOARuDxDm2PA1OK15OBFfvN22dGse6a4h+zzcCXgPGHWd9kYFNmPt1JDW8GjgYeLA5dvrLDdt0EXB8RqyPiHyOi8TA/W1I/0O/CWWbeBmzq2BYRc4qesIURcXtEHFssuzwz7wPaD/A+PwGe3r9dUrky83EqJwa8AvjufrM3UOmBmtGhbTrP9q6toXLYsOO8fVYAu4FxmTmqeIzIzBMOs8TVwJiIGH6gGjLzkcy8nEro+wfg2xExNDP3ZuaHMvN44AVUDr9egaQBp9+Fs05cDbw9M08H3gP8S8n1SOqeNwMXZOb2jo2Z2UZlDNf/jYjhETEDeDfPjkv7D+AdETE1IkYD7+uw7hrgZuATETEiIuqKf+xedDiFZeYK4A7gY8Ug/5OLev8NICL+MCJaMrMd2Fys1h4RL46Ik4pDs1uphMzn/eMoqf/r9+EsIoZR+S/0PyNiEZXDFJNKLUpSt2Tmo5m5oJPZbwe2A8uAn1MZtnBNMe/LVA4d3gvcw/N73q4AmoAHgKeAb3Nkvy8uB2ZS6UW7AfhAZv64mHcRsCQitlE5OeCyzNwJTCw+byuVsXQ/o3KoU9IAE5lZdg09rrgY5X9n5okRMQJ4KDM7/QUbEdcWy397v/bzgfdk5isPsJokSVKP6/c9Z8XZTo9FxOsAomJeyWVJkiQdUL/rOYuI64DzqZy19STwAeCnwBeoHJ5oBK7PzA9HxG9ROeQwGtgFrN03+DcibgeOBYYBG4E3Z+ZNvbs1kiRpoOl34UySJKkv6/eHNSVJkvoSw5kkSVINaSi7gJ40bty4nDlzZtllSJIkHdLChQs3ZGbL/u39KpzNnDmTBQs6u/SRJElS7YiIxw/U7mFNSZKkGmI4kyRJqiGGM0mSpBpiOJMkSaohhjNJkqQaYjiTJEmqIYYzSZKkGmI4kyRJqiGGM0mSpBpiODsMNy1Zy88eXl92GZIkqR/rV7dvqrbP/OQRxg1r5kVHP+
82WJIkST3CnrPDMH3MEFY8taPsMiRJUj9mODsM08YMYeVTO2lvz7JLkSRJ/ZTh7DBMGz2YPa3trN+2u+xSJElSP2U4OwxTxwwBYMUmD21KkqTqMJwdhmmji3DmuDNJklQlVQtnETEtIm6NiAciYklEvPMAy0REfCYilkbEfRFxWod5V0bEI8XjymrVeTimjh4MwBMbd5ZciSRJ6q+qeSmNVuAvM/OeiBgOLIyIWzLzgQ7LXAzMLR5nAl8AzoyIMcAHgPlAFuvemJlPVbHeQxrUWM+EEc32nEmSpKqpWs9ZZq7JzHuK108DvwGm7LfYpcA3suIuYFRETAJeDtySmZuKQHYLcFG1aj0c00YPccyZJEmqml4ZcxYRM4FTgV/uN2sKsKLD9MqirbP20u27nIYkSVI1VD2cRcQw4DvAuzJzaxXe/6qIWBARC9avr/6tlaaNHsyaLTvZ09pe9c+SJEkDT1XDWUQ0Uglm38zM7x5gkVXAtA7TU4u2ztqfJzOvzsz5mTm/paX6t1WaNmYI7QmrN9t7JkmSel41z9YM4KvAbzLzk50sdiNwRXHW5lnAlsxcA9wEXBgRoyNiNHBh0Va6aWO8nIYkSaqeap6teQ7wBuD+iFhUtP01MB0gM78I/AB4BbAU2AG8qZi3KSI+AtxdrPfhzNxUxVq77JlwtsmeM0mS1POqFs4y8+dAHGKZBP68k3nXANdUobRumThiEI31Yc+ZJEmqCu8QcJjq64IpowZ7OQ1JklQVhrMjMG3MEFZ4OQ1JklQFhrMjMNUL0UqSpCoxnB2BaWMGs2n7Hrbvbi27FEmS1M8Yzo7AtNFeTkOSJFWH4ewIzBhbCWePbzScSZKknmU4OwIzxw0FYPmG7SVXIkmS+hvD2REYMaiRsUObWL7RcCZJknqW4ewIzRg7hMfsOZMkST3McHaEZo4b6pgzSZLU4wxnR2jW2KGs2bKLnXvayi5FkiT1I4azI7TvpIDHN3loU5Ik9RzD2RGa5RmbkiSpCgxnR2jftc6WO+5MkiT1IMPZERo+qJFxw5rsOZMkST3KcNYNM8cO9XIakiSpRxnOumHmuKFeiFaSJPUow1k3zBo3lCe37mbHntayS5EkSf2E4awbvAG6JEnqaYazbpg51stpSJKknmU464Z9F6J9zHFnkiSphxjOumFYcwMtw5vtOZMkST3GcNZNs8YOZfkGx5xJkqSeYTjrptktQ3l0/bayy5AkSf2E4aybjho/jI3b9/DU9j1llyJJkvoBw1k3zWkZBmDvmSRJ6hGGs246anwlnC1dZziTJEndZzjrpimjBtPcUGfPmSRJ6hGGs26qqwtmtwyz50ySJPWIqoWziLgmItZFxOJO5v9VRCwqHosjoi0ixhTzlkfE/cW8BdWqsaccNX4YS+05kyRJPaCaPWfXAhd1NjMz/ykzT8nMU4D3Az/LzE0dFnlxMX9+FWvsEUe1DGPlUzvZtbet7FIkSVIfV7Vwlpm3AZsOuWDF5cB11aql2uaMH0omLFvvnQIkSVL3lD7mLCKGUOlh+06H5gRujoiFEXFVOZV13TNnbHpoU5IkdVND2QUArwJ+sd8hzXMzc1VEjAduiYgHi5645ynC21UA06dPr361BzBz7FDqAh71pABJktRNpfecAZex3yHNzFxVPK8DbgDO6GzlzLw6M+dn5vyWlpaqFtqZQY31TBszxJ4zSZLUbaWGs4gYCbwI+H6HtqERMXzfa+BC4IBnfNaSo1qG2XMmSZK6rWqHNSPiOuB8YFxErAQ+ADQCZOYXi8V+B7g5MzuOpJ8A3BAR++r7Vmb+qFp19pSjxg/j9qUbaGtP6uui7HIkSVIfVbVwlpmXd2GZa6lccqNj2zJgXnWqqp45LcPY09rOyqd2MGPs0LLLkSRJfVQtjDnrF+Z4j01JktQDDGc9ZO6ESjh76MmnS65EkiT1ZYazHjJiUCNTRg3mobWGM0mSdOQMZz3omInDDWeSJKlbDGc96OgJw3l0/T
b2trWXXYokSeqjDGc96NiJw9nbljy2wXtsSpKkI2M460HHTBwOwIMe2pQkSUfIcNaD5rQMo6EueGjt1rJLkSRJfZThrAc1NdQxa9xQHlrrtc4kSdKRMZz1sGMmDuehJ+05kyRJR8Zw1sOOnTicFZt2sm13a9mlSJKkPshw1sOOmTgCgIe9U4AkSToChrMedmxxxubDnrEpSZKOgOGsh00ZNZghTfVeTkOSJB0Rw1kPq6sLjp7gbZwkSdKRMZxVwXGThvPg2q1kZtmlSJKkPsZwVgXHTxrBUzv2smbLrrJLkSRJfYzhrAqOnzwSgCWrvd6ZJEk6PIazKjhu0nAiYPGqLWWXIkmS+hjDWRUMaWpg9rih9pxJkqTDZjirkhOnjOSB1facSZKkw2M4q5ITJo9g9ZZdPLV9T9mlSJKkPsRwViUneFKAJEk6AoazKjlhcuUem0s8tClJkg6D4axKRg1pYsqowSy250ySJB0Gw1kVHT95hD1nkiTpsBjOqujEySN5bMN2tu9uLbsUSZLURxjOquiEySPIhAfXemhTkiR1TdXCWURcExHrImJxJ/PPj4gtEbGoePxdh3kXRcRDEbE0It5XrRqr7YQplZMCFq8ynEmSpK6pZs/ZtcBFh1jm9sw8pXh8GCAi6oHPAxcDxwOXR8TxVayzaiaOGETL8GbuXbm57FIkSVIfUbVwlpm3AZuOYNUzgKWZuSwz9wDXA5f2aHG9JCKYN3Uk967YXHYpkiSpjyh7zNnZEXFvRPwwIk4o2qYAKzoss7Jo65PmTR3Fsg3b2bprb9mlSJKkPqDMcHYPMCMz5wGfBb53JG8SEVdFxIKIWLB+/fqerK9HzJs2ikxYvNJLakiSpEMrLZxl5tbM3Fa8/gHQGBHjgFXAtA6LTi3aOnufqzNzfmbOb2lpqWrNR+LkqZXbOC1y3JkkSeqC0sJZREyMiChen1HUshG4G5gbEbMiogm4DLixrDq7a9SQJmaOHeK4M0mS1CUN1XrjiLgOOB8YFxErgQ8AjQCZ+UXgtcBbI6IV2AlclpkJtEbE24CbgHrgmsxcUq06e8O8aaP45bIjOTdCkiQNNFULZ5l5+SHmfw74XCfzfgD8oBp1lWHe1FF8f9Fqnty6iwkjBpVdjiRJqmFln605IMybNgrAQ5uSJOmQDGe94ITJI2ioCy9GK0mSDslw1gsGNdZzzMTh3LvCy2lIkqSDM5z1knnTRnHvys20t2fZpUiSpBpmOOslp04bxdO7Wlm6flvZpUiSpBpmOOsl82eOAWDB8qdKrkSSJNUyw1kvmTl2CGOHNrHgca93JkmSOmc46yURwekzRrPwcXvOJElS5wxnvWj+zNE8vnEH65/eXXYpkiSpRhnOetHpMyrjzuw9kyRJnTGc9aITp4ygqaGOhY47kyRJnTCc9aLmhnpOnjKSBfacSZKkThjOetnpM0ezeNUWdu1tK7sUSZJUgwxnvWz+jDHsbUvuX+WtnCRJ0vMZznrZ6TNGA3D3csedSZKk5zOc9bIxQ5s4avwwfvWY4UySJD2f4awEZ80ew92PbaK1rb3sUiRJUo0xnJXgrNlj2b6njcWrt5ZdiiRJqjGGsxKcOWssAHct21hyJZIkqdYYzkrQMryZo8YPM5xJkqTnMZyVxHFnkiTpQAxnJXHcmSRJOhDDWUkcdyZJkg7EcFYSx51JkqQDMZyVyHFnkiRpf10KZxExNCLqitdHR8QlEdFY3dL6v7Nnj2P7njbuXbm57FIkSVKN6GrP2W3AoIiYAtwMvAG4tlpFDRTnHDWWCLj9kQ1llyJJkmpEV8NZZOYO4HeBf8nM1wEnVK+sgWHUkCZOnjLScCZJkp7R5XAWEWcDfwD8T9FWX52SBpYXzm1h0YrNbN21t+xSJElSDehqOHsX8H7ghsxcEhGzgVsPtkJEXBMR6yJicSfz/yAi7ouI+yPijoiY12He8qJ9UUQs6GKNfdK5c8fR1p7c+ahnbUqSpC6Gs8z8WWZekp
n/UJwYsCEz33GI1a4FLjrI/MeAF2XmScBHgKv3m//izDwlM+d3pca+6rTpoxnSVM/PPbQpSZLo+tma34qIERExFFgMPBARf3WwdTLzNmDTQebfkZlPFZN3AVO7WHO/0tRQx9mzx3L7I+vLLkWSJNWArh7WPD4ztwKvBn4IzKJyxmZPeXPxvvskcHNELIyIq3rwc2rSuXPHsXzjDlZs2lF2KZIkqWRdDWeNxXXNXg3cmJl7qQSobouIF1MJZ+/t0HxuZp4GXAz8eUScd5D1r4qIBRGxYP36vtn79MK5LYCX1JAkSV0PZ18ClgNDgdsiYgbQ7Tt2R8TJwFeASzPzmRHxmbmqeF4H3ACc0dl7ZObVmTk/M+e3tLR0t6RSzGkZyuSRg7jt4b4ZLiVJUs/p6gkBn8nMKZn5iqx4HHhxdz44IqYD3wXekJkPd2gfGhHD970GLqQyzq3figjOP3Y8tz+ynj2t3spJkqSBrKsnBIyMiE/uO3wYEZ+g0ot2sHWuA+4EjomIlRHx5oh4S0S8pVjk74CxwL/sd8mMCcDPI+Je4FfA/2Tmj45k4/qSC44Zz/Y9bfzqsU7PoZAkSQNAQxeXu4ZK79XvFdNvAL5G5Y4BB5SZlx/sDTPzj4E/PkD7MmDe89fo3845ahzNDXX89MF1nDt3XNnlSJKkknR1zNmczPxAZi4rHh8CZlezsIFmcFM9Z88Zy08efJLMHjnXQpIk9UFdDWc7I+LcfRMRcQ6wszolDVwXHDuexzfuYNmG7WWXIkmSStLVw5pvAb4RESOL6aeAK6tT0sD14mPGA0u49cF1zGkZVnY5kiSpBF09W/PezJwHnAycnJmnAhdUtbIBaNqYIRw9YRg/+c26skuRJEkl6ephTQAyc2txpwCAd1ehngHvgmMncPfyTWzdtbfsUiRJUgkOK5ztJ3qsCj3jpceNp7U9ufVBe88kSRqIuhPOPKWwCk6dPppxw5q5ecmTZZciSZJKcNATAiLiaQ4cwgIYXJWKBrj6uuDCEybwvV+vYtfeNgY11pddkiRJ6kUH7TnLzOGZOeIAj+GZ2dUzPXWYLjphIjv2tHkjdEmSBqDuHNZUlZw1eywjBjXwo8Vryy5FkiT1MsNZDWpqqOOlx03gx795kr1t3ghdkqSBxHBWoy48YSJbdu71RuiSJA0whrMa9aKjWxjUWMcPF68puxRJktSLDGc1anBTPRccO54fLV5Lq4c2JUkaMAxnNexVJ09mw7Y93LXMQ5uSJA0UhrMa9uJjxzOsuYEb711VdimSJKmXGM5q2KDGei48fgI/WryW3a1tZZcjSZJ6geGsxr1q3mS27mrltoe9IK0kSQOB4azGnTt3HKOGNPJf964uuxRJktQLDGc1rrG+jotPnMQtDzzJjj2tZZcjSZKqzHDWB1wybzI797ZxywNPll2KJEmqMsNZH3DmrDFMGTWYby9cWXYpkiSpygxnfUBdXfCa06bw86UbWLNlZ9nlSJKkKjKc9RGvOX0qmXDDr73mmSRJ/ZnhrI+YMXYovzVzNN9euJLMLLscSZJUJYazPuS1p09l2frt/HrF5rJLkSRJVWI460NecdIkBjXWeWKAJEn9mOGsDxk+qJGLT5zEfy1a7TXPJEnqpwxnfczlZ0zn6d2t/Pe9a8ouRZIkVUFVw1lEXBMR6yJicSfzIyI+ExFLI+K+iDitw7wrI+KR4nFlNevsS35r5miOGj+Mb/7qibJLkSRJVVDtnrNrgYsOMv9iYG7xuAr4AkBEjAE+AJwJnAF8ICJGV7XSPiIi+IMzp3Pvis0sWb2l7HIkSVIPq2o4y8zbgE0HWeRS4BtZcRcwKiImAS8HbsnMTZn5FHALBw95A8rvnjqV5oY6vvVLe88kSepvyh5zNgVY0WF6ZdHWWbuAkUMaeeXJk/n+otVs3+2JAZIk9Sdlh7Nui4irImJBRCxYv3592eX0mtefOZ1tu1u9Y4AkSf1M2eFsFTCtw/TUoq2z9ufJzKszc3
5mzm9paalaobXmtOmjOHHKCK69Y7l3DJAkqR8pO5zdCFxRnLV5FrAlM9cANwEXRsTo4kSAC4s2FSKCPzpnFkvXbeP2RzaUXY4kSeoh1b6UxnXAncAxEbEyIt4cEW+JiLcUi/wAWAYsBb4M/BlAZm4CPgLcXTw+XLSpg98+eRLjhjXztV88VnYpkiSphzRU880z8/JDzE/gzzuZdw1wTTXq6i+aG+p5w1kz+OcfP8yj67cxp2VY2SVJkqRuKvuwprrp9WdOp6m+jq/fsbzsUiRJUg8wnPVxLcObueSUyfzngpU8tX1P2eVIkqRuMpz1A1edN5ude9v4+p3Lyy5FkiR1k+GsHzh6wnBeetwErr1jOTv2eFFaSZL6MsNZP/HW8+ewecderv/VikMvLEmSapbhrJ84fcZozpg1hq/cvoy9be1llyNJko6Q4awfeev5c1i9ZRff85ZOkiT1WYazfuT8o1s4YfIIPnfrUlrtPZMkqU8ynPUjEcG7Xno0j2/cwXftPZMkqU8ynPUzLz1uPCdNGclnf/qIY88kSeqDDGf9TKX3bC4rNu3ku/esLLscSZJ0mAxn/dAFx45n3tSRfPanS9nTau+ZJEl9ieGsH4oI3n3hMax8aiff/OXjZZcjSZIOg+Gsnzpv7jheMGcsn/3pUp7etbfsciRJUhcZzvqpiOD9Fx/Hpu17+NLPlpVdjiRJ6iLDWT920tSRvGreZL7y82U8uXVX2eVIkqQuMJz1c3914TG0tScfv+mhskuRJEldYDjr56aPHcIbXzCTb9+zkvtWbi67HEmSdAiGswHgHS+Zy9ihzXzwxiW0t2fZ5UiSpIMwnA0Awwc18t6LjuGeJzbzvUXe1kmSpFpmOBsgXnPaVOZNG8Xf//BBL60hSVINM5wNEHV1wYcuOYH123bziZsfLrscSZLUCcPZAHLKtFFccdYMvn7nchat2Fx2OZIk6QAMZwPMe15+DOOHN/P+797P3jbvuylJUq0xnA0wwwc18qFLTuA3a7by1Z8/VnY5kiRpP4azAejlJ0zkwuMn8MlbHmbpum1llyNJkjownA1AEcFHf+dEhjTV85f/eS+tHt6UJKlmGM4GqPHDB/GRS0/k3hWb+dJt3hhdkqRaYTgbwF558iRecdJEPvXjh3lg9dayy5EkSVQ5nEXERRHxUEQsjYj3HWD+P0fEouLxcERs7jCvrcO8G6tZ50AVEXz01ScxekgTb7/uHnbuaSu7JEmSBryqhbOIqAc+D1wMHA9cHhHHd1wmM/8iM0/JzFOAzwLf7TB75755mXlJteoc6MYMbeKTv3cKyzZs58P//UDZ5UiSNOBVs+fsDGBpZi7LzD3A9cClB1n+cuC6KtajTpw7dxx/et4crvvVE/zg/jVllyNJ0oBWzXA2BVjRYXpl0fY8ETEDmAX8tEPzoIhYEBF3RcSrq1alAPjLC49m3rRRvPfb97F8w/ayy5EkacCqlRMCLgO+nZkdBz3NyMz5wOuBT0XEnAOtGBFXFSFuwfr163uj1n6psb6Oz7/+VOrrg7f820LHn0mSVJJqhrNVwLQO01OLtgO5jP0OaWbmquJ5GfC/wKkHWjEzr87M+Zk5v6Wlpbs1D2hTRw/h05edykNPPs1f33A/mVl2SZIkDTjVDGd3A3MjYlZENFEJYM876zIijgVGA3d2aBsdEc3F63HAOYCj1XvBi45u4V0vOZobfr2Kr/1iednlSJI04DRU640zszUi3gbcBNQD12Tmkoj4MLAgM/cFtcuA6/O53TTHAV+KiHYqAfLvM9Nw1kvefsFRPLBmCx/9nweY3TKU848ZX3ZJkiQNGNGfDl3Nnz8/FyxYUHYZ/cL23a287ot3smLTDr77Zy9g7oThZZckSVK/EhELi/H1z1ErJwSoxgxtbuArV86nubGeP/r63ax7elfZJUmSNCAYztSpyaMG89Ur57Ph6T286Wt3s213a9klSZLU7xnOdFDzpo3iX/7wNB5c+zRv+deF7G
ltL7skSZL6NcOZDunFx4znH15zMj9fuoG/+PdFtLYZ0CRJqpaqna2p/uW1p09l8449fPR/fkNzQx0ff9086uqi7LIkSep3DGfqsj9+4Wx27W3j4zc/THNjHf/31ScZ0CRJ6mGGMx2Wt10wl5172/j8rY/S3g7/73dPot6AJklSjzGc6bC958JjqK+r4zM/eYTdrW18/HXzaKh3+KIkST3BcKbDFhG8+2VH01QffPzmh9mxp43PXH4qgxrryy5NkqQ+z+4OHbG3XTCXD7zqeG5+4Emu+Oqv2LJzb9klSZLU5xnO1C1vOmcWn7n8VH694il+74t3smrzzrJLkiSpTzOcqdsumTeZa990Bqu37OTSz/2CXz/xVNklSZLUZxnO1CPOOWoc333rCxjcVMdlV9/F9xetKrskSZL6JMOZeszcCcP53p+dw7ypo3jn9Yv42+/dz+7WtrLLkiSpTzGcqUeNHdbMN//kTP70vNn8211P8Nov3MkTG3eUXZYkSX2G4Uw9rrG+jve/4ji+fMV8Ht+4nd/+7O3cvGRt2WVJktQnGM5UNS87fgL/844XMnPsUK7614V86L+WsHOPhzklSToYw5mqatqYIXz7rWfzxhfM5Gu/WM7Fn76NXz22qeyyJEmqWYYzVV1zQz0fvOQEvvUnZ9KWye9ffScfvHEJO/a0ll2aJEk1x3CmXvOCOeP40TvP44qzZnDtHcu56FO3c8ejG8ouS5KkmmI4U68a2tzAhy49keuvOguA13/5l/zZNxeyYpNndEqSBIYzleSs2WO5+S/O490vO5qfPriOl3zyZ3z8pofYvttDnZKkgc1wptIMaqznHS+Zy0//8nwuPnEin7t1KRd84n/59sKVtLVn2eVJklQKw5lKN3nUYD592al8561nM2HEIN7zn/fysn/+Gd9ftMqQJkkacAxnqhmnzxjD9/7sHL74h6fRVF/HO69fxIWGNEnSABOZ/eeP3vz583PBggVll6Ee0N6e3LRkLZ/68SM89OTTzGkZyh+dO4vfPXUqg5vqyy5PkqRui4iFmTn/ee2GM9Wy9vbkh4vX8oWfLWXxqq2MGtLI68+YzhVnz2TiyEFllydJ0hEznKlPy0zuXv4U1/z8MW5+YC11EVx80iQu/61pnDV7LHV1UXaJkiQdls7CWUOVP/Qi4NNAPfCVzPz7/ea/EfgnYFXR9LnM/Eox70rgb4v2j2bm16tZq2pbRHDGrDGcMWsMKzbt4No7lvMfC1bwX/euZurowbzu9Gm85vQpTB09pOxSJUnqlqr1nEVEPfAw8DJgJXA3cHlmPtBhmTcC8zPzbfutOwZYAMwHElgInJ6ZTx3sM+05G1h27W3jpiVr+Y8FK/jF0o1EwDlzxvGqeZO48PiJjB7aVHaJkiR1qoyeszOApZm5rCjgeuBS4IGDrlXxcuCWzNxUrHsLcBFwXZVqVR80qLGeS0+ZwqWnTGHFph18556VfOeelbz3O/fzNzcs5gVHjeOVJ03iwhMmMGqIQU2S1DdUM5xNAVZ0mF4JnHmA5V4TEedR6WX7i8xc0cm6U6pVqPq+aWOG8K6XHs07XzKXxau28t/3r+YH96/h/3znPv76huD0GaO54NjxXHDseI4aP4wIx6hJkmpTVcecdcF/Addl5u6I+FPg68AFh/MGEXEVcBXA9OnTe75C9SkRwUlTR3LS1JG876JjWbxqKz9cvIafPriOj/3wQT72wweZMmowFxw7nnOOGsdZs8fYqyZJqinVHHN2NvDBzHx5Mf1+gMz8WCfL1wObMnNkRFwOnJ+Zf1rM+xLwv5l50MOajjnTwazZspNbH1zPrQ+t4xdLN7BjTxsRcNzEEZw9Zyxnzx7L6TNGO1ZNktQrev1SGhHRQOVQ5UuonI15N/D6zFzSYZlJmbmmeP07wHsz86zihICFwGnFovdQOSFg08E+03CmrtrT2s59Kzdz56MbuePRjSx84in2tLYDMHPsEE6ZNqrymD6a4yYNp7nBC9
9KknpWr58QkJmtEfE24CYql9K4JjOXRMSHgQWZeSPwjoi4BGgFNgFvLNbdFBEfoRLoAD58qGAmHY6mhjrmzxzD/JljePtL5rJrbxuLVmzmnieeYtETm7nj0Y18b9HqyrL1dRw3eQSnTB3JsZNGcMzE4Rw9YTjDmsseFSBJ6o+8CK10AJnJ2q27WPTEZhat2MyvV2xm8aot7NjT9swyU0cP5tiJwzlm4nCOmTiCo1qGMXPcEIY0GdokSYdWykVopb4qIpg0cjCTThrMxSdNAiq3klr51E4eevJpHlq7lQfXPs1Da5/m1ofWP+fG7BNGNDNj7FBmjR3KjHFDmDl2KDPHDmXqmMGMGNRY1iZJkvoIw5nURXV1wfSxQ5g+dggvO37CM+27W9tYtn47j67fxuMbd/DYhu08vnE7P3lwHRu27X7OewxvbmDyqMFMGjWIyaMGM2XUYCaPGsSkkYNpGd7MuGHNjBjU4KU+JGkAM5xJ3dTcUM9xk0Zw3KQRz5u3bXcryzds5/GNO1i1eQerN+9i9eadrN6yk/tWbmHT9j3PW6epoY6WYc2MG95My7CmZ0LbvudRQxoZNbip8jykkcGN9YY5SepHDGdSFQ1rbuDEKSM5ccrIA87fuaeN1Vt2smbzLjZs282GbbtZ//Ru1hfPqzbvYtGKLWzavpv2ToaHNtXXMXJII6MGV8LayMFNjBzcyPBBDQxrbmBocwPDBjUwrLmeoU2VtmGDivbiMaTJgCdJtcJwJpVocFM9c1qGMadl2EGXa2tPNm3fw4Ztu9m8Yy9bdu5h8469bN6595npLcXr1Zt38sDqLTy9u5Xtu1s7DXUdRcDQpgYGNdbR3FDPoMY6BjfVM6ihnkGNlenmxsr04Ka657QPaqynubGe5oY6GuuDxvo6GuvraCqeG+uDxoZnpxvqo9N59XUGREkynEl9QH1d0DK8cmjzcGQmu/a2s213K9uKsNbx+dnXbWzb1cqu1jZ27W1j9952du1tY1drGzv3trFp+x52tXZo39vGrtb255wI0RMioKEuqK8L6iOoq4tnpuui8npfW12xTH3dfo9O2vZfLwLqIggqJ4BUpiEI6uoAojJdLFdX9CzWdVy2eB37Lfvc96xM19UdYH2K9Tv0WkaHr8Vzpzsss988Drp+7L9Ip8twwGVi/1nPrneQz3h22eeu/9w6Ol+vmnrz43q/Q7p3P7C/7rv6uuAlx0049IJVYjiT+rGIYHBTPYOb6g872HXF3rZ2dhZhbW9bsre1nb1t7expa69Mt7Wzt/XZ6db957W1s6f12ek9re20ZdLenrS2J23tSXtWXrcX023tSVsnbW0dpg/1XglkQnsmmZUgmzw7XcmdSXt2bEvYN03HdSGLZTOzS72VkmrXoMY6HvzIxaV9vuFM0hHbdwjTS4Q8X+4LbuwX7vabfk6Qy31PWbzHc5qfed+ObbnfOs99n+cuc7D37njNy/0vf3nY6z9vvdxv+sC19YbnfJ2q/Vm9vW39+GsJvbt9ZQ/BNZxJUhXsO6wJUN/Lh5ok9W11ZRcgSZKkZxnOJEmSaojhTJIkqYYYziRJkmqI4UySJKmGGM4kSZJqiOFMkiSphhjOJEmSaojhTJIkqYYYziRJkmpIZG/fjKuKImI98HiVP2YcsKHKn1HLBvL2D+RtB7ff7R+42z+Qtx3c/mpu/4zMbNm/sV+Fs94QEQsyc37ZdZRlIG//QN52cPvd/oG7/QN528HtL2P7PawpSZJUQwxnkiRJNcRwdviuLruAkg3k7R/I2w5uv9s/cA3kbQe3v9e33zFnkiRJNcSeM0mSpBpiOOuiiLgoIh6KiKUR8b6y66m2iJgWEbdGxAMRsSQi3lm0fzAiVkXEouLxirJrrZaIWB4R9xfbuaBoGxMRt0TEI8Xz6LLrrIaIOKbDPl4UEVsj4l39ef9HxDURsS4iFndoO+D+jorPFL8P7ouI08qrvPs62fZ/iogHi+27ISJGFe0zI2Jnh++BL5
ZWeA/pZPs7/V6PiPcX+/6hiHh5OVX3nE62/987bPvyiFhUtPer/X+Qv3Xl/uxnpo9DPIB64FFgNtAE3AscX3ZdVd7mScBpxevhwMPA8cAHgfeUXV8vfQ2WA+P2a/tH4H3F6/cB/1B2nb3wdagH1gIz+vP+B84DTgMWH2p/A68AfggEcBbwy7Lrr8K2Xwg0FK//ocO2z+y4XH94dLL9B/xeL34P3gs0A7OKvw31ZW9DT2//fvM/Afxdf9z/B/lbV+rPvj1nXXMGsDQzl2XmHuB64NKSa6qqzFyTmfcUr58GfgNMKbeqmnAp8PXi9deBV5dXSq95CfBoZlb7As+lyszbgE37NXe2vy8FvpEVdwGjImJSrxRaBQfa9sy8OTNbi8m7gKm9Xlgv6WTfd+ZS4PrM3J2ZjwFLqfyN6LMOtv0REcDvAdf1alG95CB/60r92Tecdc0UYEWH6ZUMoKASETOBU4FfFk1vK7pzr+mvh/UKCdwcEQsj4qqibUJmrilerwUmlFNar7qM5/5iHij7Hzrf3wPtd8IfUekt2GdWRPw6In4WES8sq6hecKDv9YG2718IPJmZj3Ro65f7f7+/daX+7BvOdFARMQz4DvCuzNwKfAGYA5wCrKHS3d1fnZuZpwEXA38eEed1nJmVPu5+fbpzRDQBlwD/WTQNpP3/HANhfx9IRPwN0Ap8s2haA0zPzFOBdwPfiogRZdVXRQP2e30/l/Pcf8765f4/wN+6Z5Txs28465pVwLQO01OLtn4tIhqpfLN+MzO/C5CZT2ZmW2a2A1+mj3fnH0xmriqe1wE3UNnWJ/d1YRfP68qrsFdcDNyTmU/CwNr/hc7294D4nRARbwReCfxB8QeK4nDexuL1Qipjro4urcgqOcj3+oDY9wAR0QD8LvDv+9r64/4/0N86Sv7ZN5x1zd3A3IiYVfQkXAbcWHJNVVWMM/gq8JvM/GSH9o7H1n8HWLz/uv1BRAyNiOH7XlMZHL2Yyn6/sljsSuD75VTYa57zX/NA2f8ddLa/bwSuKM7cOgvY0uEQSL8QERcB/we4JDN3dGhviYj64vVsYC6wrJwqq+cg3+s3ApdFRHNEzKKy/b/q7fp6yUuBBzNz5b6G/rb/O/tbR9k/+2WfKdFXHlTO0HiYyn8Jf1N2Pb2wvedS6ca9D1hUPF4B/Ctwf9F+IzCp7FqrtP2zqZyRdS+wZN8+B8YCPwEeAX4MjCm71ip+DYYCG4GRHdr67f6nEkLXAHupjCN5c2f7m8qZWp8vfh/cD8wvu/4qbPtSKmNr9v38f7FY9jXFz8Qi4B7gVWXXX6Xt7/R7HfibYt8/BFxcdv3V2P6i/VrgLfst26/2/0H+1pX6s+8dAiRJkmqIhzUlSZJqiOFMkiSphhjOJEmSaojhTJIkqYYYziRJkmqI4UzSgBARbRGxqMPjfT343jMjor9f801SL2kouwBJ6iU7M/OUsouQpEOx50zSgBYRyyPiHyPi/oj4VUQcVbTPjIifFje+/klETC/aJ0TEDRFxb/F4QfFW9RHx5YhYEhE3R8Tg0jZKUp9mOJM0UAze77Dm73eYtyUzTwI+B3yqaPss8PXMPJnKTb8/U7R/BvhZZs4DTqNytXSo3Mbm85l5ArCZypXUJemweYcASQNCRGzLzGEHaF8OXJCZy4obIK/NzLERsYHKLXv2Fu1rMnNcRKwHpmbm7g7vMRO4JTPnFtPvBRoz86O9sGmS+hl7ziSpcm+9A70+HLs7vG7DMb2SjpDhTJLg9zs831m8vgO4rHj9B8DtxeufAG8FiIj6iBjZW0VKGhj8z07SQDE4IhZ1mP5RZu67nMboiLiPSu/X5UXb24GvRcRfAeuBNxXt7wSujog3U+kheyuwptrFSxo4HHMmaUArxpzNz8wNZdciSeBhTUmSpJpiz5kkSVINsedMkiSphhjOJEmSaojhTJIkqYYYziRJkmqI4UySJKmGGM4kSZJqyP8HUUaRGN4qbKEAAAAASUVORK
5CYII=",
"text/plain": [
"<Figure size 720x360 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"plt.figure(figsize=(10, 5))\n",
"plt.plot(history.history['loss'])\n",
"plt.title('Model loss')\n",
"plt.ylabel('Loss')\n",
"plt.xlabel('Epoch')\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 138,
"metadata": {},
"outputs": [],
"source": [
"# Creating the coef frame for TF\n",
"coefs_df_tf = pd.DataFrame({\n",
" 'feature': features + ['intercept'],\n",
" 'coef_tf': np.append(model.get_weights()[0], model.get_weights()[1])\n",
"})"
]
},
{
"cell_type": "code",
"execution_count": 139,
"metadata": {},
"outputs": [],
"source": [
"## Merging the two dataframes \n",
"coefs_df_merged = coefs_df.merge(coefs_df_tf, on='feature')\n",
"coefs_df_merged['percent_diff'] = (coefs_df_merged['coef_sk'] - coefs_df_merged['coef_tf']) / coefs_df_merged['coef_sk'] * 100"
]
},
{
"cell_type": "code",
"execution_count": 140,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>feature</th>\n",
" <th>coef_sk</th>\n",
" <th>coef_tf</th>\n",
" <th>percent_diff</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>age</td>\n",
" <td>12999.031247</td>\n",
" <td>12999.976562</td>\n",
" <td>-0.007272</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>distance_from_center</td>\n",
" <td>-62430.341480</td>\n",
" <td>-62430.808594</td>\n",
" <td>-0.000748</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>floors</td>\n",
" <td>14022.123588</td>\n",
" <td>14021.656250</td>\n",
" <td>0.003333</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>sqft_living</td>\n",
" <td>99113.890140</td>\n",
" <td>99104.625000</td>\n",
" <td>0.009348</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>sqft_lot</td>\n",
" <td>9282.895480</td>\n",
" <td>9284.000000</td>\n",
" <td>-0.011898</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>sqft_above</td>\n",
" <td>72578.847965</td>\n",
" <td>72580.195312</td>\n",
" <td>-0.001856</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>intercept</td>\n",
" <td>540088.141767</td>\n",
" <td>539467.562500</td>\n",
" <td>0.114903</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" feature coef_sk coef_tf percent_diff\n",
"0 age 12999.031247 12999.976562 -0.007272\n",
"1 distance_from_center -62430.341480 -62430.808594 -0.000748\n",
"2 floors 14022.123588 14021.656250 0.003333\n",
"3 sqft_living 99113.890140 99104.625000 0.009348\n",
"4 sqft_lot 9282.895480 9284.000000 -0.011898\n",
"5 sqft_above 72578.847965 72580.195312 -0.001856\n",
"6 intercept 540088.141767 539467.562500 0.114903"
]
},
"execution_count": 140,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"coefs_df_merged"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.10.4 ('cuda_gpu_env')",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.4"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "1843498fe16f59076ae8a859587e2a2b45b21ed510245a5df4c4791774183e51"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment