{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "from keras.models import Model\n",
    "from keras.layers import Input, Dense, Conv1D, Flatten, Concatenate\n",
    "from keras import regularizers\n",
    "import logging\n",
    "logger = logging.getLogger('replicate_talos_float_hyperparameter_error')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "import talos as ta\n",
    "import keras\n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def make_net(input_layer, params):\n",
    "    net = input_layer\n",
    "\n",
    "    if params.get('conv_count', 0) != 0:\n",
    "        for _ in range(params['conv_count']):\n",
    "            newConv = Conv1D(\n",
    "                activation='relu',\n",
    "                filters=params['conv_filters'],\n",
    "                kernel_size=params['conv_kernel_size'],\n",
    "                kernel_regularizer=regularizers.l2(float(params['l2reg'])))\n",
    "            net = newConv(net)\n",
    "            # optional pooling\n",
    "            if params.get('pool_size', None) is not None:\n",
    "                newPool = keras.layers.MaxPooling1D(\n",
    "                    pool_size=params['pool_size'],\n",
    "                    strides=params.get(\n",
    "                        'pool_strides',\n",
    "                        params['pool_size']),\n",
    "                )\n",
    "                net = newPool(net)\n",
    "\n",
    "    net = Flatten()(net)\n",
    "\n",
    "    if params.get('dense_count', 0) != 0:\n",
    "        for dense_num in range(params['dense_count']):\n",
    "            newDense = Dense(\n",
    "                units=params['dense_units'],\n",
    "                activation='relu',\n",
    "                kernel_regularizer=regularizers.l2(float(params['l2reg'])),\n",
    "            )\n",
    "            net = newDense(net)\n",
    "\n",
    "    return net\n",
    "\n",
    "def make_optimizer(params):\n",
    "    opzr = keras.optimizers.Adam(**params.get('optimizer', {}))\n",
    "    return opzr\n",
    "\n",
    "import keras.backend as K\n",
    "def model(params):\n",
    "\n",
    "\n",
    "    def metric_correlation(y_true, y_pred):\n",
    "        logger.warn(f'[metric_correlation] y_true size: {y_true.shape}, y_predy size: {y_pred.shape}')\n",
    "\n",
    "        def m(x, w):\n",
    "            \"\"\"Weighted Mean\"\"\"\n",
    "            return K.sum(x * w) / K.sum(w)\n",
    "        def cov(x, y, w):\n",
    "            \"\"\"Weighted Covariance\"\"\"\n",
    "            return K.sum(w * (x - m(x, w)) * (y - m(y, w))) / K.sum(w)\n",
    "\n",
    "        def corr(x, y, w):\n",
    "            \"\"\"Weighted Correlation\"\"\"\n",
    "            return cov(x, y, w) / K.sqrt(cov(x, x, w) * cov(y, y, w))\n",
    "        print(y_true.get_shape())\n",
    "        return corr(y_true, y_pred, K.ones_like(y_true))\n",
    "    inputs = []\n",
    "\n",
    "    subnets = []\n",
    "\n",
    "\n",
    "    inpt = Input(shape=(30,), dtype='float32', name='returns_input')\n",
    "    inputs.append(inpt)\n",
    "\n",
    "    net = make_net(inpt, params)\n",
    "\n",
    "    condenser = Dense(units=1, activation=None, use_bias=False)\n",
    "    net = condenser(net)\n",
    "\n",
    "    model = Model(inputs=inputs, outputs=net)\n",
    "\n",
    "    opzr = make_optimizer(params)\n",
    "    model.compile(\n",
    "        loss='mean_squared_error',\n",
    "        optimizer=opzr,\n",
    "        metrics=[metric_correlation])\n",
    "    return model"
   ]
  },
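  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The custom `metric_correlation` above is a weighted Pearson correlation built from Keras backend ops: a weighted mean `m`, a weighted covariance `cov`, and `corr(x, y, w) = cov(x, y, w) / sqrt(cov(x, x, w) * cov(y, y, w))`, evaluated with unit weights. As a sanity check, here is a minimal NumPy sketch of the same formula (the function name is illustrative, not part of the model code):\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def weighted_corr(y_true, y_pred, w=None):\n",
    "    # With unit weights this reduces to the ordinary Pearson correlation.\n",
    "    w = np.ones_like(y_true) if w is None else w\n",
    "    m = lambda x: np.sum(x * w) / np.sum(w)                             # weighted mean\n",
    "    cov = lambda x, y: np.sum(w * (x - m(x)) * (y - m(y))) / np.sum(w)  # weighted covariance\n",
    "    return cov(y_true, y_pred) / np.sqrt(cov(y_true, y_true) * cov(y_pred, y_pred))\n",
    "```"
   ]
  },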
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "def model_ta(x_train, y_train, x_val, y_val, params):\n",
    "    print(params)\n",
    "    # replace the hyperparameter inputs with references to params dictionary\n",
    "    mdl = model(params)\n",
    "\n",
    "    history_callback = mdl.fit(x_train, y_train,\n",
    "                               batch_size=params['batch_size'],\n",
    "                               epochs=params['epochs'],\n",
    "                               verbose=1,\n",
    "                               validation_data=[x_val, y_val])\n",
    "    history_callback.history['val_acc'] = [42]\n",
    "    # modify the output model\n",
    "    return history_callback, mdl"
   ]
  },
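  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The line `history_callback.history['val_acc'] = [42]` injects a dummy validation-accuracy entry, presumably because this version of Talos expects a `val_acc` key in the Keras history even for regression models that only report `val_loss` and the custom metric. A slightly more defensive variant of the same hack (a sketch, not part of the original gist) would only add the placeholder when the key is missing:\n",
    "\n",
    "```python\n",
    "if 'val_acc' not in history_callback.history:\n",
    "    history_callback.history['val_acc'] = [42]  # placeholder so code that looks for 'val_acc' does not fail (assumption)\n",
    "```"
   ]
  },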
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "## Compare this:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 0%| | 0/3 [00:00<?, ?it/s][metric_correlation] y_true size: (?, ?), y_predy size: (?, 1)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'dense_count': 3, 'dense_units': 200, 'conv_count': 0, 'l2reg': 1000, 'epochs': 4, 'batch_size': 32}\n",
      "(?, ?)\n",
      "Train on 3500 samples, validate on 1500 samples\n",
      "Epoch 1/4\n",
      "3500/3500 [==============================] - 1s 283us/step - loss: 149230.8663 - metric_correlation: 0.0016 - val_loss: 17388.8659 - val_metric_correlation: -0.0277\n",
      "Epoch 2/4\n",
      "3500/3500 [==============================] - 0s 53us/step - loss: 4440.2120 - metric_correlation: 0.0266 - val_loss: 290.6183 - val_metric_correlation: 0.0013\n",
      "Epoch 3/4\n",
      "3500/3500 [==============================] - 0s 54us/step - loss: 69.2349 - metric_correlation: nan - val_loss: 4.7466 - val_metric_correlation: nan\n",
      "Epoch 4/4\n",
      "3500/3500 [==============================] - 0s 53us/step - loss: 1.8202 - metric_correlation: nan - val_loss: 1.0265 - val_metric_correlation: nan\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 33%|███▎ | 1/3 [00:01<00:03, 1.97s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'dense_count': 1, 'dense_units': 200, 'conv_count': 0, 'l2reg': 1000, 'epochs': 4, 'batch_size': 32}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[metric_correlation] y_true size: (?, ?), y_predy size: (?, 1)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(?, ?)\n",
      "Train on 3500 samples, validate on 1500 samples\n",
      "Epoch 1/4\n",
      "3500/3500 [==============================] - 0s 77us/step - loss: 20710.3201 - metric_correlation: 0.0077 - val_loss: 4473.2741 - val_metric_correlation: 0.0119\n",
      "Epoch 2/4\n",
      "3500/3500 [==============================] - 0s 39us/step - loss: 1468.8747 - metric_correlation: -0.0017 - val_loss: 195.5151 - val_metric_correlation: 0.0023\n",
      "Epoch 3/4\n",
      "3500/3500 [==============================] - 0s 40us/step - loss: 53.3786 - metric_correlation: 0.0280 - val_loss: 4.7616 - val_metric_correlation: -0.0027\n",
      "Epoch 4/4\n",
      "3500/3500 [==============================] - 0s 39us/step - loss: 1.8406 - metric_correlation: 0.0354 - val_loss: 1.0286 - val_metric_correlation: -0.0225\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 67%|██████▋ | 2/3 [00:02<00:01, 1.65s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'dense_count': 1, 'dense_units': 100, 'conv_count': 0, 'l2reg': 1, 'epochs': 4, 'batch_size': 32}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[metric_correlation] y_true size: (?, ?), y_predy size: (?, 1)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(?, ?)\n",
      "Train on 3500 samples, validate on 1500 samples\n",
      "Epoch 1/4\n",
      "3500/3500 [==============================] - 0s 75us/step - loss: 23.8993 - metric_correlation: -0.0101 - val_loss: 9.1448 - val_metric_correlation: -0.0156\n",
      "Epoch 2/4\n",
      "3500/3500 [==============================] - 0s 39us/step - loss: 4.5872 - metric_correlation: -0.0084 - val_loss: 2.0304 - val_metric_correlation: -0.0062\n",
      "Epoch 3/4\n",
      "3500/3500 [==============================] - 0s 40us/step - loss: 1.4057 - metric_correlation: 0.0063 - val_loss: 1.0943 - val_metric_correlation: -0.0180\n",
      "Epoch 4/4\n",
      "3500/3500 [==============================] - 0s 40us/step - loss: 1.0367 - metric_correlation: 0.0385 - val_loss: 1.0130 - val_metric_correlation: -0.0246\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "100%|██████████| 3/3 [00:03<00:00, 1.42s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Scan Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "h = ta.Scan(x=np.random.normal(loc=0.0, scale=1.0, size=(5000, 30)),\n",
    "            y=np.random.normal(loc=0.0, scale=1.0, size=(5000, 1)),\n",
    "            params={\n",
    "                'dense_count': [1, 2, 3],\n",
    "                'dense_units': [50, 100, 150, 200],\n",
    "                'conv_count': [0],\n",
    "                'l2reg': [1, 100, 1000],\n",
    "                'epochs': [4],\n",
    "                'batch_size': [32]\n",
    "            },\n",
    "            model=model_ta,\n",
    "            grid_downsample=0.1)"
   ]
  },
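  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A note on the scan above: the parameter grid has 3 * 4 * 1 * 3 * 1 * 1 = 36 permutations, and `grid_downsample=0.1` keeps roughly 10% of them, which matches the 3 iterations shown in the progress bar. Because `x` and `y` are independent Gaussian noise, `metric_correlation` is expected to hover near zero; with `l2reg` as large as 1000 it can even become `nan` (as in the first run), plausibly because the heavily regularized weights shrink toward zero, the predictions become nearly constant, and the correlation's denominator vanishes."
   ]
  },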
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "rpt = ta.Reporting(h)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "ename": "ValueError",
     "evalue": "cannot insert metric_correlation, already exists",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-8-21813ace0770>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mrpt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcorrelate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'metric_correlation'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/talos/commands/reporting.py\u001b[0m in \u001b[0;36mcorrelate\u001b[0;34m(self, metric)\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0mcolumns\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mc\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcolumns\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mc\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mmetric_names\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 54\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mcolumns\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 55\u001b[0;31m \u001b[0mout\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minsert\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmetric\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmetric\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 56\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mout\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcorr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmetric\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 57\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/pandas/core/frame.py\u001b[0m in \u001b[0;36minsert\u001b[0;34m(self, loc, column, value, allow_duplicates)\u001b[0m\n\u001b[1;32m 3217\u001b[0m \u001b[0mvalue\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_sanitize_column\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcolumn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbroadcast\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3218\u001b[0m self._data.insert(loc, column, value,\n\u001b[0;32m-> 3219\u001b[0;31m allow_duplicates=allow_duplicates)\n\u001b[0m\u001b[1;32m 3220\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3221\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0massign\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/pandas/core/internals.py\u001b[0m in \u001b[0;36minsert\u001b[0;34m(self, loc, item, value, allow_duplicates)\u001b[0m\n\u001b[1;32m 4336\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mallow_duplicates\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mitem\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4337\u001b[0m \u001b[0;31m# Should this be a different kind of error??\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 4338\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'cannot insert {}, already exists'\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mitem\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4339\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4340\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mloc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mValueError\u001b[0m: cannot insert metric_correlation, already exists"
     ]
    }
   ],
   "source": [
    "rpt.correlate('metric_correlation')"
   ]
  }
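  ,
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The traceback shows why this fails: `Reporting.correlate` builds `out` from every column *not* returned by `metric_names()`, then inserts `self.data[metric]` at position 0. A custom metric such as `metric_correlation` is not in `metric_names()`, so its column is still present in `out` and the insert raises `ValueError: cannot insert metric_correlation, already exists`. Until that is fixed upstream, the same correlation table can be computed directly with pandas; this is a workaround sketch that assumes `rpt.data` is the results DataFrame referenced as `self.data` in the traceback:\n",
    "\n",
    "```python\n",
    "# Workaround sketch (not part of the Talos API): correlate the custom metric\n",
    "# against the other numeric result columns directly with pandas.\n",
    "df = rpt.data\n",
    "print(df.corr()['metric_correlation'].drop('metric_correlation'))\n",
    "```"
   ]
  }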
 ],
 "metadata": {
  "_draft": {
   "nbviewer_url": "https://gist.github.com/fba8417e04b1c9094bf924db0a2938ec"
  },
  "gist": {
   "data": {
    "description": "test_custom_metric_corr.ipynb",
    "public": true
   },
   "id": "fba8417e04b1c9094bf924db0a2938ec"
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {
    "height": "calc(100% - 180px)",
    "left": "10px",
    "top": "150px",
    "width": "381px"
   },
   "toc_section_display": true,
   "toc_window_display": true
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}