@qharlie
Created June 30, 2017 02:34
In [1]:
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt

dataset = np.loadtxt('normalized_ibm_prices.csv')
plt.plot(dataset)
plt.xlabel('time period')
plt.ylabel('normalized series value')
executed in 265ms, finished 21:27:20 2017-06-29
Out[1]:
<matplotlib.text.Text at 0x7f3cc0f842b0>
In [4]:
def window_transform_series(series, window_size, idx=3):
    # containers for input/output pairs
    X = []
    y = []
    upper_range = len(series) - window_size
    for i in range(0, upper_range):
        window_index = i + window_size
        # input: the window_size values preceding the target
        X_temp = [series[j] for j in range(i, window_index)]
        X.append(X_temp)
        # output: the next idx values after the window
        # (the last few slices run past the end of the series and come back shorter)
        y.append(series[i + window_size:i + window_size + idx])
    # reshape each
    X = np.asarray(X)
    X.shape = np.shape(X)[0:2]
    y = np.asarray(y)        # ragged rows make this an object array of shape (len(y),)
    y.reshape(len(y), 1)     # note: the result is never assigned, so y is returned as-is
    return X, y

window_size = 7
X, y = window_transform_series(dataset, 7)
print(X.shape)
print(y.shape)
print(X[0])
print(y[0])
executed in 42ms, finished 21:28:01 2017-06-29
(1756, 7)
(1756,)
[ 109.622681 110.152489 109.454445 109.874969 108.739616 109.420853
107.806137]
[ 106.906273 108.470512 105.199074]
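The output above shows the mismatch the notebook runs into later: X comes out as (1756, 7) as expected, but y is a 1-D object array whose entries are length-3 slices (shorter near the end of the series) rather than one float per window. For comparison, a minimal sketch of a single-step window transform (the function name and the single-step assumption are mine, not from the original notebook) would be:

# Hedged sketch, not from the original notebook: a window transform whose
# target is the single value immediately after each window, so y is a clean
# float array of shape (samples, 1) instead of an object array of slices.
def window_transform_series_single_step(series, window_size):
    X, y = [], []
    for i in range(len(series) - window_size):
        X.append(series[i:i + window_size])   # the window of inputs
        y.append(series[i + window_size])     # the one value to predict
    return np.asarray(X), np.asarray(y).reshape(-1, 1)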
In [6]:
# split our dataset into training / testing sets
train_test_split = int(np.ceil(2*len(y)/float(3))) # set the split point
# partition the training set
X_train = X[:train_test_split,:]
y_train = y[:train_test_split]
# keep the last chunk for testing
X_test = X[train_test_split:,:]
y_test = y[train_test_split:]
# NOTE: to use Keras's LSTM layer the input must be reshaped to [samples, timesteps, features]
X_train = np.reshape(X_train, (X_train.shape[0], window_size, 1))
X_test = np.reshape(X_test, (X_test.shape[0], window_size, 1))
executed in 10ms, finished 21:28:35 2017-06-29
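A quick sanity check of the split arrays (a hedged sketch, using only the variables already defined above) would reveal why the fit below fails: X_train is a well-formed float tensor, but y_train is still an object array of slices.

# Hedged sketch: inspect shapes and dtypes before fitting.
print(X_train.shape, X_train.dtype)   # expected: (1171, 7, 1) float64
print(y_train.shape, y_train.dtype)   # with the transform above: (1171,) object
print(type(y_train[0]), np.shape(y_train[0]))   # each entry is a length-3 (or shorter) array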
In [7]:
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.callbacks import EarlyStopping
import keras
# given: fix the random seed so results on the default time series are reproducible
np.random.seed(0)
# TODO: build an RNN to perform regression on our time series input/output data
model = Sequential()
model.add(LSTM(5, input_shape = (window_size,1)))
model.add(Dense(1, activation=None))
print(len(X_train))
print(len(y_train))
# build the optimizer with the initialization recommended in the Keras documentation
optimizer = keras.optimizers.RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)
# compile the model
model.compile(loss='mean_squared_error', optimizer=optimizer)
model.fit(X_train, y_train, epochs=100, verbose=2)
executed in 7.78s, finished 21:28:48 2017-06-29
Using Theano backend.
WARNING (theano.sandbox.cuda): The cuda backend is deprecated and will be removed in the next release (v0.10). Please switch to the gpuarray backend. You can get more information about how to switch at this URL:
https://github.com/Theano/Theano/wiki/Converting-to-the-new-gpu-back-end%28gpuarray%29
Using gpu device 0: GeForce GTX 850M (CNMeM is enabled with initial size: 75.0% of memory, cuDNN 5110)
1171
1171
Epoch 1/100
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-7-5cf5b03019f2> in <module>()
24 model.compile(loss='mean_squared_error', optimizer=optimizer)
25
---> 26 model.fit(X_train, y_train, epochs=100, verbose=2)
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/keras/models.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, **kwargs)
868 class_weight=class_weight,
869 sample_weight=sample_weight,
--> 870 initial_epoch=initial_epoch)
871
872 def evaluate(self, x, y, batch_size=32, verbose=1,
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, **kwargs)
1505 val_f=val_f, val_ins=val_ins, shuffle=shuffle,
1506 callback_metrics=callback_metrics,
-> 1507 initial_epoch=initial_epoch)
1508
1509 def evaluate(self, x, y, batch_size=32, verbose=1, sample_weight=None):
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/keras/engine/training.py in _fit_loop(self, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch)
1154 batch_logs['size'] = len(batch_ids)
1155 callbacks.on_batch_begin(batch_index, batch_logs)
-> 1156 outs = f(ins_batch)
1157 if not isinstance(outs, list):
1158 outs = [outs]
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/keras/backend/theano_backend.py in __call__(self, inputs)
1194 def __call__(self, inputs):
1195 assert isinstance(inputs, (list, tuple))
-> 1196 return self.function(*inputs)
1197
1198
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/theano/compile/function_module.py in __call__(self, *args, **kwargs)
793 s.storage[0] = s.type.filter(
794 arg, strict=s.strict,
--> 795 allow_downcast=s.allow_downcast)
796
797 except Exception as e:
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/theano/tensor/type.py in filter(self, data, strict, allow_downcast)
115 if allow_downcast:
116 # Convert to self.dtype, regardless of the type of data
--> 117 data = theano._asarray(data, dtype=self.dtype)
118 # TODO: consider to pad shape with ones to make it consistent
119 # with self.broadcastable... like vector->row type thing
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/theano/misc/safe_asarray.py in _asarray(a, dtype, order)
32 dtype = theano.config.floatX
33 dtype = np.dtype(dtype) # Convert into dtype object.
---> 34 rval = np.asarray(a, dtype=dtype, order=order)
35 # Note that dtype comparison must be done by comparing their `num`
36 # attribute. One cannot assume that two identical data types are pointers
/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/numpy/core/numeric.py in asarray(a, dtype, order)
529
530 """
--> 531 return array(a, dtype, copy=False, order=order)
532
533
ValueError: Bad input argument to theano function with name "train_function" at index 1 (0-based).
Backtrace when that variable is created:
File "/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/ipykernel/zmqshell.py", line 533, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/IPython/core/interactiveshell.py", line 2683, in run_cell
interactivity=interactivity, compiler=compiler, result=result)
File "/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/IPython/core/interactiveshell.py", line 2787, in run_ast_nodes
if self.run_code(code, result):
File "/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/IPython/core/interactiveshell.py", line 2847, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-7-5cf5b03019f2>", line 24, in <module>
model.compile(loss='mean_squared_error', optimizer=optimizer)
File "/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/keras/models.py", line 788, in compile
**kwargs)
File "/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/keras/engine/training.py", line 890, in compile
dtype=K.dtype(self.outputs[i]))
File "/home/q/anaconda3/envs/p3/lib/python3.6/site-packages/keras/backend/theano_backend.py", line 221, in placeholder
x = T.TensorType(dtype, broadcast)(name)
setting an array element with a sequence.
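The ValueError traces back to the ragged targets: compile creates a float32 placeholder for the Dense(1) output, and at fit time Theano cannot cast y_train, an object array whose elements are 3-value (or shorter) slices, into that tensor, hence "setting an array element with a sequence." One hedged way out, assuming the goal is single-step prediction and reusing the window_transform_series_single_step sketch from above, is to rebuild the targets so each sample has exactly one float:

# Hedged sketch (assumes single-step prediction; reuses the hypothetical
# window_transform_series_single_step helper sketched earlier).
X, y = window_transform_series_single_step(dataset, window_size)
train_test_split = int(np.ceil(2 * len(y) / float(3)))
X_train, y_train = X[:train_test_split], y[:train_test_split]
X_test, y_test = X[train_test_split:], y[train_test_split:]
X_train = X_train.reshape(X_train.shape[0], window_size, 1)
X_test = X_test.reshape(X_test.shape[0], window_size, 1)
model.fit(X_train, y_train, epochs=100, verbose=2)   # y_train is now (samples, 1) float

Keeping the three-step target would also work, but then the windowing loop has to stop early enough that every slice has exactly idx values, and the output layer has to be Dense(idx) (Dense(3) here) so the target width matches the network.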