# coding: utf-8

# In[ ]:

# In[1]:

get_ipython().magic('matplotlib inline')
import numpy as np
import matplotlib.pyplot as plt

dataset = np.loadtxt('normalized_ibm_prices.csv')
plt.plot(dataset)
plt.xlabel('time period')
plt.ylabel('normalized series value')

# In[4]:
def window_transform_series(series, window_size, idx=1):
    # idx = number of future values kept as the target for each window
    # (idx = 1 keeps the targets compatible with the single-output model below)
    # containers for input/output pairs
    X = []
    y = []
    # stop early enough that every window still has idx values after it
    upper_range = len(series) - window_size - idx + 1
    for i in range(0, upper_range):
        window_index = i + window_size
        X_temp = [series[j] for j in range(i, window_index)]
        X.append(X_temp)
        y.append(series[window_index:window_index + idx])
    # reshape each into 2-D arrays: X is (samples, window_size), y is (samples, idx)
    X = np.asarray(X)
    y = np.asarray(y)
    y = y.reshape(len(y), idx)
    return X, y
window_size = 7
X, y = window_transform_series(dataset, window_size)
print(X.shape)
print(y.shape)
print(X[0])
print(y[0])
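
# In[ ]:

# Sanity check on a toy series (illustrative only, not part of the original cell):
# with the series 0..9, window_size = 3 and idx = 1, every input row should be
# three consecutive values and every target the value that immediately follows
# that window. The names toy_X / toy_y are just placeholders.
toy_X, toy_y = window_transform_series(np.arange(10, dtype=float), 3, idx=1)
print(toy_X)  # rows [0, 1, 2], [1, 2, 3], ..., [6, 7, 8]
print(toy_y)  # [[3.], [4.], ..., [9.]]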
# In[6]:

# split our dataset into training / testing sets
train_test_split = int(np.ceil(2 * len(y) / float(3)))  # set the split point

# partition the training set
X_train = X[:train_test_split, :]
y_train = y[:train_test_split]

# keep the last chunk for testing
X_test = X[train_test_split:, :]
y_test = y[train_test_split:]

# NOTE: Keras's LSTM layer expects input shaped as [samples, time steps, features],
# so each window becomes window_size time steps of a single feature
X_train = np.reshape(X_train, (X_train.shape[0], window_size, 1))
X_test = np.reshape(X_test, (X_test.shape[0], window_size, 1))
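
# In[ ]:

# Quick shape check (illustrative): with window_size = 7 and a single feature,
# X_train should come out as (samples, 7, 1) and y_train as (samples, 1),
# which is what the LSTM layer below expects.
print(X_train.shape, y_train.shape)
print(X_test.shape, y_test.shape)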
# In[7]:

from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.callbacks import EarlyStopping
import keras
# given - fix the random seed so we can all reproduce the same results on our default time series
np.random.seed(0)

# TODO: build an RNN to perform regression on our time series input/output data
model = Sequential()
model.add(LSTM(5, input_shape=(window_size, 1)))
model.add(Dense(1, activation=None))  # linear output for regression
print(len(X_train))
print(len(y_train))

# build model using keras documentation recommended optimizer initialization
optimizer = keras.optimizers.RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)

# compile the model
model.compile(loss='mean_squared_error', optimizer=optimizer)
model.fit(X_train, y_train, epochs=100, verbose=2)
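
# In[ ]:

# Illustrative follow-up (not part of the original cell): one way to gauge fit
# quality is to compare the mean squared error on the training and testing
# partitions created above. The names train_error / test_error are placeholders.
train_error = model.evaluate(X_train, y_train, verbose=0)
test_error = model.evaluate(X_test, y_test, verbose=0)
print('training MSE: %.6f' % train_error)
print('testing MSE: %.6f' % test_error)

# EarlyStopping is imported above but never used; one possible way to apply it
# would be to hold out part of the training data and stop when validation loss
# stops improving, e.g.:
# early_stop = EarlyStopping(monitor='val_loss', patience=5)
# model.fit(X_train, y_train, epochs=100, validation_split=0.1,
#           callbacks=[early_stop], verbose=2)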