'''Test of stateful LSTM.
This trains an LSTM to convert a frequency-modulated signal to a sine wave.
The period of the signal is greater than the temporal dimension of the LSTM,
so in theory the stateful version should have an advantage.
'''
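# Added context (per the Keras docs, not in the original gist): with
# stateful=True, the final LSTM state for sample i of a batch is reused as the
# initial state for sample i of the following batch, so information can persist
# beyond the seqlen-step window; with stateful=False, states are reset to zero
# after every batch.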
from __future__ import print_function
import os
os.environ['KERAS_BACKEND'] = 'tensorflow'
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.layers.recurrent import LSTM
from keras.callbacks import EarlyStopping
import math
import matplotlib.pyplot as plt
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--stateful', dest='stateful', action='store_true')
parser.add_argument('--seqlen', type=int, default=32)
parser.add_argument('--epochs', type=int, default=100)
parser.add_argument('--units', type=int, default=100)
parser.add_argument('--freq', type=float, default=0.01)
args = parser.parse_args()
print(args)
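# Example invocation (illustrative; substitute the actual script name):
#   python stateful_lstm_test.py --stateful --epochs 200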
# The batch size is tied to seqlen so that the train/test splits below
# (multiples of seqlen) divide evenly into whole batches, which a stateful
# LSTM with a fixed batch_input_shape requires.
batch_size = seqlen = args.seqlen
nb_epoch = args.epochs
nb_units = args.units
stateful = args.stateful
freq = args.freq
nsamples = seqlen*25   # total length of the generated signal
ntest = seqlen*5       # samples held out for validation
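# Worked out for the defaults (added note, not in the original gist): with
# freq=0.01 the modulation period is 1/freq = 100 samples, roughly 3x the
# 32-step window the LSTM sees at once -- which is the point of the test.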
def f(x):
    # Frequency-modulated sinusoid: the instantaneous frequency works out to
    # 1 + 0.9*sin(2*pi*freq*x) cycles per sample, so the modulation period
    # (1/freq samples) is much longer than the seqlen-step LSTM window.
    return math.sin(math.pi*2*(1*x+.9/(math.pi*2*freq)*(1-math.cos(math.pi*2*freq*x))))

def make_sequences(seq, seqlen):
    # Slice a 1-D series into overlapping windows of length seqlen,
    # shaped (n_windows, seqlen, 1) for the LSTM input.
    result = []
    for i in range(0,len(seq)-seqlen):
        result.append(seq[i:i+seqlen])
    return np.array(result).reshape(-1,seqlen,1)
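# Quick sanity check (an illustrative addition, not in the original gist):
# a length-5 series with window length 2 yields 3 overlapping windows.
_demo = make_sequences(np.arange(5), 2)
assert _demo.shape == (3, 2, 1)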
X = np.array( [f(x) for x in range(0,nsamples)] )
# Target: the unmodulated sine at the modulator frequency. Window X[i]
# covers signal[i:i+seqlen] and is paired with the target at time i.
y = np.array( [math.sin(x*math.pi*2*freq) for x in range(0,nsamples-seqlen)] )
#plt.plot(X[0:300])
#plt.plot(y[0:300])
#plt.show()
X = make_sequences(X, seqlen)
X_test = X[-ntest:]
y_test = y[-ntest:]
X_train = X[0:-ntest]
y_train = y[0:-ntest]
model = Sequential()
# A stateful LSTM needs a fixed batch size, hence batch_input_shape
# rather than input_shape.
model.add(LSTM(nb_units, stateful=stateful, batch_input_shape=(batch_size,seqlen,1)))
model.add(Dropout(0.2))
#model.add(LSTM(nb_units, stateful=stateful))
#model.add(Dropout(0.2))
model.add(Dense(nb_units, activation='tanh'))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adadelta')
model.summary()
print("Stateful = %r" % stateful)
# Shuffling would destroy the cross-batch ordering a stateful LSTM depends on,
# so shuffle only in the stateless case.
model.fit(X_train, y_train,
          batch_size=batch_size, nb_epoch=nb_epoch,
          shuffle=not stateful,
          callbacks=[EarlyStopping(monitor='val_loss', patience=3, verbose=0, mode='auto')],
          verbose=1, validation_data=(X_test, y_test))
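# Evaluation sketch (an added illustration, not part of the original gist):
# reset any state carried over from training, predict on the held-out windows,
# and compare the prediction against the target sine visually.
if stateful:
    model.reset_states()
y_pred = model.predict(X_test, batch_size=batch_size)
plt.plot(y_test, label='target sine')
plt.plot(y_pred[:,0], label='LSTM prediction')
plt.legend()
plt.show()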