CryptocurrencyPrediction
import os
import h5py
import tensorflow as tf
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, GRU
from keras.callbacks import CSVLogger, ModelCheckpoint
from keras.backend.tensorflow_backend import set_session

# Pin the job to a single GPU and let TensorFlow grow GPU memory as needed
# (Keras 2.x running on the TensorFlow 1.x backend).
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
# Load the preprocessed close-price windows:
#   inputs  -> (num_samples, step_size, nb_features) sliding windows
#   outputs -> (num_samples, output_size, nb_features) future prices
with h5py.File('bitcoin2015to2017_close.h5', 'r') as hf:
    datas = hf['inputs'][:]
    labels = hf['outputs'][:]
output_file_name = 'bitcoin2015to2017_close_GRU_1_tanh_relu_'

# Hyperparameters
step_size = datas.shape[1]      # length of each input window
nb_features = datas.shape[2]    # features per time step (close price only)
units = 50                      # GRU hidden units
batch_size = 8
epochs = 100
output_size = 16                # number of future steps to predict
# Split into training and validation sets (80/20, no shuffling, so the
# validation windows come after the training windows in time).
training_size = int(0.8 * datas.shape[0])
training_datas = datas[:training_size, :]
training_labels = labels[:training_size, :, 0]
validation_datas = datas[training_size:, :]
validation_labels = labels[training_size:, :, 0]
# Build the model: a single GRU layer followed by a dense output layer that
# regresses the next output_size close prices.
model = Sequential()
model.add(GRU(units=units, input_shape=(step_size, nb_features), return_sequences=False))
model.add(Activation('tanh'))
model.add(Dropout(0.2))
model.add(Dense(output_size))
model.add(Activation('relu'))
model.compile(loss='mse', optimizer='adam')

# Make sure the checkpoint directory exists before training starts.
os.makedirs('weights', exist_ok=True)

model.fit(training_datas, training_labels, batch_size=batch_size, epochs=epochs,
          validation_data=(validation_datas, validation_labels),
          callbacks=[CSVLogger(output_file_name + '.csv', append=True),
                     ModelCheckpoint('weights/' + output_file_name + '-{epoch:02d}-{val_loss:.5f}.hdf5',
                                     monitor='val_loss', verbose=1, mode='min')])
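After training, the saved checkpoints can be loaded back to generate forecasts for the validation windows. A minimal sketch is shown below; it assumes at least one checkpoint was written to weights/ with the ModelCheckpoint filename template above, and simply takes the last file in sorted order (a hypothetical choice, not part of the original script).

import glob
from keras.models import load_model

# Find checkpoints written by the training run; the pattern mirrors the
# ModelCheckpoint template used above. Picking the last sorted file is an
# arbitrary illustrative choice.
checkpoints = sorted(glob.glob('weights/bitcoin2015to2017_close_GRU_1_tanh_relu_-*.hdf5'))
best_model = load_model(checkpoints[-1])

# Predict the next output_size (16) close prices for each validation window.
predicted = best_model.predict(validation_datas)
print(predicted.shape)  # (num_validation_samples, 16)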