@leotam
Last active March 26, 2020 05:11
import time

from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Reshape, Flatten
from keras.layers import Conv2D, Conv2DTranspose
from keras.optimizers import RMSprop
from keras import regularizers


def init_model():
    start_time = time.time()
    print('Compiling Model ... ')

    model = Sequential()

    # Fully connected stack: flatten the 1x128x128 input, expand, then map back
    # to a 128*128 vector before reshaping into a single-channel image.
    model.add(Flatten(input_shape=(1, 128, 128)))
    model.add(Dense(2 * 128 * 128))
    model.add(Activation('tanh'))
    model.add(Dense(128 * 128))
    model.add(Activation('tanh'))
    model.add(Dense(128 * 128))
    model.add(Activation('tanh'))
    model.add(Reshape((1, 128, 128)))

    # Convolutional refinement with L1 weight regularization (channels-first layout).
    model.add(Conv2D(filters=64, kernel_size=(5, 5), strides=(1, 1), activation='relu',
                     data_format='channels_first', kernel_regularizer=regularizers.l1(0.01)))
    model.add(Conv2D(filters=64, kernel_size=(5, 5), strides=(1, 1), activation='relu',
                     data_format='channels_first', kernel_regularizer=regularizers.l1(0.01)))

    # Transposed convolution restores the 128x128 spatial size lost by the two valid convolutions.
    model.add(Conv2DTranspose(filters=1, kernel_size=(9, 9), strides=(1, 1), activation='relu',
                              data_format='channels_first'))

    rms = RMSprop()
    model.compile(loss='mean_squared_error', optimizer=rms, metrics=['mean_squared_error'])

    print('Model compiled in {0} seconds'.format(time.time() - start_time))
    return model
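
For context, a minimal usage sketch (not part of the original gist): it assumes channels-first arrays of shape (num_samples, 1, 128, 128), here filled with random placeholder values, a backend configured for the channels_first data format, and Keras's standard fit loop.

import numpy as np

# Hypothetical placeholder data; replace with real k-space inputs and target images.
num_samples = 32
x_train = np.random.rand(num_samples, 1, 128, 128).astype('float32')
y_train = np.random.rand(num_samples, 1, 128, 128).astype('float32')

model = init_model()
model.fit(x_train, y_train, batch_size=8, epochs=2, validation_split=0.1)

The model's output shape matches its input shape (1, 128, 128), so targets can be images of the same size.
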
@rmsouza01

Thanks for sharing this code! Do you have any thoughts about the best way to normalize the input k-space to train this network?
Thanks again!
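
(Not an answer from the gist author, but one common option is to scale each complex k-space slice by its peak magnitude before feeding it to the network; a sketch of that idea, using a hypothetical helper named normalize_kspace:)

import numpy as np

def normalize_kspace(kspace):
    """Scale a complex k-space array by its peak magnitude (hypothetical helper)."""
    scale = np.max(np.abs(kspace))
    if scale == 0:
        return kspace
    return kspace / scale

# The network above takes a single real-valued channel, so one choice is to feed
# the normalized magnitude, reshaped to (batch, 1, 128, 128).
kspace = np.random.randn(128, 128) + 1j * np.random.randn(128, 128)
x = np.abs(normalize_kspace(kspace))[np.newaxis, np.newaxis]
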
