Loading a model in memory
# Deep learning
import tensorflow as tf
import keras

# Memory tracking
from memory_profiler import profile


@profile
def create_model(
    input_size: int,
    hidden_neuron_count: int,
    optimizer_name: str,
    learning_rate: float
) -> keras.Sequential:
    """
    Initializes a model in RAM

    Arguments
    ---------
    input_size: int
        The size of the input layer
    hidden_neuron_count: int
        The number of neurons in each hidden layer
    optimizer_name: str
        The optimizer to use; available options are: 'adam', 'sgd', 'rmsprop'
    learning_rate: float
        The learning rate to use

    Returns
    -------
    model: keras.Sequential
        The compiled model in RAM
    """
    # Defining a simple feed-forward network
    model = keras.Sequential([
        keras.layers.Dense(hidden_neuron_count, activation=tf.nn.relu, input_shape=(input_size,)),
        keras.layers.Dense(hidden_neuron_count, activation=tf.nn.relu),
        keras.layers.Dense(1)
    ])

    # Selecting the optimizer; `learning_rate` replaces the deprecated `lr` argument
    if optimizer_name == 'adam':
        optimizer = keras.optimizers.Adam(learning_rate=learning_rate)
    elif optimizer_name == 'sgd':
        optimizer = keras.optimizers.SGD(learning_rate=learning_rate)
    elif optimizer_name == 'rmsprop':
        optimizer = keras.optimizers.RMSprop(learning_rate=learning_rate)
    else:
        raise ValueError(f"Unknown optimizer: '{optimizer_name}'")

    # Compiling the model
    model.compile(
        optimizer=optimizer,
        loss='mean_squared_error',
        metrics=['mean_squared_error']
    )

    # Returning the model
    return model


if __name__ == '__main__':
    # Initiating the model in memory
    model = create_model(18, 128, 'adam', 0.001)
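With the @profile decorator in place, memory_profiler can report how much RAM building the model consumes. Below is a minimal sketch of the two usual ways to collect that report, assuming the snippet above is saved as load_model.py (a hypothetical filename; only memory_profiler's documented entry points are used):

# Option 1: run the script under the profiler from the shell, which prints
# a line-by-line memory increment table for create_model:
#   python -m memory_profiler load_model.py

# Option 2: sample memory programmatically while the function runs
from memory_profiler import memory_usage

from load_model import create_model  # hypothetical module name for the gist above

if __name__ == '__main__':
    # memory_usage accepts a (callable, args) tuple and samples RSS in MiB
    samples = memory_usage((create_model, (18, 128, 'adam', 0.001)), interval=0.1)
    print(f'Peak memory while building the model: {max(samples):.1f} MiB')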