Skip to content

Instantly share code, notes, and snippets.

@ntakouris
Created November 19, 2020 19:31
Show Gist options
  • Save ntakouris/859f9e3fa01e79f79926bb6b048b6788 to your computer and use it in GitHub Desktop.
Save ntakouris/859f9e3fa01e79f79926bb6b048b6788 to your computer and use it in GitHub Desktop.
IntelliJ IDEA / PyCharm
from typing import Dict, Text
import tensorflow as tf
from absl import logging
from tensorflow.keras.layers import (LSTM, Activation, Concatenate, Dense)
import kerastuner
from rnn.constants import (INPUT_FEATURE_KEYS, PREDICT_FEATURE_KEYS,
HP_HIDDEN_LATENT_DIM,
HP_HIDDEN_LAYER_NUM, HP_LR,
HP_PRE_OUTPUT_UNITS,
INPUT_WINDOW_SIZE,
OUTPUT_WINDOW_SIZE)
from input_fn_utils import transformed_name
from model_utils import get_input_graph, get_output_graph
def build_keras_model(hparams: kerastuner.HyperParameters) -> tf.keras.Model:
    """Build and compile a stacked-LSTM forecasting model from tuner hyperparameters.

    Args:
        hparams: kerastuner HyperParameters supplying HP_HIDDEN_LAYER_NUM,
            HP_HIDDEN_LATENT_DIM, HP_PRE_OUTPUT_UNITS and HP_LR.

    Returns:
        A compiled tf.keras.Model wired from the project's input graph to its
        output graph, using MAE loss and an Adam optimizer.
    """
    input_layers, pre_model_input = get_input_graph(
        INPUT_FEATURE_KEYS, INPUT_WINDOW_SIZE)

    x = pre_model_input

    # ====== hidden LSTM stack
    layer_num = int(hparams.get(HP_HIDDEN_LAYER_NUM))
    latent_dim = int(hparams.get(HP_HIDDEN_LATENT_DIM))
    for i in range(layer_num):
        # Every layer but the last emits the full sequence so it can feed the
        # next LSTM; the final layer collapses to a single vector.
        return_sequences = (i != layer_num - 1)
        x = LSTM(latent_dim, return_sequences=return_sequences)(x)

    pre_output_units = int(hparams.get(HP_PRE_OUTPUT_UNITS))
    x = Dense(units=pre_output_units, activation='swish')(x)

    # Flat head: one unit per (output time step, predicted feature) pair;
    # get_output_graph reshapes/splits it into the final outputs.
    model_head = Dense(units=OUTPUT_WINDOW_SIZE *
                       len(PREDICT_FEATURE_KEYS), activation='relu')(x)
    # =====

    output_layers = get_output_graph(
        model_head, PREDICT_FEATURE_KEYS, OUTPUT_WINDOW_SIZE)

    model = tf.keras.Model(input_layers, output_layers)
    model.compile(
        loss='mae',
        optimizer=tf.keras.optimizers.Adam(
            # 'learning_rate' is the supported kwarg; the 'lr' alias is
            # deprecated and removed in current TF2 optimizers.
            learning_rate=float(hparams.get(HP_LR))))

    model.summary(print_fn=logging.info)
    return model
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment