Skip to content

Instantly share code, notes, and snippets.

@pranjalAI
Created September 4, 2020 14:42
Show Gist options
  • Save pranjalAI/19f447f27d6ed6b7e15940bf16e7a3c7 to your computer and use it in GitHub Desktop.
def make_inference_models(units=200):
    """Build the encoder and decoder models used for step-wise seq2seq inference.

    Relies on module-level objects created during training (not visible in
    this chunk): ``encoder_inputs``, ``encoder_states``, ``decoder_inputs``,
    ``decoder_embedding``, ``decoder_lstm``, ``decoder_dense``.

    Args:
        units: Dimensionality of the decoder LSTM hidden/cell state.
            Must match the size the decoder was trained with; defaults to
            the original hard-coded 200.

    Returns:
        Tuple ``(encoder_model, decoder_model)`` of ``tf.keras`` Models.
        The decoder model takes ``[decoder_inputs, state_h, state_c]`` and
        returns ``[token_probabilities, new_state_h, new_state_c]``.
    """
    # Encoder: maps the full input sequence to its final LSTM states.
    encoder_model = tf.keras.models.Model(encoder_inputs, encoder_states)

    # Decoder state inputs: at inference the previous step's states are
    # fed back in explicitly, one token at a time.
    decoder_state_input_h = tf.keras.layers.Input(shape=(units,))
    decoder_state_input_c = tf.keras.layers.Input(shape=(units,))
    decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]

    decoder_outputs, state_h, state_c = decoder_lstm(
        decoder_embedding, initial_state=decoder_states_inputs)
    decoder_states = [state_h, state_c]
    decoder_outputs = decoder_dense(decoder_outputs)

    decoder_model = tf.keras.models.Model(
        [decoder_inputs] + decoder_states_inputs,
        [decoder_outputs] + decoder_states)

    return encoder_model, decoder_model
def str_to_tokens(sentence: str):
    """Tokenize *sentence* into a padded id sequence for the decoder.

    Uses module-level ``tokenizer``, ``maxlen`` and ``preprocessing``
    (defined elsewhere in this file).

    Fix: the original did ``tokenizer.word_index[word]`` and therefore
    crashed with ``KeyError`` on any word not seen during training;
    out-of-vocabulary words are now skipped instead.

    Args:
        sentence: Raw input text; lowercased and split on whitespace.

    Returns:
        A ``(1, maxlen)`` array of token ids, post-padded with zeros.
    """
    word_index = tokenizer.word_index
    # Keep only in-vocabulary words so unseen input cannot raise KeyError.
    tokens_list = [word_index[w] for w in sentence.lower().split() if w in word_index]
    return preprocessing.sequence.pad_sequences(
        [tokens_list], maxlen=maxlen, padding='post')
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment