@braingineer
Created April 15, 2016 06:42
minimum working example for an issue with TimeDistributed's current compute_mask
from __future__ import print_function

import numpy as np

from keras.layers import Input, Embedding, LSTM, Activation
from keras.layers import TimeDistributed as Distribute


def outmask(x):
    # mask produced by the first downstream node that consumes x
    return x._keras_history[0].outbound_nodes[0].output_masks[0]


def inmask(x):
    # mask attached to x by the node that produced it
    return x._keras_history[0].inbound_nodes[0].output_masks[0]


def pp(s, x):
    pstr = "For x={}\n\t x.ndim={}\n\t outmask.ndim={}\n\t inmask.ndim={}"
    print(pstr.format(s, x.ndim, outmask(x).ndim, inmask(x).ndim))


batch_shape = (10, 10, 10, 10)

X_in = Input(batch_shape=batch_shape, dtype='int32')
X_emb = Embedding(input_dim=5, output_dim=10, mask_zero=True)(X_in)
X_reduced = Distribute(LSTM(10))(X_emb)
out = Activation('relu')(X_reduced)

print("==== input gets embedded =====")
pp("X_emb", X_emb)

print("==== a reduction from LSTM ====")
pp("X_reduced", X_reduced)