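The loop below alternates between fitting a skip-gram embedding model for one epoch and saving a scatter plot of its 2-D embedding weights, so the embeddings can be watched settling into place over 20 iterations. It relies on make_skipgrams, model, embeddings and tokeniser having been defined in the earlier snippets of this series.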
# numpy and matplotlib are needed here; `make_skipgrams`, `model`,
# `embeddings` and `tokeniser` are assumed to come from earlier snippets.
import numpy as np
import matplotlib.pyplot as plt

loss_hist = []
for i in range(20):
    # the first batch of skip-grams already exists from the earlier setup;
    # resample a fresh batch on every later iteration
    if i > 0:
        content_words, context_words, labels = make_skipgrams()

    # one epoch at a time so the embeddings can be snapshotted between fits
    hist = model.fit([content_words, context_words], labels, epochs=1, verbose=0)
    print(f"loss: {hist.history['loss'][-1]:.4f}")
    loss_hist.extend(hist.history['loss'])

    # copy the current 2-D embedding weights out of the embedding layer
    embedding_vectors = np.array(embeddings.weights[0].numpy())

    fig, ax = plt.subplots(figsize=(10, 10))
    # white markers are effectively invisible: the scatter only sets the axis
    # limits. row 0 is skipped because the Keras tokeniser reserves index 0.
    ax.scatter(embedding_vectors[1:, 0], embedding_vectors[1:, 1], c='white')

    # draw each word at the position of its embedding vector
    for idx, word in sorted(tokeniser.index_word.items()):
        x_coord = embedding_vectors[idx, 0]
        y_coord = embedding_vectors[idx, 1]
        ax.annotate(
            word,
            (x_coord, y_coord),
            horizontalalignment='center',
            verticalalignment='center',
            size=20,
            alpha=0.7,
        )

    ax.set_title(f"iteration-{i+1}")
    plt.savefig(f"iteration-{i+1:03d}.png")
    plt.close(fig)  # free each figure so 20 plots don't pile up in memory
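Since loss_hist accumulates the per-epoch loss, a quick line plot shows whether training actually converged. A minimal follow-up using only what the loop above already computed:

# plot the accumulated loss curve from the training loop
plt.figure(figsize=(8, 4))
plt.plot(loss_hist)
plt.xlabel("training iteration")
plt.ylabel("loss")
plt.show()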
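For reference, make_skipgrams is defined in an earlier gist of the series and not shown here. A rough sketch of what such a helper can look like, built on the real Keras utility tf.keras.preprocessing.sequence.skipgrams; the corpus variable tokenised_texts is a hypothetical name, and the window size and negative-sampling rate are illustrative, not the author's values:

import tensorflow as tf

def make_skipgrams():
    # hypothetical sketch -- the real helper lives in an earlier gist
    content_words, context_words, labels = [], [], []
    for seq in tokenised_texts:  # assumed: a list of token-id sequences
        pairs, pair_labels = tf.keras.preprocessing.sequence.skipgrams(
            seq,
            vocabulary_size=len(tokeniser.index_word) + 1,
            window_size=2,         # illustrative value
            negative_samples=1.0,  # one negative pair per positive pair
        )
        for (target, context), label in zip(pairs, pair_labels):
            content_words.append(target)
            context_words.append(context)
            labels.append(label)
    return np.array(content_words), np.array(context_words), np.array(labels)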