Skip to content

Instantly share code, notes, and snippets.

@joyhuang9473
Created March 5, 2017 07:43
Show Gist options
  • Save joyhuang9473/939e834080fef3182e7a669a4386e1da to your computer and use it in GitHub Desktop.
"""
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
class Model:
    """Skip-gram word2vec model graph with NCE loss (TensorFlow 1.x style).

    The graph is assembled lazily: construct the object with the
    hyper-parameters, then call ``build_graph()`` to create placeholders,
    the embedding matrix, the loss, the optimizer, and summaries.

    NOTE(review): the pasted source lost its indentation and every method
    body under the ``with tf.name_scope(...)`` blocks, which made it a
    SyntaxError. The bodies below are the standard skip-gram/NCE
    reconstruction implied by the surviving docstrings and name scopes —
    confirm against the original gist.
    """

    def __init__(self, vocab_size, embed_size, batch_size, num_sampled, learning_rate):
        """Store hyper-parameters; no graph nodes except the step counter.

        Args:
            vocab_size: number of distinct words in the vocabulary.
            embed_size: dimensionality of the embedding vectors.
            batch_size: number of (center, target) pairs per training step.
            num_sampled: negative samples per batch for NCE loss.
            learning_rate: SGD learning rate.
        """
        self.vocab_size = vocab_size
        self.embed_size = embed_size
        self.batch_size = batch_size
        self.num_sampled = num_sampled
        self.lr = learning_rate
        # Non-trainable counter, advanced by the optimizer's minimize().
        self.global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')

    def _create_placeholders(self):
        """ Step 1: define the placeholders for input and output """
        with tf.name_scope("data"):
            # NOTE(review): reconstructed body — shapes follow the usual
            # skip-gram convention (targets rank-2 for tf.nn.nce_loss).
            self.center_words = tf.placeholder(
                tf.int32, shape=[self.batch_size], name='center_words')
            self.target_words = tf.placeholder(
                tf.int32, shape=[self.batch_size, 1], name='target_words')

    def _create_embedding(self):
        """ Step 2: define weights. In word2vec, it's actually the weights that we care about """
        # Assemble this part of the graph on the CPU. You can change it to GPU if you have GPU
        with tf.name_scope("embed"):
            # NOTE(review): reconstructed body — uniform init in [-1, 1).
            self.embed_matrix = tf.Variable(
                tf.random_uniform([self.vocab_size, self.embed_size], -1.0, 1.0),
                name='embed_matrix')

    def _create_loss(self):
        """ Step 3 + 4: define the model + the loss function """
        with tf.name_scope("loss"):
            # NOTE(review): reconstructed body — standard NCE loss over the
            # looked-up center-word embeddings.
            embed = tf.nn.embedding_lookup(self.embed_matrix, self.center_words, name='embed')
            nce_weight = tf.Variable(
                tf.truncated_normal([self.vocab_size, self.embed_size],
                                    stddev=1.0 / (self.embed_size ** 0.5)),
                name='nce_weight')
            nce_bias = tf.Variable(tf.zeros([self.vocab_size]), name='nce_bias')
            self.loss = tf.reduce_mean(
                tf.nn.nce_loss(weights=nce_weight,
                               biases=nce_bias,
                               labels=self.target_words,
                               inputs=embed,
                               num_sampled=self.num_sampled,
                               num_classes=self.vocab_size),
                name='loss')

    def _create_optimizer(self):
        """ Step 5: define optimizer """
        # NOTE(review): reconstructed body — plain SGD; global_step is
        # incremented on each minimize() call.
        self.optimizer = tf.train.GradientDescentOptimizer(self.lr).minimize(
            self.loss, global_step=self.global_step)

    def _create_summaries(self):
        """Attach TensorBoard summaries for the loss."""
        with tf.name_scope("summaries"):
            # NOTE(review): reconstructed body — scalar loss summary only.
            tf.summary.scalar("loss", self.loss)
            self.summary_op = tf.summary.merge_all()

    def build_graph(self):
        """ Build the graph for our model """
        self._create_placeholders()
        self._create_embedding()
        self._create_loss()
        self._create_optimizer()
        self._create_summaries()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment