Skip to content

Instantly share code, notes, and snippets.

@hadifar
hadifar / tf.data 9.py
Last active September 15, 2018 15:06
# Excerpt: TF1-style logistic-regression training setup (gist paste — the
# original indentation was stripped and the epoch loop is truncated below).
# Writes the graph for TensorBoard visualization under ./graphs/logreg.
writer = tf.summary.FileWriter('./graphs/logreg', tf.get_default_graph())
with tf.Session() as sess:
start_time = time.time()  # wall-clock start, presumably for reporting total training time
sess.run(tf.global_variables_initializer())
# train the model n_epochs times
for i in range(30):
# NOTE(review): train_init looks like a dataset-iterator initializer op;
# sess.run on an initializer op returns None, so 'sss' appears unused — confirm upstream.
sss = sess.run(train_init) # drawing samples from train_data
total_loss = 0
@hadifar
hadifar / oop.tf 1.py
Last active September 15, 2018 15:06
TensorFlow object-oriented structure, part 1
class SkipGram(object):
    """Skeleton of a skip-gram word2vec model; method bodies are filled in
    by the later parts of this tutorial series.

    Fix: the original stub methods were declared without ``self``
    (``def __init__():``), so instantiating or calling them would raise
    ``TypeError``; indentation (lost in the paste) is also restored.
    """

    def __init__(self):
        # No construction logic yet at this stage of the tutorial.
        pass

    def _import_data(self):
        # Placeholder: will build the input pipeline.
        pass

    def _init_weight(self):
        # Placeholder: will create the model's variables.
        pass
@hadifar
hadifar / oop.tf 2.py
Last active September 15, 2018 15:05
def __init__(self, dataset, vocab_size, embed_dim, neg_sample, learning_rate, skip_step=5000):
    """Store the skip-gram model configuration on the instance.

    Args:
        dataset: input dataset the model will iterate over.
        vocab_size: number of distinct words in the vocabulary.
        embed_dim: dimensionality of the embedding vectors.
        neg_sample: number of negative samples for the sampled loss.
        learning_rate: optimizer step size (stored as ``self.lr``).
        skip_step: report-progress interval in steps (default 5000).
    """
    self.dataset = dataset
    self.vocab_size = vocab_size
    self.embed_dim = embed_dim
    self.neg_sample = neg_sample
    self.lr = learning_rate
    self.skip_step = skip_step
@hadifar
hadifar / oop.tf 3.py
Last active September 15, 2018 15:05
def _import_data(self):
    """Build the input pipeline: an initializable iterator over self.dataset."""
    with tf.name_scope('data'):
        data_iter = self.dataset.make_initializable_iterator()
        self.iterator = data_iter
        # Each draw from the iterator yields a (center, target) batch pair.
        self.center_words, self.target_words = data_iter.get_next()
@hadifar
hadifar / oop.tf 4.py
Last active September 15, 2018 15:05
def _create_embedding(self):
    """Create the embedding matrix and look up vectors for the center words."""
    with tf.name_scope('embedding'):
        # Uniform init in [-1, 1), shape [vocab_size, embed_dim].
        uniform_init = tf.random_uniform(
            [self.vocab_size, self.embed_dim], minval=-1, maxval=1)
        self.embed_matrix = tf.get_variable(name='embed', initializer=uniform_init)
        self.embed = tf.nn.embedding_lookup(
            self.embed_matrix, self.center_words, name='embedding')
@hadifar
hadifar / oop.tf 5.py
Last active September 15, 2018 15:05
def get_dataset():
    """Build a tf.data.Dataset of skip-gram (center, target) batches.

    Wraps ``word2vec_utils.batch_gen`` in a generator-backed Dataset whose
    elements are int32 tensors shaped ``[BATCH_SIZE]`` (centers) and
    ``[BATCH_SIZE, 1]`` (targets).

    Fix: the original ended with ``retrun dataset`` — a typo that made the
    function a SyntaxError; corrected to ``return dataset``.
    """
    def gen():
        yield from word2vec_utils.batch_gen(FLAGS.DOWNLOAD_URL, FLAGS.EXPECTED_BYTES,
                                            FLAGS.VOCAB_SIZE, FLAGS.BATCH_SIZE,
                                            FLAGS.SKIP_WINDOW, FLAGS.VISUAL_FLD)

    dataset = tf.data.Dataset.from_generator(
        gen,
        (tf.int32, tf.int32),
        (tf.TensorShape([FLAGS.BATCH_SIZE]),
         tf.TensorShape([FLAGS.BATCH_SIZE, 1])))
    return dataset
@hadifar
hadifar / oop.tf 6.py
Last active September 15, 2018 15:05
def _create_loss(self):
# Build the sampled-softmax loss for skip-gram training.
# NOTE(review): this gist excerpt is truncated — the sampled_softmax_loss
# call below is missing its remaining arguments (labels, inputs,
# num_sampled, num_classes) and its closing parentheses.
with tf.name_scope('loss'):
# Output-layer weights, init ~ truncated normal with std 1/sqrt(embed_dim).
self.neg_weight = tf.get_variable(name='neg_weight',
initializer=tf.truncated_normal([self.vocab_size, self.embed_dim],
stddev=1.0 / self.embed_dim ** 0.5))
# Output-layer biases, initialized to zero.
self.neg_bias = tf.get_variable(name='neg_bias',
initializer=tf.zeros([self.vocab_size]))
self.loss = tf.reduce_mean(tf.nn.sampled_softmax_loss(weights=self.neg_weight,
biases=self.neg_bias,
def _create_optimizer(self):
    """Minimize self.loss with plain SGD at learning rate self.lr."""
    with tf.name_scope('optimizer'):
        sgd = tf.train.GradientDescentOptimizer(learning_rate=self.lr)
        self.optimizer = sgd.minimize(self.loss)
@hadifar
hadifar / oop.tf 8.py
Last active September 15, 2018 15:02
def build_graph(self):
    """Assemble the model graph: data pipeline, embeddings, loss, optimizer."""
    # Order matters: each stage consumes tensors created by the previous one.
    for build_stage in (self._import_data,
                        self._create_embedding,
                        self._create_loss,
                        self._create_optimizer):
        build_stage()
@hadifar
hadifar / oop.tf 9.py
Last active September 15, 2018 15:02
def train(self, epoch):
# Run the training loop for `epoch` passes over the dataset.
# NOTE(review): this gist excerpt is truncated inside the try block —
# the per-epoch training steps and exception handling are cut off.
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
# Initialize the dataset iterator built by _import_data.
sess.run(self.iterator.initializer)
total_loss = 0.0  # running sum used to report average loss over the last SKIP_STEP steps
for index in range(epoch):
try: