# Entire graph
import tensorflow as tf

a = tf.constant([1, 2, 3], name="a")
b = tf.placeholder(tf.int32, name="b")
c = tf.mul(a, b, name="c")
# d = ...
# e = ...
f = tf.add(c, 4, name="f")

with tf.Session() as session:
    print session.run(f, feed_dict={ b: 10 })
    # >>> [14 24 34]
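
Only the nodes needed to produce the requested fetch are actually run, which is why the commented-out 'd' and 'e' above never matter. A minimal sketch of the same point, reusing the graph defined above: fetching 'c' instead of 'f' leaves the add node untouched.

# Sketch: fetching only "c" runs the constant, the fed placeholder and the
# mul, but never executes "f" (the add node).
with tf.Session() as session:
    print session.run(c, feed_dict={ b: 10 })
    # >>> [10 20 30]
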
TEST_F(SubgraphTest, Targets1) {
  ExpectOK(
      "node { name: 'W1' op: 'TestParams' }"
      "node { name: 'W2' op: 'TestParams' }"
      "node { name: 'input' op: 'TestInput' }"
      "node { name: 't1' op: 'TestMul' input: [ 'W1', 'input:1' ] }"
      "node { name: 't2' op: 'TestMul' input: [ 'W2', 't1' ] }"
      "node { name: 't3_a' op: 'TestRelu' input: 't2' }"
      "node { name: 't3_b' op: 'TestRelu' input: 't2' }");
  EXPECT_EQ("OK", Subgraph("", "", "t1"));
$ python embedding/word2vec_optimized.py --train_data=data/text8 --save_path=fiddling-one --eval_data=data/questions-words.txt --epochs_to_train=1
.../local_device.cc:40] Local device intra op parallelism threads: 8
.../direct_session.cc:60] Direct session inter op parallelism threads: 8
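
The two thread-pool sizes in that log correspond, if I read the config correctly, to fields on ConfigProto; a sketch of setting them explicitly when creating the session:

import tensorflow as tf

# Sketch: pin the two thread pools reported by local_device.cc and
# direct_session.cc in the log above.
config = tf.ConfigProto(intra_op_parallelism_threads=8,
                        inter_op_parallelism_threads=8)
session = tf.Session(config=config)
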
// Test that a graph with no constraints will successfully assign nodes to the
// "best available" device (i.e. prefer GPU over CPU).
TEST_F(SimplePlacerTest, TestNoConstraints) {
  Graph g(OpRegistry::Global());
  {  // Scope for temporary variables used to construct g.
    GraphDefBuilder b(GraphDefBuilder::kFailImmediately);
    Node* input = ops::SourceOp("TestInput", b.opts().WithName("in"));
    ops::UnaryOp("TestRelu", ops::NodeOut(input, 0), b.opts().WithName("n1"));
    ops::UnaryOp("TestRelu", ops::NodeOut(input, 1), b.opts().WithName("n2"));
    EXPECT_OK(BuildGraph(b, &g));
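
TestNoConstraints exercises the placer when the user says nothing about devices. The constrained case, from the Python side, goes through tf.device; a small sketch for contrast (the node name is made up):

import tensorflow as tf

# Sketch: an explicit device constraint that the placer has to honour,
# unlike the unconstrained graph built in the test above.
with tf.device("/cpu:0"):
    pinned = tf.constant([1.0, 2.0], name="pinned_to_cpu")
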
.../simple_placer.cc:304] global_step: /job:localhost/replica:0/task:0/cpu:0
.../simple_placer.cc:304] Skipgram: /job:localhost/replica:0/task:0/cpu:0
...
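
Those lines come from simple_placer.cc as it assigns each node. The same decisions can, I believe, be surfaced from Python by turning on device-placement logging in the session config:

import tensorflow as tf

# Sketch: ask the runtime to log where every op ends up, matching the
# simple_placer.cc output above.
session = tf.Session(config=tf.ConfigProto(log_device_placement=True))
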
tuple = word2vec.skipgram(filename=opts.train_data,
                          batch_size=opts.batch_size,
                          window_size=opts.window_size,
                          min_count=opts.min_count,
                          subsample=opts.subsample)
(vocab_word, vocab_freq, words_per_epoch, current_epoch,
 total_words_processed, examples, labels) = tuple
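
The op hands back everything at once: vocabulary statistics, progress counters, and the actual training batch. The model only really consumes 'examples' and 'labels'; a hedged sketch of the typical next step (vocab_size and emb_dim are illustrative placeholders, not names from the original code):

# Sketch: look up embeddings for the centre words produced by the op.
emb = tf.Variable(
    tf.random_uniform([vocab_size, emb_dim], -1.0, 1.0), name="emb")
example_emb = tf.nn.embedding_lookup(emb, examples)
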
def skipgram(filename, batch_size, window_size=None, min_count=None,
             subsample=None, name=None):
  r"""Parses a text file and creates a batch of examples.
  ...
  """
  return _op_def_lib.apply_op("Skipgram", filename=filename,
                              batch_size=batch_size, window_size=window_size,
                              min_count=min_count, subsample=subsample,
                              name=name)

def _InitOpDefLibrary():
  op_list = op_def_pb2.OpList()
  text_format.Merge(_InitOpDefLibrary.op_list_ascii, op_list)
  op_def_registry.register_op_list(op_list)
  op_def_lib = op_def_library.OpDefLibrary()
  op_def_lib.add_op_list(op_list)
  return op_def_lib

_InitOpDefLibrary.op_list_ascii = """op {
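
The op_list_ascii string (its body is elided here) is just the text-format OpList proto describing the Skipgram op. Presumably the generated module then builds the library once at import time, which is where the _op_def_lib used by skipgram() comes from; an assumed one-liner:

# Assumed module-level wiring in the generated file.
_op_def_lib = _InitOpDefLibrary()
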
# build
bazel build -c dbg tensorflow/models/embedding:all

# copy the generated gen_word2vec.py from bazel-out/../genfiles into the project

# in word2vec.py, comment out the __future__ division import to fix the python 2/3 error:
#   error: TypeError: unsupported operand type(s) for /: 'Tensor' and 'int'
#   fix: comment out "from __future__ import division"

# in word2vec.py, change the import to pull in gen_word2vec from the local folder
# (see the sketch below for the swapped line):
#   before: from tensorflow.models.embedding import gen_word2vec as word2vec
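
Assuming gen_word2vec.py was copied right next to word2vec.py, the replacement import would presumably be just:

# after (assumption): pull in the locally copied generated module instead
import gen_word2vec as word2vec
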
class SkipgramOp : public OpKernel {
 public:
  explicit SkipgramOp(OpKernelConstruction* ctx) : OpKernel(ctx) {
    // validate input and save it as state
    OP_REQUIRES_OK(ctx, ctx->GetAttr("filename", &filename));
    // ...
  }

  void Compute(OpKernelContext* ctx) override {
    Tensor examples(DT_INT32, TensorShape({batch_size_}));