import tensorflow as tf

# 1. Create and save two graphs

# c = a*b
g1 = tf.Graph()
with g1.as_default():
    a = tf.placeholder(tf.float32, name='a')
    b = tf.Variable(initial_value=tf.truncated_normal((1,)), name='b')
    c = tf.multiply(a, b, name='c')

    s1 = tf.train.Saver()

with tf.Session(graph=g1) as sess:
    sess.run(tf.global_variables_initializer())
    b_init = sess.run(b)
    s1.save(sess, 'g1')

# f = d*e
g2 = tf.Graph()
with g2.as_default():
    d = tf.placeholder(tf.float32, name='d')
    e = tf.Variable(initial_value=tf.truncated_normal((1,)), name='e')
    f = tf.multiply(d, e, name='f')

    s2 = tf.train.Saver()

with tf.Session(graph=g2) as sess:
    sess.run(tf.global_variables_initializer())
    e_init = sess.run(e)
    s2.save(sess, 'g2')

# 2.A Stack loaded models horizontally: g = a*b + d*e
g3 = tf.Graph()
with g3.as_default():
    tf.train.import_meta_graph('g1.meta', import_scope='g1')
    a_, b_, c_ = [g3.get_tensor_by_name('g1/%s:0' % name) for name in ('a', 'b', 'c')]

    tf.train.import_meta_graph('g2.meta', import_scope='g2')
    d_, e_, f_ = [g3.get_tensor_by_name('g2/%s:0' % name) for name in ('d', 'e', 'f')]

    g = c_ + f_

# create separate loaders - we need to load variables from different files
with g3.as_default():
    s31 = tf.train.Saver(var_list={'b': b_})
    s32 = tf.train.Saver(var_list={'e': e_})

feed_dict = {a_: 1.0, d_: -1.0}
print('a=%s and d=%s' % (feed_dict[a_], feed_dict[d_]))

with tf.Session(graph=g3) as sess:
    s31.restore(sess, './g1')
    s32.restore(sess, './g2')

    # check if values were actually restored, not re-initialized
    b_value, e_value, g_value = sess.run([b_, e_, g], feed_dict=feed_dict)
    assert b_init == b_value, 'saved %s and restored %s' % (b_init, b_value)
    assert e_init == e_value, 'saved %s and restored %s' % (e_init, e_value)
    print('restored %s and %s' % (b_value, e_value))

    # check if model works correctly
    assert g_value == feed_dict[a_] * b_init + feed_dict[d_] * e_init
    print('a*b + d*e = %s' % g_value)

# 2.B Stack loaded models vertically: g = e*(a*b)
g4 = tf.Graph()
with g4.as_default():
    tf.train.import_meta_graph('g1.meta', import_scope='g1')
    a_, b_, c_ = [g4.get_tensor_by_name('g1/%s:0' % name) for name in ('a', 'b', 'c')]

    tf.train.import_meta_graph('g2.meta', import_scope='g2', input_map={'d:0': c_})
    e_, g = [g4.get_tensor_by_name('g2/%s:0' % name) for name in ('e', 'f')]

# create separate loaders again
with g4.as_default():
    s41 = tf.train.Saver(var_list={'b': b_})
    s42 = tf.train.Saver(var_list={'e': e_})

feed_dict = {a_: 1.0}
print('a=%s' % feed_dict[a_])

with tf.Session(graph=g4) as sess:
    s41.restore(sess, './g1')
    s42.restore(sess, './g2')

    # check restored values
    b_value, e_value, g_value = sess.run([b_, e_, g], feed_dict=feed_dict)
    assert b_init == b_value, 'saved %s and restored %s' % (b_init, b_value)
    assert e_init == e_value, 'saved %s and restored %s' % (e_init, e_value)
    print('restored %s and %s' % (b_value, e_value))

    # check if model works correctly
    assert g_value == feed_dict[a_] * b_init * e_init
    print('e*(a*b) = %s' % g_value)
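A side note on the var_list mappings above (not part of the original gist): the checkpoints store the variables under their unscoped names ('b' and 'e'), while the merged graph knows them as 'g1/b' and 'g2/e'; that mismatch is why each Saver passes an explicit name-to-tensor dictionary instead of using the default. If a restore fails, a quick sketch like the following makes the required mapping obvious, assuming a TF 1.x build that provides tf.train.list_variables:

import tensorflow as tf

# Print the (name, shape) pairs stored in each checkpoint; expect
# unscoped names like 'b' and 'e', which is why the Savers above use
# var_list={'b': b_} and var_list={'e': e_} rather than the
# 'g1/b' / 'g2/e' names from the merged graph.
for ckpt in ('./g1', './g2'):
    print(ckpt, tf.train.list_variables(ckpt))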
I found this post very useful, thanks a lot!
Trying to do the same, I can make it work only for some graphs; for others I get an error. For example, I would like to build the following graph as a stack of two smaller ones:

graph_unique = tf.Graph()
with graph_unique.as_default():
    v = tf.get_variable(name='v', shape=[2], dtype=tf.float64)
    x = v[:1]
    y = v[1:]
    c = tf.add(x**3, y**2, name='c')
    gra = tf.gradients([c], [x])

Below are two attempts; one of them fails, and unfortunately it is exactly the one I need. Could you please help me with that? Thanks again!
# Address to save metadata
adr_big = ''    # please add a valid address
adr_small = ''  # same here

# Construct and save graph_small
graph_small = tf.Graph()

# FIRST VERSION of graph_small: IT WORKS
with graph_small.as_default():
    tf.get_variable(name='x', shape=[1], dtype=tf.float64)
    tf.get_variable(name='y', shape=[1], dtype=tf.float64)
    s_small = tf.train.Saver()

# SECOND VERSION of graph_small (the one I need): IT DOESN'T WORK
"""
with graph_small.as_default():
    v = tf.get_variable(name='v', shape=[2], dtype=tf.float64)
    x = tf.identity(v[:1], name='x')
    y = tf.identity(v[1:], name='y')
    s_small = tf.train.Saver()
"""

with tf.Session(graph=graph_small) as sess:
    sess.run(tf.global_variables_initializer())
    s_small.save(sess, adr_small)
# Construct and save graph_big
graph_big = tf.Graph()
with graph_big.as_default():
    a = tf.get_variable(name='a', shape=[1], dtype=tf.float64)
    b = tf.get_variable(name='b', shape=[1], dtype=tf.float64)
    c = tf.add(a**3, b**2, name='c')
    s = tf.train.Saver()

with tf.Session(graph=graph_big) as sess:
    sess.run(tf.global_variables_initializer())
    s.save(sess, adr_big)
# Glue together
graph_together = tf.Graph()
with graph_together.as_default():
    tf.train.import_meta_graph(adr_small + '.meta', import_scope='g_small')
    x = graph_together.get_tensor_by_name('g_small/x:0')
    y = graph_together.get_tensor_by_name('g_small/y:0')
    tf.train.import_meta_graph(adr_big + '.meta', import_scope='g_big', input_map={'a:0': x, 'b:0': y})
    c = graph_together.get_tensor_by_name('g_big/c:0')
    gra = tf.gradients([c], [x])
PS: In the definition of c, this editor does not render the double asterisk used for power, so the exponents (x**3, y**2, a**3, b**2) are written out explicitly above.
This is very useful, thanks a lot. I have a question: how can we merge two custom TensorFlow object detection models? I have the frozen graph of both models. Is it possible to merge the two models into one?
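One possible approach, sketched below rather than taken from the gist above: import both frozen GraphDefs into a fresh graph with tf.import_graph_def, wiring one model's output tensor into the other's input via input_map (the same idea the gist uses with import_meta_graph). The file names and the 'input:0'/'output:0' tensor names here are placeholders for illustration; the real names depend on how the models were exported.

import tensorflow as tf

def load_graph_def(path):
    # Read a frozen GraphDef from disk.
    graph_def = tf.GraphDef()
    with tf.gfile.GFile(path, 'rb') as f:
        graph_def.ParseFromString(f.read())
    return graph_def

# 'model1.pb' / 'model2.pb' are hypothetical paths to the two frozen models.
gd1 = load_graph_def('model1.pb')
gd2 = load_graph_def('model2.pb')

merged = tf.Graph()
with merged.as_default():
    # Import the first model under its own name scope.
    tf.import_graph_def(gd1, name='m1')
    out1 = merged.get_tensor_by_name('m1/output:0')
    # Feed the first model's output into the second model's input;
    # input_map keys use the pre-import names from gd2.
    tf.import_graph_def(gd2, input_map={'input:0': out1}, name='m2')
    out2 = merged.get_tensor_by_name('m2/output:0')

Note this stacks the two models in sequence; for two detectors that should run side by side on the same image, you would instead map both models' input tensors to a shared placeholder and keep their outputs separate.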
You have done good work!