@dkurt
Created October 31, 2019 11:00
  1. Prepare the model (tested with mobilenetv2_coco_cityscapes_trainfine from https://github.com/tensorflow/models/blob/master/research/deeplab/g3doc/model_zoo.md)
import tensorflow as tf
from tensorflow.tools.graph_transforms import TransformGraph
from tensorflow.python.tools import optimize_for_inference_lib


# Load the frozen DeepLab graph
graph = 'deeplabv3_mnv2_cityscapes_train/frozen_inference_graph.pb'
with tf.gfile.FastGFile(graph, 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())

    # Strip training-only nodes and fold constants, keeping only the
    # ImageTensor -> ArgMax inference path
    graph_def = optimize_for_inference_lib.optimize_for_inference(graph_def, ['ImageTensor'], ['ArgMax'], tf.uint8.as_datatype_enum)
    graph_def = TransformGraph(graph_def, ['ImageTensor'], ['ArgMax'], ['fold_constants', 'strip_unused_nodes'])

    # Serialize the optimized graph
    with tf.gfile.FastGFile("opt_graph.pb", 'wb') as f:
        f.write(graph_def.SerializeToString())
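To sanity-check the result before conversion, you can reload opt_graph.pb and confirm that the ImageTensor input and ArgMax output survived the transforms (a minimal sketch; the file name matches the one written above):

import tensorflow as tf

graph_def = tf.GraphDef()
with tf.gfile.FastGFile('opt_graph.pb', 'rb') as f:
    graph_def.ParseFromString(f.read())

# Collect node names and verify the expected input/output are still present
names = set(node.name for node in graph_def.node)
print('ImageTensor' in names, 'ArgMax' in names)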
  2. Convert to IR
python3 /opt/intel/openvino/deployment_tools/model_optimizer/mo_tf.py --input_model opt_graph.pb --input_shape "[1, 512, 512, 3]"
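Before running the comparison it can help to inspect what the Model Optimizer produced. A minimal sketch with the 2019 Python API (assuming opt_graph.xml and opt_graph.bin are in the current directory):

from openvino.inference_engine import IENetwork

net = IENetwork('opt_graph.xml', 'opt_graph.bin')
# The input is converted to NCHW, so expect [1, 3, 512, 512]
print({name: info.shape for name, info in net.inputs.items()})
# The output blob name ('ArgMax/Squeeze' here) is used in the comparison script below
print(list(net.outputs.keys()))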
  3. Compare with TensorFlow
import numpy as np

np.random.seed(324)
inpU8 = np.random.randint(0, 255, [1, 512, 512, 3], np.uint8)
inpFP32 = inpU8.astype(np.float32).transpose(0, 3, 1, 2)  # NHWC to NCHW

#
# TensorFlow
#
import tensorflow as tf

graph = 'deeplabv3_mnv2_cityscapes_train/frozen_inference_graph.pb'
with tf.gfile.FastGFile(graph, 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())

with tf.Session() as sess:
    # Restore session
    sess.graph.as_default()
    tf.import_graph_def(graph_def, name='')

    tfOut = sess.run(sess.graph.get_tensor_by_name('ArgMax:0'),
                     feed_dict={'ImageTensor:0': inpU8})
    print('TensorFlow output:')
    print(tfOut.shape)

#
# OpenVINO
#
from openvino.inference_engine import IENetwork, IECore

# Read the IR produced by the Model Optimizer
net = IENetwork('opt_graph.xml', 'opt_graph.bin')

ie = IECore()
# The CPU extensions library is required for some layers in OpenVINO 2019 releases
ie.add_extension('libcpu_extension_avx2.so', "CPU")
exec_net = ie.load_network(net, 'CPU')

outputs = exec_net.infer(inputs={'ImageTensor': inpFP32})
ieOut = outputs['ArgMax/Squeeze']
print('OpenVINO output:')
print(ieOut.shape)

# print(np.max(np.abs(ieOut - tfOut)))

def iou(a, b):
    # Intersection-over-union between two boolean masks
    inter = np.sum(np.logical_and(a, b))
    union = np.sum(np.logical_or(a, b))
    return inter / union if union else 1.0

# Per-class IoU between TensorFlow and OpenVINO predictions (Cityscapes has 19 classes)
for i in range(19):
    print('class %d:' % i, iou(tfOut == i, ieOut == i))

Output:

class 0: 0.9999767638197182
class 1: 0.9998295841854125
class 2: 0.9999793293695833
class 3: 1.0
class 4: 1.0
class 5: 0.999964207738287
class 6: 1.0
class 7: 1.0
class 8: 0.9999764616159571
class 9: 1.0
class 10: 0.9999939766414154
class 11: 0.9999199038846616
class 12: 1.0
class 13: 1.0
class 14: 0.999559277214632
class 15: 1.0
class 16: 1.0
class 17: 1.0
class 18: 1.0