See Medium blog: Accelerate Big Transfer (BiT) model inference with Intel® OpenVINO™
"""
Copyright (c) 2022 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import os
import sys
from argparse import ArgumentParser

# Silence TensorFlow's C++ logging before TensorFlow is imported.
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

try:
    import tensorflow.compat.v1 as tf_v1

    # Disable eager execution of the TensorFlow 2 environment immediately.
    tf_v1.disable_eager_execution()
    import tensorflow as tf
    from tensorflow.python.framework.convert_to_constants import (
        convert_variables_to_constants_v2,
    )
except ImportError:
    import tensorflow as tf_v1


def build_argparser():
    usage = """python tf_freeze_graph.py -i </path/to/tf/savedmodel/dir>"""
    parser = ArgumentParser(
        prog="python tf_freeze_graph.py",
        description="Convert a TF SavedModel to a frozen .pb file",
        epilog=usage,
    )
    parser.add_argument(
        "-i", "--input_dir", help="Path to the TF SavedModel directory", required=True
    )
    return parser.parse_args()


def get_graph_def(saved_model_dir):
    # Temporarily enable eager execution while the TensorFlow 2 model is loaded.
    tf_v1.enable_eager_execution()
    try:
        # Try to load the model as a Keras model first.
        imported = tf.keras.models.load_model(saved_model_dir, compile=False)
    except Exception:
        # Fall back to the generic SavedModel loader.
        imported = tf.saved_model.load(saved_model_dir)
    # Getting a signature by key raises KeyError for the TF 1.x SavedModel format
    # when TF 2.x is installed.
    concrete_func = imported.signatures[
        tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY
    ]
    # aggressive_inlining is needed to freeze the embedding table of a Keras
    # Embedding operation; without it, a model with an Embedding operation cannot
    # be properly converted to IR.
    frozen_func = convert_variables_to_constants_v2(
        concrete_func, lower_control_flow=True, aggressive_inlining=True
    )
    graph_def = frozen_func.graph.as_graph_def(add_shapes=True)
    return graph_def


def main():
    args = build_argparser()
    # Normalize the path so a trailing slash does not break basename/dirname.
    input_dir = os.path.normpath(args.input_dir)
    model_name = os.path.basename(input_dir)
    dir_name = os.path.dirname(input_dir)
    if len(dir_name) == 0:
        dir_name = "."
    graph_def = get_graph_def(input_dir)
    is_text = False
    new_ext = ".pbtxt" if is_text else ".pb"
    out_dir = "".join([dir_name, "/frozen_", model_name])
    out_name = "frozen_graph" + new_ext
    tf_v1.import_graph_def(graph_def, name="")
    tf_v1.train.write_graph(graph_def, out_dir, out_name, as_text=is_text)
    print(f"\nFrozen graph saved at: {out_dir}/{out_name}\n")


if __name__ == "__main__":
    sys.exit(main() or 0)
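
To sanity-check the output, the frozen graph can be loaded back with the TF 1.x compatibility API and its input/output nodes inspected. The sketch below assumes an example path ./frozen_mymodel/frozen_graph.pb; substitute the directory printed by the script above.

# verify_frozen_graph.py -- minimal sanity check for the frozen graph (sketch).
import tensorflow.compat.v1 as tf_v1

tf_v1.disable_eager_execution()

PB_PATH = "./frozen_mymodel/frozen_graph.pb"  # example path, adjust to your model

graph_def = tf_v1.GraphDef()
with tf_v1.gfile.GFile(PB_PATH, "rb") as f:
    graph_def.ParseFromString(f.read())

with tf_v1.Graph().as_default() as graph:
    tf_v1.import_graph_def(graph_def, name="")

# Placeholder ops are the graph inputs; the last nodes usually include the outputs.
print("Inputs:", [op.name for op in graph.get_operations() if op.type == "Placeholder"])
print("Last nodes:", [node.name for node in graph_def.node[-3:]])

The frozen frozen_graph.pb can then be converted to OpenVINO IR with Model Optimizer (for example, mo --input_model frozen_graph.pb), as described in the Medium blog linked above.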