
@thibthibaut
Last active January 8, 2021 17:03
# https://www.tensorflow.org/lite/guide/inference#load_and_run_a_model_in_python
import tensorflow as tf

# Simple version: load the model, feed a single input, read back the single output
interpreter = tf.lite.Interpreter(model_path='./upsampling_float.tflite')
interpreter.allocate_tensors()
interpreter.set_tensor(interpreter.get_input_details()[0]['index'], sample_input)
interpreter.invoke()
output_tflite_float = interpreter.get_tensor(interpreter.get_output_details()[0]['index'])[0]
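# A minimal sketch of how `sample_input` used above could be prepared (an
# assumption for illustration, not part of the original gist): the array only
# needs to match the dtype and shape reported by the interpreter's input details.
import numpy as np
input_details = interpreter.get_input_details()
sample_input = np.random.random_sample(
    tuple(input_details[0]['shape'])).astype(input_details[0]['dtype'])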
##################################################
# Batched version: quantized model with two outputs
interpreter = tf.lite.Interpreter(model_path='./model_quant.tflite')

# Get input and output tensor details first, so their indices can be used below
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# (batch_size, height, width, channels)
input_shape = (32, 224, 224, 3)
# Adjust the graph input to handle a batch tensor
interpreter.resize_tensor_input(input_details[0]['index'], input_shape)
# The model has 2 outputs; resize them to the matching batched shapes
# (output_shape1, output_shape2 and input_tensor are assumed to be defined elsewhere)
interpreter.resize_tensor_input(output_details[0]['index'], output_shape1)
interpreter.resize_tensor_input(output_details[1]['index'], output_shape2)
interpreter.allocate_tensors()

# Point the input tensor to the data to be inferred
interpreter.set_tensor(input_details[0]['index'], input_tensor)

# Run the inference
interpreter.invoke()

# Read back both outputs
prediction1 = interpreter.get_tensor(output_details[0]['index'])[0]
prediction2 = interpreter.get_tensor(output_details[1]['index'])[0]
print(prediction1, prediction2)
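# One possible way to build the batched output shapes passed to
# resize_tensor_input above (an assumption, not part of the original gist):
# reuse the per-output shapes reported by the output details and swap in the
# new batch size before calling allocate_tensors().
batch_size = 32
output_shape1 = (batch_size,) + tuple(output_details[0]['shape'][1:])
output_shape2 = (batch_size,) + tuple(output_details[1]['shape'][1:])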