@vaclavcadek · Created May 14, 2021
Simple TF Serving example with a normalization layer baked into the TF graph, served via the TF Serving REST API.
# Download the TensorFlow Serving Docker image
docker pull tensorflow/serving
# Start a TensorFlow Serving container and open the REST API port
# (adjust the host path in -v to wherever my_model was exported)
docker run -t --rm -p 8501:8501 -v "/home/vaclav/PycharmProjects/the-algorithm-lab/tensorflow2/practice/deployment/my_model:/models/my_model" -e MODEL_NAME=my_model tensorflow/serving &
# Query the model using the predict API; "instances" is a list of examples,
# so a single 30-feature row must be sent as a nested list
curl -d '{"instances": [[17.99000,20.57000,19.69000,11.42000,20.29000,12.45000,18.25000,13.71000,13.00000,12.46000,16.02000,15.78000,19.17000,15.85000,13.73000,14.54000,14.68000,16.13000,19.81000,13.54000,13.08000,9.50400,15.34000,21.16000,16.65000,17.14000,14.58000,18.61000,15.30000,17.57000]]}' \
  -X POST http://localhost:8501/v1/models/my_model:predict
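The same call can be made from Python. A minimal sketch (not part of the original gist) using the requests library, assuming the container above is running on localhost:8501:

import requests

# One instance is one row of 30 features; "instances" is a list of such rows.
instance = [17.99, 20.57, 19.69, 11.42, 20.29, 12.45, 18.25, 13.71, 13.0, 12.46,
            16.02, 15.78, 19.17, 15.85, 13.73, 14.54, 14.68, 16.13, 19.81, 13.54,
            13.08, 9.504, 15.34, 21.16, 16.65, 17.14, 14.58, 18.61, 15.3, 17.57]
response = requests.post(
    "http://localhost:8501/v1/models/my_model:predict",
    json={"instances": [instance]},
)
# TF Serving replies with {"predictions": [[p]]}, where p is the sigmoid output.
print(response.json()["predictions"])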
import os.path

import numpy as np
import tensorflow as tf
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

SEED = 42
VERSION = 1


class LogisticRegression(tf.keras.Model):
    def __init__(self, input_shape):
        super().__init__()
        # weights and bias initialized to small random values
        self.W = tf.Variable(tf.random.normal((input_shape, 1)) * 0.001, name="W")
        self.b = tf.Variable(tf.random.normal((1, 1)) * 0.001, name="b")

    def call(self, X, **kwargs):
        # sigmoid(XW + b) -> probability of the positive class
        return tf.nn.sigmoid(tf.matmul(X, self.W) + self.b)


if __name__ == '__main__':
    tf.random.set_seed(SEED)
    np.random.seed(SEED)

    cancer = load_breast_cancer()
    X = cancer.data.astype(np.float32)
    y = cancer.target.astype(np.float32)[:, np.newaxis]
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

    # the normalization layer learns per-feature mean/variance from the
    # training data and becomes part of the saved graph
    normalizer = tf.keras.layers.experimental.preprocessing.Normalization()
    normalizer.adapt(X_train)
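    # Sanity check (illustrative, not in the original gist): after adapt(), the
    # layer's output should have roughly zero mean and unit variance on X_train.
    X_norm = normalizer(X_train).numpy()
    print(X_norm.mean(axis=0).round(2), X_norm.std(axis=0).round(2))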
    model = LogisticRegression(input_shape=X.shape[1])
    pipeline = tf.keras.Sequential([
        normalizer,
        model,
    ])
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.01)
    for epoch in range(1000):
        with tf.GradientTape() as tape:
            y_hat = pipeline(X_train)
            # binary cross-entropy, averaged over the batch
            loss = -tf.reduce_mean(y_train * tf.math.log(y_hat) + (1. - y_train) * tf.math.log(1. - y_hat))
        dW, db = tape.gradient(loss, model.trainable_variables)
        if epoch % 100 == 0:
            print(f"epoch {epoch}: loss = {loss.numpy():.4f}")
        optimizer.apply_gradients(zip([dW, db], model.trainable_variables))
    # TF Serving expects each model under a numeric version subdirectory,
    # e.g. my_model/1 for version 1
    model_folder = os.path.join(os.getcwd(), f"my_model/{VERSION}")
    pipeline.save(model_folder)
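    # The exported signatures can be inspected with TensorFlow's saved_model_cli:
    #   saved_model_cli show --dir my_model/1 --tag_set serve --signature_def serving_default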
    # reload the SavedModel and evaluate it exactly as TF Serving will see it
    frozen_graph = tf.saved_model.load(model_folder)
    m = tf.keras.metrics.AUC(num_thresholds=100)
    y_hat = frozen_graph(X_train)
    m.update_state(y_train, y_hat)
    print(f"AUC (train): {m.result().numpy():.2f}")

    m.reset_states()
    y_hat = frozen_graph(X_test)
    m.update_state(y_test, y_hat)
    print(f"AUC (test): {m.result().numpy():.2f}")