Last active
January 9, 2019 15:36
-
-
Save salman-ghauri/63c7aa31850e8f207fa954981900451e to your computer and use it in GitHub Desktop.
Dockerfile for building a TensorFlow Serving container: copies exported models into the image and serves them on the specified gRPC/REST ports.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# TensorFlow Serving multi-model configuration (protobuf text format).
# Passed to tensorflow_model_server via --model_config_file.
# NOTE(review): only resnet_101 is declared here, while the Dockerfile also
# copies inception and ssd models into /models — add entries if those should
# be served too.
model_config_list: {
  config: {
    name: "resnet_101",
    base_path: "/models/res2",
    model_platform: "tensorflow",
    # Pin serving to version 1 instead of auto-loading the latest
    # version directory found under base_path.
    model_version_policy: {
      specific: {
        versions: 1
      }
    }
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Build-time selection of the tensorflow/serving devel image that provides the
# prebuilt tensorflow_model_server binary.
# NOTE(review): defaults to "latest" — pin a specific release (e.g.
# --build-arg TF_SERVING_VERSION=1.12.0) for reproducible builds.
ARG TF_SERVING_VERSION=latest
ARG TF_SERVING_BUILD_IMAGE=tensorflow/serving:${TF_SERVING_VERSION}-devel

# Stage that only supplies the server binary; nothing else from it is kept.
FROM ${TF_SERVING_BUILD_IMAGE} AS build_image

FROM ubuntu:16.04

ARG TF_SERVING_VERSION_GIT_BRANCH=master
ARG TF_SERVING_VERSION_GIT_COMMIT=head

LABEL maintainer="[email protected]"
LABEL tensorflow_serving_github_branchtag=${TF_SERVING_VERSION_GIT_BRANCH}
LABEL tensorflow_serving_github_commit=${TF_SERVING_VERSION_GIT_COMMIT}

# ca-certificates is the only OS-level runtime dependency; clean the apt
# caches in the same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Install TF Serving pkg: copy the server binary out of the devel stage.
COPY --from=build_image /usr/local/bin/tensorflow_model_server /usr/bin/tensorflow_model_server

# Expose ports (documentation only — publish with `docker run -p` at runtime)
# gRPC
EXPOSE 8500
# REST
EXPOSE 8501

# Set where models should be stored in the container
ENV MODEL_BASE_PATH=/models
RUN mkdir -p ${MODEL_BASE_PATH}

# Bake the exported models and the multi-model config file into the image.
COPY ./models/inception ${MODEL_BASE_PATH}/inception
COPY ./models/res2 ${MODEL_BASE_PATH}/res2
COPY ./models/ssd ${MODEL_BASE_PATH}/ssd
COPY ./models.conf /models/models.conf

# Exec form so tensorflow_model_server runs as PID 1 and receives SIGTERM
# directly from `docker stop` (shell form wraps it in /bin/sh -c, which
# swallows signals). The model names in models.conf differentiate endpoints.
ENTRYPOINT ["tensorflow_model_server", \
            "--port=8500", \
            "--rest_api_port=8501", \
            "--model_config_file=/models/models.conf"]
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment