Skip to content

Instantly share code, notes, and snippets.

@sundisee
Forked from jorgemf/Dockerfile_TFserving_1_6
Created March 10, 2018 16:33
Show Gist options
  • Save sundisee/d03d59bfd3ed0c5fa19afda92633d0c1 to your computer and use it in GitHub Desktop.
Dockerfile to compile TensorFlow Serving 1.5 using GPU
# docker build --pull -t $USER/tensorflow-serving-devel -f Dockerfile .
# export TF_SERVING_PORT=9000
# export TF_SERVING_MODEL_PATH=/tf_models/mymodel
# export CONTAINER_NAME=my_container
# CUDA_VISIBLE_DEVICES=0 docker run --runtime=nvidia -it -p $TF_SERVING_PORT:$TF_SERVING_PORT -v $TF_SERVING_MODEL_PATH:/root/tf_model --name $CONTAINER_NAME $USER/tensorflow-serving-devel /root/serving/bazel-bin/tensorflow_serving/model_servers/tensorflow_model_server --port=$TF_SERVING_PORT --enable_batching=true --model_base_path=/root/tf_model/
# docker start -ai $CONTAINER_NAME
# CUDA 9.0 + cuDNN 7 *devel* image: headers and nvcc are required to compile
# TensorFlow Serving with GPU support (runtime-only images are not enough).
FROM nvidia/cuda:9.0-cudnn7-devel-ubuntu16.04
# Build-time pins: TF_CUDA_VERSION/TF_CUDNN_VERSION must match the base image
# above; TF Serving is pinned to the 1.5.0 release tag, and Bazel 0.8.0 is the
# version used to build that release.
ENV TF_CUDA_VERSION=9.0 \
TF_CUDNN_VERSION=7 \
TF_SERVING_COMMIT=tags/1.5.0 \
BAZEL_VERSION=0.8.0
# Toolchain and libraries needed to compile TensorFlow Serving.
# Fixes: original had `openjdk-8-jdk\` with no space before the backslash,
# which fuses the token with the next continuation line; package list is now
# sorted and installed with --no-install-recommends (hadolint DL3015).
# The apt list cache is removed in the same layer so it never persists.
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \
curl \
git \
libcurl3-dev \
libfreetype6-dev \
libpng12-dev \
libzmq3-dev \
mlocate \
openjdk-8-jdk \
openjdk-8-jre-headless \
pkg-config \
python-dev \
python-numpy \
python-pip \
software-properties-common \
swig \
wget \
zip \
zlib1g-dev \
&& \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# Set up grpc: Python gRPC runtime plus mock, used by the TF Serving build.
# --no-cache-dir keeps pip's download cache out of the image layer (DL3042).
RUN pip install --no-cache-dir mock grpcio
# Set up Bazel.
# key=value form (legacy space-separated ENV is deprecated).
ENV BAZELRC=/root/.bazelrc
# Install the Bazel release pinned by BAZEL_VERSION.
# Fixes: the scraped source had cache-mangled hosts (github.com /
# raw.githubusercontent.com); restored to the real github.com / raw.githubusercontent.com
# URLs. The installer is deleted in the same layer so it never persists.
WORKDIR /root/
RUN mkdir /bazel && \
cd /bazel && \
curl -fSsL -O https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/bazel-$BAZEL_VERSION-installer-linux-x86_64.sh && \
curl -fSsL -o /bazel/LICENSE.txt https://raw.githubusercontent.com/bazelbuild/bazel/master/LICENSE && \
chmod +x bazel-$BAZEL_VERSION-installer-linux-x86_64.sh && \
./bazel-$BAZEL_VERSION-installer-linux-x86_64.sh && \
cd / && \
rm -f /bazel/bazel-$BAZEL_VERSION-installer-linux-x86_64.sh
# Compile TF Serving with CUDA support.
# These variables answer every prompt of TensorFlow's ./configure script so
# the build below runs fully non-interactively: CUDA plus S3/GCP filesystem
# support are enabled, all other optional backends (MKL, XLA, OpenCL, MPI,
# Verbs, GDR, jemalloc, HDFS) are disabled. Adjust
# TF_CUDA_COMPUTE_CAPABILITIES to match the target GPUs.
# NOTE(review): CC_OPT_FLAGS="-march=native" ties the binary to the build
# host's CPU — confirm that is intended if images are built and run on
# different machines.
ENV TF_NEED_CUDA=1 \
TF_NEED_S3=1 \
TF_CUDA_COMPUTE_CAPABILITIES="3.5,5.2,6.1" \
TF_NEED_GCP=1 \
TF_NEED_JEMALLOC=0 \
TF_NEED_HDFS=0 \
TF_NEED_OPENCL=0 \
TF_NEED_MKL=0 \
TF_NEED_VERBS=0 \
TF_NEED_MPI=0 \
TF_NEED_GDR=0 \
TF_ENABLE_XLA=0 \
TF_CUDA_CLANG=0 \
TF_NEED_OPENCL_SYCL=0 \
CUDA_TOOLKIT_PATH=/usr/local/cuda \
CUDNN_INSTALL_PATH=/usr/lib/x86_64-linux-gnu \
GCC_HOST_COMPILER_PATH=/usr/bin/gcc \
PYTHON_BIN_PATH=/usr/bin/python \
CC_OPT_FLAGS="-march=native" \
PYTHON_LIB_PATH=/usr/local/lib/python2.7/dist-packages
# Clone TF Serving and pin it to the release tag in a single layer.
# Fixes: restored the scrape-mangled clone URL (github.com -> github.com)
# and merged clone + checkout into one RUN so a cached clone layer can never
# be combined with a later checkout of a different tag.
RUN cd /root && \
git clone --recurse-submodules https://github.com/tensorflow/serving && \
cd /root/serving && \
git checkout $TF_SERVING_COMMIT && \
git submodule update --recursive --init
# Run TensorFlow's interactive ./configure non-interactively: every answer is
# supplied by the TF_* / *_PATH variables set above; TF_SET_ANDROID_WORKSPACE
# is set empty inline to skip the Android workspace prompt.
RUN cd /root/serving/tensorflow && \
TF_SET_ANDROID_WORKSPACE= ./configure
# Compile the model server with GPU support (--config=cuda plus the CUDA
# crosstool) and CPU SIMD optimizations (AVX/AVX2/FMA/SSE4.2 — the resulting
# binary requires a CPU with these instruction sets). -k keeps building other
# targets after a failure; --verbose_failures aids debugging.
# NOTE(review): --copt=-mfpmath=both can change floating-point results vs.
# default SSE math on x86 — confirm it is still wanted.
RUN cd /root/serving && \
bazel build -c opt --copt=-mavx --copt=-mavx2 --copt=-mfma --copt=-mfpmath=both --copt=-msse4.2 --config=cuda -k --verbose_failures --crosstool_top=@local_config_cuda//crosstool:toolchain tensorflow_serving/model_servers:tensorflow_model_server
# Workaround: expose the CUDA toolkit under /usr/local/nvidia and point
# libcuda.so.1 at the driver *stub* shipped with the toolkit so the built
# binary can resolve libcuda without a real driver present at build time.
# NOTE(review): when run with --runtime=nvidia the real driver library is
# provided by the runtime — confirm the stub symlink does not shadow it.
RUN ln -s /usr/local/cuda /usr/local/nvidia && \
ln -s /usr/local/cuda/lib64/stubs/libcuda.so /usr/local/cuda/lib64/libcuda.so.1
CMD ["/bin/bash"]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment