Dockerfile to reproduce MKL bug in TF 1.6
FROM tensorflow/tensorflow:latest-devel

# Based on https://github.com/tensorflow/tensorflow/blob/master/tensorflow/tools/docker/Dockerfile.devel-cpu-mkl
# Original by Clayne Robison <[email protected]>

# These arguments are parameterized. Use --build-arg to override.
ARG TF_BRANCH=r1.6
ARG WHL_DIR=/whl
# Set this to 0 to build without MKL
ARG MKL_FLAG=1

RUN apt-get update && apt-get install -y --no-install-recommends \
        wget \
        && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

RUN pip --no-cache-dir install --upgrade \
        pip setuptools wheel h5py

# Download and build TensorFlow.
WORKDIR /
RUN rm -rf tensorflow && \
    git clone https://github.com/tensorflow/tensorflow.git && \
    cd tensorflow && \
    git checkout ${TF_BRANCH}
WORKDIR /tensorflow

# Configure the build for CPU with MKL by accepting default build options and
# setting library locations.
ENV CI_BUILD_PYTHON=python \
    LD_LIBRARY_PATH=${LD_LIBRARY_PATH} \
    PYTHON_BIN_PATH=/usr/bin/python \
    PYTHON_LIB_PATH=/usr/local/lib/python2.7/dist-packages \
    TF_NEED_JEMALLOC=0 \
    TF_NEED_GCP=0 \
    TF_NEED_CUDA=0 \
    TF_NEED_HDFS=0 \
    TF_NEED_S3=0 \
    TF_NEED_OPENCL=0 \
    TF_NEED_GDR=0 \
    TF_ENABLE_XLA=0 \
    TF_NEED_VERBS=0 \
    TF_NEED_MPI=0
RUN ./configure

# Build and install TensorFlow.
# The 'mkl' option builds with the Intel(R) Math Kernel Library (MKL), which
# detects the platform it is currently running on and takes appropriately
# optimized paths.
RUN LD_LIBRARY_PATH=${LD_LIBRARY_PATH} \
    bazel build --config="opt" \
        `if [ "$MKL_FLAG" -eq "1" ]; then echo -n "--config=mkl"; else echo -n ""; fi` \
        --copt="-O3" \
        //tensorflow/tools/pip_package:build_pip_package && \
    mkdir ${WHL_DIR} && \
    bazel-bin/tensorflow/tools/pip_package/build_pip_package ${WHL_DIR}

# Clean up Bazel cache when done, but leave the whl.
# This will upgrade the default TensorFlow version with the Intel MKL version.
RUN pip --no-cache-dir install --upgrade ${WHL_DIR}/tensorflow-*.whl

# Download Xception weights; on container start, run one prediction to trigger the bug.
WORKDIR /root
RUN wget \
    https://github.com/fchollet/deep-learning-models/releases/download/v0.4/xception_weights_tf_dim_ordering_tf_kernels.h5 \
    -O xception_weights.h5

ENTRYPOINT python -c \
    "import tensorflow as tf; \
    import numpy as np; \
    x = tf.keras.applications.xception.Xception(weights=\"xception_weights.h5\"); \
    x.predict(np.zeros((1, 299, 299, 3)), batch_size=1);"
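
For reference, the backtick conditional in the bazel build step resolves to one of two commands depending on MKL_FLAG (a sketch of the expanded invocations; nothing else differs between the two builds):

With the default MKL_FLAG=1:
bazel build --config="opt" --config=mkl --copt="-O3" //tensorflow/tools/pip_package:build_pip_package

With MKL_FLAG=0:
bazel build --config="opt" --copt="-O3" //tensorflow/tools/pip_package:build_pip_package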
TensorFlow 1.6
Build with MKL library:
docker build -t tf_mkl -f Dockerfile.devel-cpu-mkl .
Build without MKL library:
docker build -t tf_nomkl -f Dockerfile.devel-cpu-mkl --build-arg MKL_FLAG=0 .
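
The other build arguments declared at the top of the Dockerfile (TF_BRANCH, WHL_DIR) can be overridden the same way. A hypothetical example that builds a different release branch (the image tag and branch below are chosen only for illustration):

docker build -t tf_mkl_r15 -f Dockerfile.devel-cpu-mkl --build-arg TF_BRANCH=r1.5 .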
Run image built with MKL library:
docker run tf_mkl
Result: error (the MKL bug is reproduced)
Run image built without MKL library:
docker run tf_nomkl
Result: no error
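
To double-check which wheel ended up in each image, the entrypoint can be overridden with a quick query. The IsMklEnabled() helper used below is an assumption: it is exposed by some TensorFlow 1.x builds via tensorflow.python.pywrap_tensorflow, but may be missing in other releases.

docker run --entrypoint python tf_mkl -c "from tensorflow.python import pywrap_tensorflow; print(pywrap_tensorflow.IsMklEnabled())"
docker run --entrypoint python tf_nomkl -c "from tensorflow.python import pywrap_tensorflow; print(pywrap_tensorflow.IsMklEnabled())"

If the helper is available, the first command should print True and the second False.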