diff --git a/tensorflow_serving/tools/docker/Dockerfile.devel-mkl-aarch64 b/tensorflow_serving/tools/docker/Dockerfile.devel-mkl-aarch64
new file mode 100644
index 00000000000..ac7f6ce9679
--- /dev/null
+++ b/tensorflow_serving/tools/docker/Dockerfile.devel-mkl-aarch64
@@ -0,0 +1,152 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ARG base_image="ubuntu:20.04"
+
+FROM "${base_image}" as base_build
+
+ARG TF_SERVING_VERSION_GIT_BRANCH=master
+ARG TF_SERVING_VERSION_GIT_COMMIT=HEAD
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+LABEL maintainer="Sunita Nadampalli"
+LABEL tensorflow_serving_github_branchtag=${TF_SERVING_VERSION_GIT_BRANCH}
+LABEL tensorflow_serving_github_commit=${TF_SERVING_VERSION_GIT_COMMIT}
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+        automake \
+        build-essential \
+        ca-certificates \
+        curl \
+        git \
+        gcc-10 \
+        g++-10 \
+        libcurl3-dev \
+        libfreetype6-dev \
+        libpng-dev \
+        libtool \
+        libzmq3-dev \
+        mlocate \
+        openjdk-8-jdk \
+        openjdk-8-jre-headless \
+        pkg-config \
+        python-dev-is-python3 \
+        software-properties-common \
+        swig \
+        unzip \
+        wget \
+        zip \
+        zlib1g-dev \
+        python3-distutils \
+        && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
+RUN curl -fSsL -O https://bootstrap.pypa.io/get-pip.py && \
+    python3 get-pip.py && \
+    rm get-pip.py
+
+# Make gcc-10 the default compiler version.
+RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1 && \
+    update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 1
+
+RUN pip3 --no-cache-dir install \
+    'future>=0.17.1' \
+    grpcio \
+    h5py \
+    'keras_applications>=1.0.8' \
+    'keras_preprocessing>=1.1.0' \
+    mock \
+    numpy \
+    portpicker \
+    requests \
+    --ignore-installed 'six>=1.12.0'
+
+# Set up Bazel
+ENV BAZEL_VERSION 5.1.1
+WORKDIR /
+RUN mkdir /bazel && \
+    cd /bazel && \
+    wget https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/bazel-$BAZEL_VERSION-linux-arm64 && \
+    mv bazel-$BAZEL_VERSION-linux-arm64 bazel && \
+    chmod a+x bazel
+ENV PATH=$WORKDIR/bazel:$PATH
+
+# Download TF Serving sources (optionally at specific commit).
+WORKDIR /tensorflow-serving
+RUN curl -sSL --retry 5 https://github.com/tensorflow/serving/tarball/${TF_SERVING_VERSION_GIT_COMMIT} | tar --strip-components=1 -xzf -
+
+FROM base_build as binary_build
+# Build and install TensorFlow Serving
+ARG TF_SERVING_BUILD_OPTIONS="--config=mkl_aarch64 --copt=-Wno-maybe-uninitialized --copt=-Wno-error=stringop-truncation"
+RUN echo "Building with build options: ${TF_SERVING_BUILD_OPTIONS}"
+ARG TF_SERVING_BAZEL_OPTIONS=""
+RUN echo "Building with Bazel options: ${TF_SERVING_BAZEL_OPTIONS}"
+
+RUN bazel build --color=yes --curses=yes \
+    ${TF_SERVING_BAZEL_OPTIONS} \
+    --verbose_failures \
+    --output_filter=DONT_MATCH_ANYTHING \
+    ${TF_SERVING_BUILD_OPTIONS} \
+    tensorflow_serving/model_servers:tensorflow_model_server && \
+    cp bazel-bin/tensorflow_serving/model_servers/tensorflow_model_server \
+    /usr/local/bin/
+
+# Build and install TensorFlow Serving API
+RUN bazel build --color=yes --curses=yes \
+    ${TF_SERVING_BAZEL_OPTIONS} \
+    --verbose_failures \
+    --output_filter=DONT_MATCH_ANYTHING \
+    ${TF_SERVING_BUILD_OPTIONS} \
+    tensorflow_serving/tools/pip_package:build_pip_package && \
+    bazel-bin/tensorflow_serving/tools/pip_package/build_pip_package \
+    /tmp/pip && \
+    pip --no-cache-dir install --no-dependencies --upgrade \
+    /tmp/pip/tensorflow_serving_api-*.whl && \
+    rm -rf /tmp/pip
+
+# Expose ports
+# gRPC
+EXPOSE 8500
+
+# REST
+EXPOSE 8501
+
+# Set where models should be stored in the container
+ENV MODEL_BASE_PATH=/models
+RUN mkdir -p ${MODEL_BASE_PATH}
+
+# The only required piece is the model name in order to differentiate endpoints
+ENV MODEL_NAME=model
+
+# Create a script that runs the model server so we can use environment variables
+# while also passing in arguments from the docker command line
+RUN echo '#!/bin/bash \n\n\
+tensorflow_model_server --port=8500 --rest_api_port=8501 \
+--model_name=${MODEL_NAME} --model_base_path=${MODEL_BASE_PATH}/${MODEL_NAME} \
+"$@"' > /usr/bin/tf_serving_entrypoint.sh \
+&& chmod +x /usr/bin/tf_serving_entrypoint.sh
+
+ENTRYPOINT ["/usr/bin/tf_serving_entrypoint.sh"]
+
+
+ENV LIBRARY_PATH '/usr/local/lib:$LIBRARY_PATH'
+ENV LD_LIBRARY_PATH '/usr/local/lib:$LD_LIBRARY_PATH'
+
+FROM binary_build as clean_build
+# Clean up Bazel cache when done.
+RUN bazel clean --expunge --color=yes && \
+    rm -rf /root/.cache
+CMD ["/bin/bash"]
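A minimal usage sketch for the image built from this Dockerfile: the commands assume they are run from the root of a tensorflow/serving checkout, and the image tag tf-serving-devel-mkl-aarch64, the host path /path/to/my_model, and the model name my_model are illustrative placeholders, not part of the patch.

# Build the aarch64 MKL devel image; the ARGs declared in the Dockerfile
# (e.g. TF_SERVING_VERSION_GIT_COMMIT) can be overridden with --build-arg.
docker build -t tf-serving-devel-mkl-aarch64 \
    --build-arg TF_SERVING_VERSION_GIT_COMMIT=HEAD \
    -f tensorflow_serving/tools/docker/Dockerfile.devel-mkl-aarch64 .

# Serve a SavedModel mounted under ${MODEL_BASE_PATH}/${MODEL_NAME}, publishing
# the gRPC (8500) and REST (8501) ports exposed by the image. --entrypoint is
# passed explicitly so the default CMD ("/bin/bash") of this devel image is not
# appended to the model server arguments.
docker run --rm -p 8500:8500 -p 8501:8501 \
    -v /path/to/my_model:/models/my_model \
    -e MODEL_NAME=my_model \
    --entrypoint /usr/bin/tf_serving_entrypoint.sh \
    tf-serving-devel-mkl-aarch64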