Skip to content
Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
Make the docker image tool support cross building with the new docker buildx command
  • Loading branch information
holdenk committed May 16, 2020
commit 6a25a323f16f1e403b471b3a4ac88e29483fc604
31 changes: 30 additions & 1 deletion bin/docker-image-tool.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
# This script builds and pushes docker images when run from a release of Spark
# with Kubernetes support.

set -x

function error {
echo "$@" 1>&2
exit 1
Expand Down Expand Up @@ -62,6 +64,9 @@ function docker_push {
if [ $? -ne 0 ]; then
error "Failed to push $image_name Docker image."
fi
if [ "${CROSS_BUILD}" != "false" ]; then
docker buildx push "$(image_ref ${image_name})"
fi
else
echo "$(image_ref ${image_name}) image not found. Skipping push for this image."
fi
Expand Down Expand Up @@ -172,13 +177,19 @@ function build {
local BASEDOCKERFILE=${BASEDOCKERFILE:-"kubernetes/dockerfiles/spark/Dockerfile"}
local PYDOCKERFILE=${PYDOCKERFILE:-false}
local RDOCKERFILE=${RDOCKERFILE:-false}
local ARCHS=${ARCHS:-"--platform linux/amd64,linux/arm64"}

(cd $(img_ctx_dir base) && docker build $NOCACHEARG "${BUILD_ARGS[@]}" \
-t $(image_ref spark) \
-f "$BASEDOCKERFILE" .)
if [ $? -ne 0 ]; then
error "Failed to build Spark JVM Docker image, please refer to Docker build output for details."
fi
if [ "${CROSS_BUILD}" != "false" ]; then
(cd $(img_ctx_dir base) && docker buildx build $ARCHS $NOCACHEARG "${BUILD_ARGS[@]}" \
-t $(image_ref spark) \
-f "$BASEDOCKERFILE" .)
fi

if [ "${PYDOCKERFILE}" != "false" ]; then
(cd $(img_ctx_dir pyspark) && docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
Expand All @@ -187,6 +198,11 @@ function build {
if [ $? -ne 0 ]; then
error "Failed to build PySpark Docker image, please refer to Docker build output for details."
fi
if [ "${CROSS_BUILD}" != "false" ]; then
(cd $(img_ctx_dir pyspark) && docker buildx build $ARCHS $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-py) \
-f "$PYDOCKERFILE" .)
fi
fi

if [ "${RDOCKERFILE}" != "false" ]; then
Expand All @@ -196,6 +212,11 @@ function build {
if [ $? -ne 0 ]; then
error "Failed to build SparkR Docker image, please refer to Docker build output for details."
fi
if [ "${CROSS_BUILD}" != "false" ]; then
(cd $(img_ctx_dir sparkr) && docker buildx build $ARCHS $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-r) \
-f "$RDOCKERFILE" .)
fi
fi
}

Expand Down Expand Up @@ -227,6 +248,7 @@ Options:
-n Build docker image with --no-cache
-u uid UID to use in the USER directive to set the user the main Spark process runs as inside the
resulting container
-X Use docker buildx to cross build
-b arg Build arg to build or push the image. For multiple build args, this option needs to
be used separately for each build arg.

Expand All @@ -252,6 +274,11 @@ Examples:
- Build and push JDK11-based image with tag "v3.0.0" to docker.io/myrepo
$0 -r docker.io/myrepo -t v3.0.0 -b java_image_tag=11-jre-slim build
$0 -r docker.io/myrepo -t v3.0.0 push

- Build and push JDK11-based image for multiple archs to docker.io/myrepo
$0 -r docker.io/myrepo -t v3.0.0 -X -b java_image_tag=11-jre-slim build
$0 -r docker.io/myrepo -t v3.0.0 -X push

EOF
}

Expand All @@ -268,7 +295,8 @@ RDOCKERFILE=
NOCACHEARG=
BUILD_PARAMS=
SPARK_UID=
while getopts f:p:R:mr:t:nb:u: option
CROSS_BUILD="false"
while getopts f:p:R:mr:t:Xnb:u: option
do
case "${option}"
in
Expand All @@ -279,6 +307,7 @@ do
t) TAG=${OPTARG};;
n) NOCACHEARG="--no-cache";;
b) BUILD_PARAMS=${BUILD_PARAMS}" --build-arg "${OPTARG};;
X) CROSS_BUILD=1;;
m)
if ! which minikube 1>/dev/null; then
error "Cannot find minikube."
Expand Down