29 changes: 28 additions & 1 deletion bin/docker-image-tool.sh
@@ -19,6 +19,8 @@
# This script builds and pushes docker images when run from a release of Spark
# with Kubernetes support.

set -x

function error {
echo "$@" 1>&2
exit 1
@@ -172,13 +174,19 @@ function build {
local BASEDOCKERFILE=${BASEDOCKERFILE:-"kubernetes/dockerfiles/spark/Dockerfile"}
local PYDOCKERFILE=${PYDOCKERFILE:-false}
local RDOCKERFILE=${RDOCKERFILE:-false}
local ARCHS=${ARCHS:-"--platform linux/amd64,linux/arm64"}

(cd $(img_ctx_dir base) && docker build $NOCACHEARG "${BUILD_ARGS[@]}" \
-t $(image_ref spark) \
-f "$BASEDOCKERFILE" .)
if [ $? -ne 0 ]; then
error "Failed to build Spark JVM Docker image, please refer to Docker build output for details."
fi
if [ "${CROSS_BUILD}" != "false" ]; then
(cd $(img_ctx_dir base) && docker buildx build $ARCHS $NOCACHEARG "${BUILD_ARGS[@]}" \
-t $(image_ref spark) \
-f "$BASEDOCKERFILE" .)
fi

if [ "${PYDOCKERFILE}" != "false" ]; then
(cd $(img_ctx_dir pyspark) && docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
@@ -187,6 +195,11 @@ function build {
if [ $? -ne 0 ]; then
error "Failed to build PySpark Docker image, please refer to Docker build output for details."
fi
if [ "${CROSS_BUILD}" != "false" ]; then
(cd $(img_ctx_dir pyspark) && docker buildx build $ARCHS $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-py) \
-f "$PYDOCKERFILE" .)
fi
fi

if [ "${RDOCKERFILE}" != "false" ]; then
@@ -196,6 +209,11 @@ function build {
if [ $? -ne 0 ]; then
error "Failed to build SparkR Docker image, please refer to Docker build output for details."
fi
if [ "${CROSS_BUILD}" != "false" ]; then
(cd $(img_ctx_dir sparkr) && docker buildx build $ARCHS $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-r) \
-f "$RDOCKERFILE" .)
fi
fi
}
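For reference, here is roughly what the new cross-build branch expands to for the JVM image when the tool is run with -X. This is a sketch only; the repo, tag, and absence of extra build args are illustrative placeholders, not part of this patch.

# Assumed invocation: ./bin/docker-image-tool.sh -r docker.io/myrepo -t v3.0.0 -X build
cd "$(img_ctx_dir base)" && docker buildx build \
  --platform linux/amd64,linux/arm64 \
  -t docker.io/myrepo/spark:v3.0.0 \
  -f kubernetes/dockerfiles/spark/Dockerfile .
# buildx keeps multi-arch results inside the builder unless they are exported
# (e.g. with --push), which is why -X is documented below as pushing automatically.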

@@ -227,6 +245,7 @@ Options:
-n Build docker image with --no-cache
-u uid UID to use in the USER directive to set the user the main Spark process runs as inside the
resulting container
-X Use docker buildx to cross build. Automatically pushes.
-b arg Build arg to build or push the image. For multiple build args, this option needs to
be used separately for each build arg.

@@ -252,6 +271,12 @@ Examples:
- Build and push JDK11-based image with tag "v3.0.0" to docker.io/myrepo
$0 -r docker.io/myrepo -t v3.0.0 -b java_image_tag=11-jre-slim build
$0 -r docker.io/myrepo -t v3.0.0 push

- Build and push JDK11-based image for multiple archs to docker.io/myrepo
$0 -r docker.io/myrepo -t v3.0.0 -X -b java_image_tag=11-jre-slim build
# Note: buildx, which does cross building, needs to do the push during build
# So there is no separate push step with -X

EOF
}
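As a usage note that is not part of this patch: docker buildx multi-platform builds generally need a builder instance backed by a driver that supports them, and the pushed image can be checked afterwards with imagetools. A rough sketch, with placeholder builder name, repo, and tag:

# One-time setup: create and select a builder capable of multi-platform builds
docker buildx create --name spark-builder --use
docker buildx inspect --bootstrap

# Cross-build and push with the new -X flag, then inspect the published manifest
./bin/docker-image-tool.sh -r docker.io/myrepo -t v3.0.0 -X -b java_image_tag=11-jre-slim build
docker buildx imagetools inspect docker.io/myrepo/spark:v3.0.0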

@@ -268,7 +293,8 @@ RDOCKERFILE=
NOCACHEARG=
BUILD_PARAMS=
SPARK_UID=
while getopts f:p:R:mr:t:nb:u: option
CROSS_BUILD="false"
while getopts f:p:R:mr:t:Xnb:u: option
do
case "${option}"
in
Expand All @@ -279,6 +305,7 @@ do
t) TAG=${OPTARG};;
n) NOCACHEARG="--no-cache";;
b) BUILD_PARAMS=${BUILD_PARAMS}" --build-arg "${OPTARG};;
X) CROSS_BUILD=1;;
m)
if ! which minikube 1>/dev/null; then
error "Cannot find minikube."