 # This script builds and pushes docker images when run from a release of Spark
 # with Kubernetes support.
 
-declare -A path=( [spark-driver]=kubernetes/dockerfiles/driver/Dockerfile \
-                  [spark-executor]=kubernetes/dockerfiles/executor/Dockerfile \
-                  [spark-init]=kubernetes/dockerfiles/init-container/Dockerfile )
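+# Print the given message to stderr and exit with an error.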
+function error {
+  echo "$@" 1>&2
+  exit 1
+}
+
+# Detect whether this is a git clone or a Spark distribution and adjust paths
+# accordingly.
+if [ -z "${SPARK_HOME}" ]; then
+  SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+if [ -f "$SPARK_HOME/RELEASE" ]; then
+  IMG_PATH="kubernetes/dockerfiles"
+  SPARK_JARS="jars"
+else
+  IMG_PATH="resource-managers/kubernetes/docker/src/main/dockerfiles"
+  SPARK_JARS="assembly/target/scala-$SPARK_SCALA_VERSION/jars"
+fi
+
+if [ ! -d "$IMG_PATH" ]; then
+  error "Cannot find docker images. This script must be run from a runnable distribution of Apache Spark."
+fi
+
+declare -A path=( [spark-driver]="$IMG_PATH/driver/Dockerfile" \
+                  [spark-executor]="$IMG_PATH/executor/Dockerfile" \
+                  [spark-init]="$IMG_PATH/init-container/Dockerfile" )
+
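+# Compose the full image reference, prefixing it with $REPO (unless disabled)
+# and appending :$TAG when a tag is set.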
+function image_ref {
+  local image="$1"
+  local add_repo="${2:-1}"
+  if [ $add_repo = 1 ] && [ -n "$REPO" ]; then
+    image="$REPO/$image"
+  fi
+  if [ -n "$TAG" ]; then
+    image="$image:$TAG"
+  fi
+  echo "$image"
+}
 
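+# Build the spark-base image first, then the driver, executor, and
+# init-container images on top of it.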
 function build {
-  docker build -t spark-base -f kubernetes/dockerfiles/spark-base/Dockerfile .
+  local base_image="$(image_ref spark-base 0)"
+  docker build --build-arg "spark_jars=$SPARK_JARS" \
+    --build-arg "img_path=$IMG_PATH" \
+    -t "$base_image" \
+    -f "$IMG_PATH/spark-base/Dockerfile" .
   for image in "${!path[@]}"; do
-    docker build -t ${REPO}/$image:${TAG} -f ${path[$image]} .
+    docker build --build-arg "base_image=$base_image" -t "$(image_ref $image)" -f ${path[$image]} .
   done
 }
 
-
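+# Push the driver, executor, and init-container images to the configured repository.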
 function push {
   for image in "${!path[@]}"; do
-    docker push ${REPO}/$image:${TAG}
+    docker push "$(image_ref $image)"
   done
 }
 
 function usage {
-  echo "This script must be run from a runnable distribution of Apache Spark."
-  echo "Usage: ./sbin/build-push-docker-images.sh -r <repo> -t <tag> build"
-  echo "       ./sbin/build-push-docker-images.sh -r <repo> -t <tag> push"
-  echo "for example: ./sbin/build-push-docker-images.sh -r docker.io/myrepo -t v2.3.0 push"
+  cat <<EOF
+Usage: $0 [options] [command]
+Builds or pushes the built-in Spark Docker images.
+
+Commands:
+  build       Build images.
+  push        Push images to a registry. Requires a repository address to be provided both
+              when building and when pushing the images.
+
+Options:
+  -r repo     Repository address.
+  -t tag      Tag to apply to built images, or to identify images to be pushed.
+  -m          Use minikube's Docker daemon.
+
+Using minikube when building images will build them directly into minikube's Docker daemon.
+There is no need to push the images into minikube in that case; they'll be automatically
+available when running applications inside the minikube cluster.
+
+Check the following documentation for more information on using the minikube Docker daemon:
+
+  https://kubernetes.io/docs/getting-started-guides/minikube/#reusing-the-docker-daemon
+
+Examples:
+  - Build images in minikube with tag "testing"
+    $0 -m -t testing build
+
+  - Build and push images with tag "v2.3.0" to docker.io/myrepo
+    $0 -r docker.io/myrepo -t v2.3.0 build
+    $0 -r docker.io/myrepo -t v2.3.0 push
+EOF
 }
 
 if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
   usage
   exit 0
 fi
 
-while getopts r:t: option
+REPO=
+TAG=
+while getopts mr:t: option
 do
  case "${option}"
  in
  r) REPO=${OPTARG};;
  t) TAG=${OPTARG};;
+ m)
+   if ! which minikube 1>/dev/null; then
+     error "Cannot find minikube."
+   fi
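+   # Point subsequent docker commands at minikube's Docker daemon.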
+   eval $(minikube docker-env)
+   ;;
  esac
 done
 
-if [ -z "$REPO" ] || [ -z "$TAG" ]; then
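+# The last command-line argument selects the command to run.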
+case "${@: -1}" in
+  build)
+    build
+    ;;
+  push)
+    if [ -z "$REPO" ]; then
+      usage
+      exit 1
+    fi
+    push
+    ;;
+  *)
     usage
-else
-  case "${@: -1}" in
-    build) build;;
-    push) push;;
-    *) usage;;
-  esac
-fi
+    exit 1
+    ;;
+esac