From 78a0d36e8755b2e72cba31e0818fa383cba93073 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Tue, 30 Apr 2019 20:27:48 -0700
Subject: [PATCH 1/4] Resolved SPARK-26048. https://github.com/apache/spark/pull/23931

---
 dev/create-release/release-build.sh | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/dev/create-release/release-build.sh b/dev/create-release/release-build.sh
index 5e65d999f0b5..28aa74b9c0c0 100755
--- a/dev/create-release/release-build.sh
+++ b/dev/create-release/release-build.sh
@@ -122,6 +122,9 @@ fi
 
 PUBLISH_SCALA_2_12=0
 SCALA_2_12_PROFILES="-Pscala-2.12"
+if [[ $SPARK_VERSION < "3.0." ]]; then
+  SCALA_2_12_PROFILES="-Pscala-2.12 -Pflume"
+fi
 if [[ $SPARK_VERSION > "2.4" ]]; then
   PUBLISH_SCALA_2_12=1
 fi

From c74dfde72e49887fa24a23e132efa1eff8f6a49a Mon Sep 17 00:00:00 2001
From: wright
Date: Wed, 16 Jan 2019 21:00:58 -0600
Subject: [PATCH 2/4] [MINOR][BUILD] ensure call to translate_component has correct number of arguments

## What changes were proposed in this pull request?

The call to `translate_component` supplied only 2 of the 3 required arguments. I added a default empty list for the missing argument to avoid a run-time error.

I work for Semmle, and noticed the bug with our LGTM code analyzer:
https://lgtm.com/projects/g/apache/spark/snapshot/0655f1624ff7b73e5c8937ab9e83453a5a3a4466/files/dev/create-release/releaseutils.py?sort=name&dir=ASC&mode=heatmap#x1434915b6576fb40:1

## How was this patch tested?

I checked that `./dev/run-tests` passes OK.

Closes #23567 from ipwright/wrong-number-of-arguments-fix.

Authored-by: wright
Signed-off-by: Sean Owen
---
 dev/create-release/releaseutils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dev/create-release/releaseutils.py b/dev/create-release/releaseutils.py
index f273b337fdb4..a5a26ae8f535 100755
--- a/dev/create-release/releaseutils.py
+++ b/dev/create-release/releaseutils.py
@@ -236,7 +236,7 @@ def translate_component(component, commit_hash, warnings):
 # The returned components are already filtered and translated
 def find_components(commit, commit_hash):
     components = re.findall(r"\[\w*\]", commit.lower())
-    components = [translate_component(c, commit_hash)
+    components = [translate_component(c, commit_hash, [])
                   for c in components if c in known_components]
     return components
 
From 88e0791ac4f4ce2f29e5685ae2791e555408bef2 Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Tue, 2 Oct 2018 10:10:22 -0700
Subject: [PATCH 3/4] [SPARK-24530][FOLLOWUP] run Sphinx with python 3 in docker

## What changes were proposed in this pull request?

SPARK-24530 discovered a problem with generating the Python docs and provided a fix: setting SPHINXPYTHON to python 3. This PR makes that fix automatic in the release script using docker.

## How was this patch tested?

Verified by the 2.4.0 rc2.

Closes #22607 from cloud-fan/python.

Authored-by: Wenchen Fan
Signed-off-by: Marcelo Vanzin
---
 dev/create-release/do-release-docker.sh | 3 +++
 dev/create-release/spark-rm/Dockerfile  | 4 ++--
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/dev/create-release/do-release-docker.sh b/dev/create-release/do-release-docker.sh
index fa7b73cdb40e..c1a122ebfb12 100755
--- a/dev/create-release/do-release-docker.sh
+++ b/dev/create-release/do-release-docker.sh
@@ -135,6 +135,9 @@ if [ -n "$JAVA" ]; then
   JAVA_VOL="--volume $JAVA:/opt/spark-java"
 fi
 
+# SPARK-24530: Sphinx must work with python 3 to generate doc correctly.
+echo "SPHINXPYTHON=/opt/p35/bin/python" >> $ENVFILE
+
 echo "Building $RELEASE_TAG; output will be at $WORKDIR/output"
 docker run -ti \
   --env-file "$ENVFILE" \

diff --git a/dev/create-release/spark-rm/Dockerfile b/dev/create-release/spark-rm/Dockerfile
index 15f831cf06a6..42315446016c 100644
--- a/dev/create-release/spark-rm/Dockerfile
+++ b/dev/create-release/spark-rm/Dockerfile
@@ -62,8 +62,8 @@ RUN echo 'deb http://cran.cnr.Berkeley.edu/bin/linux/ubuntu xenial/' >> /etc/apt
   pip install $BASE_PIP_PKGS && \
   pip install $PIP_PKGS && \
   cd && \
-  virtualenv -p python3 p35 && \
-  . p35/bin/activate && \
+  virtualenv -p python3 /opt/p35 && \
+  . /opt/p35/bin/activate && \
   pip install $BASE_PIP_PKGS && \
   pip install $PIP_PKGS && \
   # Install R packages and dependencies used when building.

From 3ed2e8c673ce1b5e51932d607d2d2e5301c75b31 Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Tue, 30 Oct 2018 21:17:40 +0800
Subject: [PATCH 4/4] [BUILD][MINOR] release script should not be interrupted by svn

## What changes were proposed in this pull request?

When running the release script, you will be interrupted unexpectedly:

```
ATTENTION!  Your password for authentication realm:

   ASF Committers

can only be stored to disk unencrypted!  You are advised to configure
your system so that Subversion can store passwords encrypted, if
possible.  See the documentation for details.

You can avoid future appearances of this warning by setting the value
of the 'store-plaintext-passwords' option to either 'yes' or 'no' in
'/home/spark-rm/.subversion/servers'.
-----------------------------------------------------------------------
Store password unencrypted (yes/no)?
```

We can avoid it by adding `--no-auth-cache` when running the svn command.

## How was this patch tested?

Manually verified with 2.4.0 RC5.

Closes #22885 from cloud-fan/svn.

Authored-by: Wenchen Fan
Signed-off-by: Wenchen Fan
---
 dev/create-release/release-build.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dev/create-release/release-build.sh b/dev/create-release/release-build.sh
index 28aa74b9c0c0..affb4dc71756 100755
--- a/dev/create-release/release-build.sh
+++ b/dev/create-release/release-build.sh
@@ -330,7 +330,7 @@ if [[ "$1" == "package" ]]; then
   svn add "svn-spark/${DEST_DIR_NAME}-bin"
 
   cd svn-spark
-  svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION"
+  svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION" --no-auth-cache
   cd ..
   rm -rf svn-spark
 fi
@@ -358,7 +358,7 @@ if [[ "$1" == "docs" ]]; then
   svn add "svn-spark/${DEST_DIR_NAME}-docs"
 
   cd svn-spark
-  svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION docs"
+  svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION docs" --no-auth-cache
   cd ..
   rm -rf svn-spark
 fi
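
Notes on the techniques above, with short illustrative sketches; any value that does not appear in the patches is hypothetical.

Patch 1 gates the `-Pflume` profile on `[[ $SPARK_VERSION < "3.0." ]]`. Inside bash `[[ ]]`, `<` compares strings lexicographically rather than numerically, so every pre-3.0 version string (where the Flume module still exists) matches. A minimal sketch of that behavior, using made-up version strings:

```bash
#!/usr/bin/env bash
# Lexicographic version gating as in release-build.sh; the version values
# below are hypothetical examples, not real release tags.
SCALA_2_12_PROFILES="-Pscala-2.12"

for SPARK_VERSION in "2.4.3" "3.0.0"; do
  # [[ a < b ]] compares character by character: "2..." sorts before "3.0.",
  # while "3.0.0" does not (a string never sorts before its own prefix).
  # Caveat of this scheme: "2.10.0" would sort before "2.4.0".
  if [[ $SPARK_VERSION < "3.0." ]]; then
    PROFILES="$SCALA_2_12_PROFILES -Pflume"   # pre-3.0: build the Flume module
  else
    PROFILES="$SCALA_2_12_PROFILES"
  fi
  echo "$SPARK_VERSION -> $PROFILES"
done
```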
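Patch 3 relies on `docker run --env-file`: each `KEY=VALUE` line of the file becomes an environment variable inside the container, so appending the `SPHINXPYTHON` line to `$ENVFILE` is enough for the doc build in the container to pick up the Python 3 virtualenv. A self-contained sketch of that plumbing (the image and the echoed message are placeholders):

```bash
#!/usr/bin/env bash
# Demonstrates the --env-file mechanism used by do-release-docker.sh.
ENVFILE="$(mktemp)"
echo "SPHINXPYTHON=/opt/p35/bin/python" >> "$ENVFILE"

# Every KEY=VALUE line of $ENVFILE is injected into the container's
# environment, where a Sphinx-aware doc build can read it.
docker run --rm --env-file "$ENVFILE" ubuntu:16.04 \
  bash -c 'echo "SPHINXPYTHON is: $SPHINXPYTHON"'

rm -f "$ENVFILE"
```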
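Patch 4 appends `--no-auth-cache` so svn never offers to cache the password, which is what raised the interactive "Store password unencrypted (yes/no)?" prompt. A sketch of the same pattern with placeholder credentials; `--non-interactive` is an extra svn option not used in the patch, added here to make svn fail fast instead of ever prompting:

```bash
#!/usr/bin/env bash
# Non-interactive svn commit, mirroring the release-build.sh pattern.
# Assumes svn-spark is an existing checkout with staged changes; the
# credentials and version below are placeholders.
ASF_USERNAME="placeholder-user"
ASF_PASSWORD="placeholder-password"
SPARK_PACKAGE_VERSION="X.Y.Z-rcN"

cd svn-spark
# --no-auth-cache: do not persist credentials, so svn never asks whether
# to store them in plaintext.
svn ci --username "$ASF_USERNAME" --password "$ASF_PASSWORD" \
  -m "Apache Spark $SPARK_PACKAGE_VERSION" \
  --no-auth-cache \
  --non-interactive
cd ..
```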