From 2d8eb29b822e2c79951769cdc591579dc0a79c0b Mon Sep 17 00:00:00 2001 From: JohT Date: Sat, 8 Jul 2023 09:59:54 +0200 Subject: [PATCH 1/7] Add target directory option for maven download --- COMMANDS.md | 1 + scripts/downloadMavenArtifact.sh | 79 +++++++++++++++++++++----------- 2 files changed, 52 insertions(+), 28 deletions(-) diff --git a/COMMANDS.md b/COMMANDS.md index a2c40a380..3cf6a4316 100644 --- a/COMMANDS.md +++ b/COMMANDS.md @@ -123,6 +123,7 @@ to download a Maven artifact into the artifacts directory: - `-a ` - `-v ` - `-t ` +- `-d ` ### Reset the database and scan the java artifacts diff --git a/scripts/downloadMavenArtifact.sh b/scripts/downloadMavenArtifact.sh index bcd4b9201..466174b3a 100755 --- a/scripts/downloadMavenArtifact.sh +++ b/scripts/downloadMavenArtifact.sh @@ -6,66 +6,89 @@ # -a Maven Artifact Name # -v Maven Artifact Version # -t Maven Artifact Type (defaults to jar) +# -d Target directory for the downloaded file -# Read options -ARTIFACT_TYPE="jar" +# Overrideable constants +ARTIFACTS_DIRECTORY=${ARTIFACTS_DIRECTORY:-"artifacts"} + +# Default and initial values for command line options +groupId="" +artifactId="" +version="" +artifactType="jar" +targetDirectory="${ARTIFACTS_DIRECTORY}" + +# Read command line options +USAGE="downloadMavenArtifact: Usage: $0 [-g group_id] [-a artifact_id] [-v version] [-t type (default=jar)] [-d targetDirectory (default=${ARTIFACTS_DIRECTORY})]" OPTIND=1 -while getopts "g:a:v:t:" opt; do +while getopts "g:a:v:t:d:" opt; do case ${opt} in g ) - GROUP_ID=${OPTARG} + groupId=${OPTARG} ;; a ) - ARTIFACT_ID=${OPTARG} + artifactId=${OPTARG} ;; v ) - VERSION=${OPTARG} + version=${OPTARG} ;; t ) - ARTIFACT_TYPE=${OPTARG} + artifactType=${OPTARG} + ;; + d ) + targetDirectory=${OPTARG} ;; \? ) - echo "Usage: $0 [-g group_id] [-a artifact_id] [-v version] [-t type (default=jar)]" + echo "${USAGE}" exit 1 ;; esac done -if [[ -z ${GROUP_ID} || -z ${ARTIFACT_ID} || -z ${VERSION} || -z ${ARTIFACT_TYPE} ]]; then - echo "Usage: $0 [-g group_id] [-a artifact_id] [-v version] [-t type (default=jar)]" +if [[ -z ${groupId} || -z ${artifactId} || -z ${version} || -z ${artifactType} || -z ${targetDirectory} ]]; then + echo "${USAGE}" exit 1 fi -# Overrideable constants -ARTIFACTS_DIRECTORY=${ARTIFACTS_DIRECTORY:-"artifacts"} - # Internal constants BASE_URL="https://repo1.maven.org/maven2" -ARTIFACT_FILENAME="${ARTIFACT_ID}-${VERSION}.${ARTIFACT_TYPE}" -GROUP_ID_FOR_API="$(echo "${GROUP_ID}" | tr '.' '/')" -DOWNLOAD_URL="${BASE_URL}/${GROUP_ID_FOR_API}/${ARTIFACT_ID}/${VERSION}/${ARTIFACT_FILENAME}" +ARTIFACT_FILENAME="${artifactId}-${version}.${artifactType}" +GROUP_ID_FOR_API="$(echo "${groupId}" | tr '.' '/')" +DOWNLOAD_URL="${BASE_URL}/${GROUP_ID_FOR_API}/${artifactId}/${version}/${ARTIFACT_FILENAME}" -# Download Maven Artifact into the ARTIFACTS_DIRECTORY -if [ ! -f "${ARTIFACTS_DIRECTORY}/${ARTIFACT_FILENAME}" ] ; then - echo "Downloading ${DOWNLOAD_URL}" +# Download Maven Artifact into the "targetDirectory" +if [ ! -f "./${targetDirectory}/${ARTIFACT_FILENAME}" ] ; then + echo "downloadMavenArtifact: Downloading ${DOWNLOAD_URL} into target directory ${targetDirectory}" # Download Maven Artifact - curl -L --fail-with-body -O "${DOWNLOAD_URL}" + if ! 
curl -L --fail-with-body -O "${DOWNLOAD_URL}"; then + echo "downloadMavenArtifact: Error: Failed to download ${ARTIFACT_FILENAME}" + rm -f "${ARTIFACT_FILENAME}" + exit 1 + fi + + # Check downloaded file size to be at least 100 bytes + downloaded_file_size=$(wc -c "${ARTIFACT_FILENAME}" | awk '{print $1}') + if [[ "${downloaded_file_size}" -le 600 ]]; then + echo "downloadMavenArtifact: Error: Failed to download ${ARTIFACT_FILENAME}: Invalid Filesize: ${downloaded_file_size} bytes" + rm -f "${ARTIFACT_FILENAME}" + exit 1 + fi - # Create artifacts directory if it doen't exist - mkdir -p "${ARTIFACTS_DIRECTORY}" + # Create artifacts targetDirectory if it doen't exist + mkdir -p "./${targetDirectory}" || exit 1 # Delete already existing older versions of the artifact - rm -f "${ARTIFACTS_DIRECTORY}/${ARTIFACT_ID}"* + rm -f "./${targetDirectory}/${artifactId}"* || exit 1 - # Move artifact to artifacts directory - mv "${ARTIFACT_FILENAME}" "${ARTIFACTS_DIRECTORY}" + # Move artifact to artifacts targetDirectory + mv "${ARTIFACT_FILENAME}" "./${targetDirectory}" || exit 1 else - echo "${ARTIFACT_FILENAME} already downloaded" + echo "downloadMavenArtifact: ${ARTIFACT_FILENAME} already downloaded into target directory ${targetDirectory}" fi # Fail if Maven Download failed -if [ ! -f "${ARTIFACTS_DIRECTORY}/${ARTIFACT_FILENAME}" ] ; then - echo "Failed to download ${ARTIFACT_FILENAME}" +if [ ! -f "${targetDirectory}/${ARTIFACT_FILENAME}" ] ; then + echo "downloadMavenArtifact: Error: Failed to download ${ARTIFACT_FILENAME}" exit 1 fi \ No newline at end of file From 2f60ac658b79a40e74235344dee78de3194383ab Mon Sep 17 00:00:00 2001 From: JohT Date: Sat, 8 Jul 2023 10:01:09 +0200 Subject: [PATCH 2/7] Add selectable open graph data science plugin --- renovate.json | 12 +++++++ scripts/profiles/Neo4jv4.sh | 1 + scripts/profiles/Neo4jv5.sh | 2 ++ scripts/setupNeo4j.sh | 68 +++++++++++++++++++++++-------------- 4 files changed, 58 insertions(+), 25 deletions(-) diff --git a/renovate.json b/renovate.json index b364a8aa6..47ea5fb92 100644 --- a/renovate.json +++ b/renovate.json @@ -66,6 +66,18 @@ "depNameTemplate": "neo4j/graph-data-science", "datasourceTemplate": "github-releases" }, + { + "fileMatch": [ + "^scripts\/profiles\/Neo4jv5\\.sh$", + "^scripts\/profiles\/Default\\.sh$", + "^scripts\/[^\/]*\\.sh$" + ], + "matchStrings": [ + "NEO4J_OPEN_GDS_PLUGIN_VERSION:-\\\"?(?.*?)\\\"" + ], + "depNameTemplate": "JohT/open-graph-data-science-packaging", + "datasourceTemplate": "github-releases" + }, { "fileMatch": [ "^scripts\/profiles\/Neo4jv5\\.sh$", diff --git a/scripts/profiles/Neo4jv4.sh b/scripts/profiles/Neo4jv4.sh index a7caaaa0c..8d1a2ef52 100755 --- a/scripts/profiles/Neo4jv4.sh +++ b/scripts/profiles/Neo4jv4.sh @@ -18,6 +18,7 @@ NEO4J_APOC_PLUGIN_EDITION=${NEO4J_APOC_PLUGIN_EDITION:-"all"} # Since Neo4j v5 o NEO4J_APOC_PLUGIN_GITHUB=${NEO4J_APOC_PLUGIN_GITHUB:-"neo4j-contrib/neo4j-apoc-procedures"} # Location for the old plugins compatible to Neo4j v4 NEO4J_GDS_PLUGIN_VERSION=${NEO4J_GDS_PLUGIN_VERSION:-"2.3.4"} # Graph Data Science Plugin Version 2.3.x is compatible with Neo4j 4.4.x +NEO4J_GDS_PLUGIN_EDITION=${NEO4J_GDS_PLUGIN_EDITION:-"full"} # Graph Data Science Plugin Edition: "open" for OpenGDS, "full" for the full version with Neo4j license JQASSISTANT_CLI_VERSION=${JQASSISTANT_CLI_VERSION:-"1.12.2"} # Version 1.12.2 is the newest version (may 2023) compatible with Neo4j v4 JQASSISTANT_CLI_ARTIFACT=${JQASSISTANT_CLI_ARTIFACT:-"jqassistant-commandline-neo4jv3"} # For Neo4j v3 & 4: 
"jqassistant-commandline-neo4jv3" diff --git a/scripts/profiles/Neo4jv5.sh b/scripts/profiles/Neo4jv5.sh index cda5c4d7a..4a80d756a 100755 --- a/scripts/profiles/Neo4jv5.sh +++ b/scripts/profiles/Neo4jv5.sh @@ -18,6 +18,8 @@ NEO4J_APOC_PLUGIN_EDITION=${NEO4J_APOC_PLUGIN_EDITION:-"core"} # Since Neo4j v5 NEO4J_APOC_PLUGIN_GITHUB=${NEO4J_APOC_PLUGIN_GITHUB:-"neo4j/apoc"} # Core edition was moved to "neo4j/apoc" for Neo4j v5 NEO4J_GDS_PLUGIN_VERSION=${NEO4J_GDS_PLUGIN_VERSION:-"2.4.1"} # Version 2.4.0 is the newest version of june 2023 and compatible with Neo4j v5 +NEO4J_OPEN_GDS_PLUGIN_VERSION=${NEO4J_OPEN_GDS_PLUGIN_VERSION:-"2.4.1"} # Graph Data Science Plugin Version 2.4.x of is compatible with Neo4j 5.x +NEO4J_GDS_PLUGIN_EDITION=${NEO4J_GDS_PLUGIN_EDITION:-"open"} # Graph Data Science Plugin Edition: "open" for OpenGDS, "full" for the full version with Neo4j license JQASSISTANT_CLI_VERSION=${JQASSISTANT_CLI_VERSION:-"2.0.4"} # Version 2.0.3 is the newest version (june 2023) compatible with Neo4j v5 JQASSISTANT_CLI_ARTIFACT=${JQASSISTANT_CLI_ARTIFACT:-"jqassistant-commandline-distribution"} # Since jQAssistant CLI v2: "jqassistant-commandline-distribution" diff --git a/scripts/setupNeo4j.sh b/scripts/setupNeo4j.sh index 591c74df9..42cd2aed7 100755 --- a/scripts/setupNeo4j.sh +++ b/scripts/setupNeo4j.sh @@ -10,6 +10,8 @@ NEO4J_APOC_PLUGIN_VERSION=${NEO4J_APOC_PLUGIN_VERSION:-"5.10.1"} #Awesome Proced NEO4J_APOC_PLUGIN_EDITION=${NEO4J_APOC_PLUGIN_EDITION:-"core"} #Awesome Procedures for Neo4j Plugin Edition (Neo4j v4.4.x "all", Neo4j >= v5 "core") NEO4J_APOC_PLUGIN_GITHUB=${NEO4J_APOC_PLUGIN_GITHUB:-"neo4j/apoc"} #Awesome Procedures for Neo4j Plugin GitHub User/Repository (Neo4j v4.4.x "neo4j-contrib/neo4j-apoc-procedures", Neo4j >= v5 "neo4j/apoc") NEO4J_GDS_PLUGIN_VERSION=${NEO4J_GDS_PLUGIN_VERSION:-"2.4.1"} # Graph Data Science Plugin Version 2.4.x of is compatible with Neo4j 5.x +NEO4J_OPEN_GDS_PLUGIN_VERSION=${NEO4J_OPEN_GDS_PLUGIN_VERSION:-"2.4.1"} # Graph Data Science Plugin Version 2.4.x of is compatible with Neo4j 5.x +NEO4J_GDS_PLUGIN_EDITION=${NEO4J_GDS_PLUGIN_EDITION:-"open"} # Graph Data Science Plugin Edition: "open" for OpenGDS, "full" for the full version with Neo4j license NEO4J_DATA_PATH=${NEO4J_DATA_PATH:-"$( pwd -P )/data"} # Path where Neo4j writes its data to (outside tools dir) NEO4J_RUNTIME_PATH=${NEO4J_RUNTIME_PATH:-"$( pwd -P )/runtime"} # Path where Neo4j puts runtime data to (e.g. logs) (outside tools dir) TOOLS_DIRECTORY=${TOOLS_DIRECTORY:-"tools"} # Get the tools directory (defaults to "tools") @@ -23,9 +25,10 @@ NEO4J_BOLT_PORT=${NEO4J_BOLT_PORT:-"7687"} NEO4J_INSTALLATION_NAME="neo4j-${NEO4J_EDITION}-${NEO4J_VERSION}" NEO4J_INSTALLATION_DIRECTORY="${TOOLS_DIRECTORY}/${NEO4J_INSTALLATION_NAME}" NEO4J_CONFIG="${NEO4J_INSTALLATION_DIRECTORY}/conf/neo4j.conf" +NEO4J_PLUGINS="${NEO4J_INSTALLATION_DIRECTORY}/plugins" NEO4J_APOC_CONFIG="${NEO4J_INSTALLATION_DIRECTORY}/conf/apoc.conf" NEO4J_APOC_PLUGIN_ARTIFACT="apoc-${NEO4J_APOC_PLUGIN_VERSION}-${NEO4J_APOC_PLUGIN_EDITION}.jar" -NEO4J_GDS_PLUGIN_ARTIFACT="neo4j-graph-data-science-${NEO4J_GDS_PLUGIN_VERSION}.jar" +NEO4J_MAJOR_VERSION_NUMBER=$(echo "$NEO4J_VERSION" | cut -d'.' -f1) # First part of the version number (=major version number) ## Get this "scripts" directory if not already set # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. @@ -77,13 +80,13 @@ if [ ! 
-d "${NEO4J_INSTALLATION_DIRECTORY}" ] ; then downloaded_neo4j_archive="${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_INSTALLATION_NAME}-unix.tar.gz" # Check downloaded file size to be at least 100 bytes - downloaded_file_size=$(wc -c "${downloaded_neo4j_archive}") + downloaded_file_size=$(wc -c "${downloaded_neo4j_archive}" | awk '{print $1}') if [[ "$downloaded_file_size" -le 100 ]]; then echo "setupNeo4j: Error: Failed to download ${NEO4J_INSTALLATION_NAME}: Invalid Filesize." rm -f "${downloaded_neo4j_archive}" exit 1 fi - + # Extract the tar file tar -xf "${downloaded_neo4j_archive}" --directory "${TOOLS_DIRECTORY}" @@ -93,9 +96,6 @@ if [ ! -d "${NEO4J_INSTALLATION_DIRECTORY}" ] ; then exit 1 fi - # Extract the first component of the version number (=major version number) - NEO4J_MAJOR_VERSION_NUMBER=$(echo "$NEO4J_VERSION" | cut -d'.' -f1) - # Configure all paths with data that changes (database data, logs, ...) to be in the outside "data" directory # instead of inside the neo4j directory echo "setupNeo4j: Configuring dynamic settings (data directories, ports, ...)" @@ -155,7 +155,7 @@ else fi # Download and Install the Neo4j Plugin "Awesome Procedures for Neo4j" (APOC) -if [ ! -f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then +if [ ! -f "${NEO4J_PLUGINS}/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then # Download the Neo4j Plugin "Awesome Procedures for Neo4j" if [ ! -f "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then @@ -167,7 +167,7 @@ if [ ! -f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/${NEO4J_APOC_PLUGIN_ARTIFACT} fi # Check downloaded file size to be at least 100 bytes - downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}") + downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" | awk '{print $1}') if [[ "$downloaded_file_size" -le 100 ]]; then echo "setupNeo4j: Error: Failed to download ${NEO4J_APOC_PLUGIN_ARTIFACT}: Invalid Filesize." rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" @@ -175,14 +175,14 @@ if [ ! -f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/${NEO4J_APOC_PLUGIN_ARTIFACT} fi # Uninstall previously installed Neo4j Plugin "Awesome Procedures for Neo4j" (APOC) - rm -f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/apoc*.jar" + rm -f "${NEO4J_PLUGINS}/apoc*.jar" # Install the Neo4j Plugin "Awesome Procedures for Neo4j" echo "setupNeo4j: Installing ${NEO4J_APOC_PLUGIN_ARTIFACT}" cp -R "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" "${NEO4J_INSTALLATION_DIRECTORY}/plugins" # Fail if Neo4j Plugin "Awesome Procedures for Neo4j" (APOC) hadn't been downloaded successfully - if [ ! -f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then + if [ ! -f "${NEO4J_PLUGINS}/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then echo "setupNeo4j: Error: Failed to download and install ${NEO4J_APOC_PLUGIN_ARTIFACT}" exit 1 fi @@ -200,36 +200,54 @@ else fi # Download and Install the Neo4j Plugin "Graph Data Science" (GDS) -if [ ! 
-f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/${NEO4J_GDS_PLUGIN_ARTIFACT}" ] ; then - +if [[ ${NEO4J_GDS_PLUGIN_EDITION} == "open" ]]; then + neo4jGraphDataScienceDownloadUrl="https://github.com/JohT/open-graph-data-science-packaging/releases/download/v${NEO4J_OPEN_GDS_PLUGIN_VERSION}" + # TODO Maybe it would be a better solution to release open graph data science packages just with the major release version + if [[ "$NEO4J_MAJOR_VERSION_NUMBER" -ge 5 ]]; then + neo4jGraphDataScienceNeo4jVersion="5.9.0" + else + neo4jGraphDataScienceNeo4jVersion="4.4.23" + fi + neo4jGraphDataScienceReleaseArtifactPrefix="open-graph-data-science" + neo4jGraphDataScienceReleaseArtifact="${neo4jGraphDataScienceReleaseArtifactPrefix}-${NEO4J_OPEN_GDS_PLUGIN_VERSION}-for-neo4j-${neo4jGraphDataScienceNeo4jVersion}.jar" +else + neo4jGraphDataScienceDownloadUrl="https://github.com/neo4j/graph-data-science/releases/download/${NEO4J_GDS_PLUGIN_VERSION}" + neo4jGraphDataScienceReleaseArtifactPrefix="neo4j-graph-data-science" + neo4jGraphDataScienceReleaseArtifact="${neo4jGraphDataScienceReleaseArtifactPrefix}-${NEO4J_GDS_PLUGIN_VERSION}-${NEO4J_GDS_PLUGIN_EDITION}.jar" +fi + +if [ ! -f "${NEO4J_PLUGINS}/${neo4jGraphDataScienceReleaseArtifact}" ] ; then + # Download the Neo4j Plugin "Graph Data Science" (GDS) + #source ${SCRIPTS_DIR}/download.sh --url "${neo4jGraphDataScienceDownloadUrl}/${neo4jGraphDataScienceReleaseArtifact}" + # Download the Neo4j Plugin "Graph Data Science" (GDS) - if [ ! -f "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_GDS_PLUGIN_ARTIFACT}" ] ; then - echo "setupNeo4j: Downloading ${NEO4J_GDS_PLUGIN_ARTIFACT}" - curl -L --fail-with-body -o "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_GDS_PLUGIN_ARTIFACT}" https://github.com/neo4j/graph-data-science/releases/download/${NEO4J_GDS_PLUGIN_VERSION}/${NEO4J_GDS_PLUGIN_ARTIFACT} || exit 1 + if [ ! -f "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" ] ; then + echo "setupNeo4j: Downloading ${neo4jGraphDataScienceReleaseArtifact} from ${neo4jGraphDataScienceDownloadUrl}/${neo4jGraphDataScienceReleaseArtifact}" + curl -L --fail-with-body -o "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" ${neo4jGraphDataScienceDownloadUrl}/${neo4jGraphDataScienceReleaseArtifact} || exit 1 else - echo "setupNeo4j: ${NEO4J_GDS_PLUGIN_ARTIFACT} already downloaded" + echo "setupNeo4j: ${neo4jGraphDataScienceReleaseArtifact} already downloaded" fi # Check downloaded file size to be at least 100 bytes - downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_GDS_PLUGIN_ARTIFACT}" | awk '{print $1}') + downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" | awk '{print $1}') if [[ "$downloaded_file_size" -le 100 ]]; then - echo "setupNeo4j: Error: Failed to download ${NEO4J_GDS_PLUGIN_ARTIFACT}. Invalid Filesize." - rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_GDS_PLUGIN_ARTIFACT}" + echo "setupNeo4j: Error: Failed to download ${neo4jGraphDataScienceReleaseArtifact}. Invalid Filesize." 
+ rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" exit 1 fi # Uninstall previously installed Neo4j Plugin "Graph Data Science" (GDS) - rm -f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/neo4j-graph-data-science*.jar" + rm -f "${NEO4J_PLUGINS}/${neo4jGraphDataScienceReleaseArtifactPrefix}*.jar" # Install the Neo4j Plugin "Graph Data Science" (GDS) - echo "setupNeo4j: Installing ${NEO4J_GDS_PLUGIN_ARTIFACT}" - cp -R "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_GDS_PLUGIN_ARTIFACT}" "${NEO4J_INSTALLATION_DIRECTORY}/plugins" + echo "setupNeo4j: Installing ${neo4jGraphDataScienceReleaseArtifact}" + cp -R "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" "${NEO4J_PLUGINS}" # Fail if Neo4j Plugin "Graph Data Science" (GDS) hadn't been downloaded successfully - if [ ! -f "${NEO4J_INSTALLATION_DIRECTORY}/plugins/${NEO4J_GDS_PLUGIN_ARTIFACT}" ] ; then - echo "setupNeo4j: Error: Failed to download and install ${NEO4J_GDS_PLUGIN_ARTIFACT}" + if [ ! -f "${NEO4J_PLUGINS}/${neo4jGraphDataScienceReleaseArtifact}" ] ; then + echo "setupNeo4j: Error: Failed to install ${neo4jGraphDataScienceReleaseArtifact}" exit 1 fi else - echo "setupNeo4j: ${NEO4J_GDS_PLUGIN_ARTIFACT} already installed" + echo "setupNeo4j: ${neo4jGraphDataScienceReleaseArtifact} already installed" fi \ No newline at end of file From 3f9151d691739cba56623f1662de1a7d88b250fa Mon Sep 17 00:00:00 2001 From: JohT Date: Sat, 8 Jul 2023 22:14:04 +0200 Subject: [PATCH 3/7] Minor fixes in comments --- scripts/reports/compilations/AllReports.sh | 4 +--- scripts/reports/compilations/CsvReports.sh | 4 +--- scripts/reports/compilations/JupyterReports.sh | 4 +--- 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/scripts/reports/compilations/AllReports.sh b/scripts/reports/compilations/AllReports.sh index 5b2d02ce5..f4e13f087 100755 --- a/scripts/reports/compilations/AllReports.sh +++ b/scripts/reports/compilations/AllReports.sh @@ -3,13 +3,11 @@ # Runs all report scripts. # It only consideres scripts in the "reports" directory (overridable with REPORTS_SCRIPT_DIR) one directory above this one. -## Get this "reports/compilations" directory if not already set. +## Get this "scripts/reports/compilations" directory if not already set. # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. # This way non-standard tools like readlink aren't needed. REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} -# The following solution should work in most shells but returns the (wrong) path of the caller when using "source" to call. -#REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "$0")" && pwd -P)} echo "AllReports: REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR}" REPORTS_SCRIPT_DIR=${REPORTS_SCRIPT_DIR:-$(dirname -- "${REPORT_COMPILATIONS_SCRIPT_DIR}")} diff --git a/scripts/reports/compilations/CsvReports.sh b/scripts/reports/compilations/CsvReports.sh index 5ddc04838..17983f846 100755 --- a/scripts/reports/compilations/CsvReports.sh +++ b/scripts/reports/compilations/CsvReports.sh @@ -3,13 +3,11 @@ # Runs all CSV report scripts (no Python and Chromium required). # It only consideres scripts in the "reports" directory (overridable with REPORTS_SCRIPT_DIR) one directory above this one. 
-## Get this "reports/compilations" directory if not already set. +## Get this "scripts/reports/compilations" directory if not already set. # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. # This way non-standard tools like readlink aren't needed. REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} -# The following solution should work in most shells but returns the (wrong) path of the caller when using "source" to call. -#REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "$0")" && pwd -P)} echo "CsvReports: REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR}" REPORTS_SCRIPT_DIR=${REPORTS_SCRIPT_DIR:-$(dirname -- "${REPORT_COMPILATIONS_SCRIPT_DIR}")} diff --git a/scripts/reports/compilations/JupyterReports.sh b/scripts/reports/compilations/JupyterReports.sh index 5d4f482df..cc778195c 100755 --- a/scripts/reports/compilations/JupyterReports.sh +++ b/scripts/reports/compilations/JupyterReports.sh @@ -6,13 +6,11 @@ # For PDF generation chromium is required additionally. # Therefore these reports will take longer and require more ressources than just plain database queries/procedures. -## Get this "reports/compilations" directory if not already set. +## Get this "scripts/reports/compilations" directory if not already set. # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. # This way non-standard tools like readlink aren't needed. REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} -# The following solution should work in most shells but returns the (wrong) path of the caller when using "source" to call. -#REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR:-$( CDPATH=. 
cd -- "$(dirname -- "$0")" && pwd -P)} echo "JupyterReports: REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR}" REPORTS_SCRIPT_DIR=${REPORTS_SCRIPT_DIR:-$(dirname -- "${REPORT_COMPILATIONS_SCRIPT_DIR}")} From bee8b8dbc0c9efbd9ec0c32ed27a16d7fedc5fb0 Mon Sep 17 00:00:00 2001 From: JohT Date: Sat, 8 Jul 2023 22:16:25 +0200 Subject: [PATCH 4/7] Disable database export for default analysis --- .github/workflows/code-reports.yml | 18 ++++++++++-------- COMMANDS.md | 2 +- scripts/SCRIPTS.md | 2 +- .../DatabaseCsvExportReports.sh} | 18 +++++++++++------- 4 files changed, 23 insertions(+), 17 deletions(-) rename scripts/reports/{DatabaseCsvExport.sh => compilations/DatabaseCsvExportReports.sh} (60%) diff --git a/.github/workflows/code-reports.yml b/.github/workflows/code-reports.yml index 5ce4cd1a5..98afce860 100644 --- a/.github/workflows/code-reports.yml +++ b/.github/workflows/code-reports.yml @@ -112,7 +112,7 @@ jobs: env: NEO4J_INITIAL_PASSWORD: ${{ secrets.NEO4J_INITIAL_PASSWORD }} run: | - ./../../scripts/analysis/analyze.sh --report All --profile Neo4jv5 + ./../../scripts/analysis/analyze.sh - name: Move reports from the temp to the results directory preserving their surrounding directory working-directory: temp @@ -128,13 +128,15 @@ jobs: retention-days: 5 # Upload Database Export - - name: Archive exported database - uses: actions/upload-artifact@v3 - with: - name: code-report-database-export-${{ matrix.java }}-python-${{ matrix.python }}-mambaforge-${{ matrix.mambaforge }} - path: ./temp/**/import - if-no-files-found: error - retention-days: 5 + # Only possible after an export with "./../../scripts/analysis/analyze.sh --report DatabaseCsvExport" + # Won't be done here because of performance and security concerns + #- name: Archive exported database + # uses: actions/upload-artifact@v3 + # with: + # name: code-report-database-export-${{ matrix.java }}-python-${{ matrix.python }}-mambaforge-${{ matrix.mambaforge }} + # path: ./temp/**/import + # if-no-files-found: error + # retention-days: 5 # Commit and push the native image agent results - name: Display environment variable "github.event_name" diff --git a/COMMANDS.md b/COMMANDS.md index 3cf6a4316..1b0db2022 100644 --- a/COMMANDS.md +++ b/COMMANDS.md @@ -51,7 +51,7 @@ The [analyze.sh](./scripts/analysis/analyze.sh) command comes with these command line options: -- `--report Csv` only generates CSV reports. This speeds up the report generation and doesn't depend on Python, Jupyter Notebook or any other related dependencies. The default value os `All` to generate all reports. `Jupiter` will only generate Jupyter Notebook reports. +- `--report Csv` only generates CSV reports. This speeds up the report generation and doesn't depend on Python, Jupyter Notebook or any other related dependencies. The default value os `All` to generate all reports. `Jupiter` will only generate Jupyter Notebook reports. `DatabaseCsvExport` exports the whole graph database as a CSV file (performance intense, check if there are security concerns first). - `--profile Neo4jv4` uses the older long term support (june 2023) version v4.4.x of Neo4j and suitable compatible versions of plugins and JQAssistant. `Neo4jv5` will explicitly select the newest (june 2023) version 5.x of Neo4j. Without setting a profile, the newest versions will be used. Profiles are scripts that can be found in the directory [scripts/profiles](./scripts/profiles/). 
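For illustration, the two options above can be combined. A typical invocation from within a temporary analysis directory (the same relative path used by the GitHub workflow changed above) might look like the following; the chosen values are only an example, and any of the report and profile names listed above can be substituted:

    ./../../scripts/analysis/analyze.sh --report Csv --profile Neo4jv4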
diff --git a/scripts/SCRIPTS.md b/scripts/SCRIPTS.md index a699ec1af..bd5a9cc47 100644 --- a/scripts/SCRIPTS.md +++ b/scripts/SCRIPTS.md @@ -22,7 +22,6 @@ Script | Directory | Description | [Neo4jv5.sh](./profiles/Neo4jv5.sh) | profiles | Sets all settings variables for an analysis with Neo4j v5.x (newest version as of june 2023). | | [CentralityCsv.sh](./reports/CentralityCsv.sh) | reports | Looks for centrality using the Graph Data Science Library of Neo4j and creates CSV reports. | | [CommunityCsv.sh](./reports/CommunityCsv.sh) | reports | Detects communities using the Graph Data Science Library of Neo4j and creates CSV reports. | -| [DatabaseCsvExport.sh](./reports/DatabaseCsvExport.sh) | reports | Exports the whole graph database as a CSV file using the APOC procedure "apoc.export.csv.all" | | [ExternalDependenciesCsv.sh](./reports/ExternalDependenciesCsv.sh) | reports | Executes "Package_Usage" Cypher queries to get the "external-dependencies-csv" CSV reports. | | [ExternalDependenciesJupyter.sh](./reports/ExternalDependenciesJupyter.sh) | reports | Creates the "overview" report (ipynb, md, pdf) based on the Jupyter Notebook "Overview.ipynb". | | [InternalDependenciesCsv.sh](./reports/InternalDependenciesCsv.sh) | reports | Executes "Package_Usage" Cypher queries to get the "internal-dependencies" CSV reports. | @@ -37,6 +36,7 @@ Script | Directory | Description | [WordcloudJupyter.sh](./reports/WordcloudJupyter.sh) | reports | Creates the "overview" report (ipynb, md, pdf) based on the Jupyter Notebook "Overview.ipynb". | | [AllReports.sh](./reports/compilations/AllReports.sh) | compilations | Runs all report scripts. | | [CsvReports.sh](./reports/compilations/CsvReports.sh) | compilations | Runs all CSV report scripts (no Python and Chromium required). | +| [DatabaseCsvExportReports.sh](./reports/compilations/DatabaseCsvExportReports.sh) | compilations | Exports the whole graph database as a CSV file using the APOC procedure "apoc.export.csv.all" | | [JupyterReports.sh](./reports/compilations/JupyterReports.sh) | compilations | Runs all Jupyter Notebook report scripts. | | [resetAndScan.sh](./resetAndScan.sh) | | Deletes all data in the Neo4j graph database and rescans the downloaded artifacts to create a new graph. | | [resetAndScanChanged.sh](./resetAndScanChanged.sh) | | Executes "resetAndScan.sh" only if "detectChangedArtifacts.sh" returns detected changes. | diff --git a/scripts/reports/DatabaseCsvExport.sh b/scripts/reports/compilations/DatabaseCsvExportReports.sh similarity index 60% rename from scripts/reports/DatabaseCsvExport.sh rename to scripts/reports/compilations/DatabaseCsvExportReports.sh index 87195b184..e6754f623 100755 --- a/scripts/reports/DatabaseCsvExport.sh +++ b/scripts/reports/compilations/DatabaseCsvExportReports.sh @@ -3,27 +3,31 @@ # Exports the whole graph database as a CSV file using the APOC procedure "apoc.export.csv.all" # The exported file can be found in the subdirectory "import" inside the tools/neo4j.. directory. +# Note: This is a special case. It is treated as a compilation even if it is just a single cypher execution. +# The reason for that is that it exports the whole graph database. This should only be done intentionally +# and not within a default "AllReports.sh" run because it is performance intense and could raise security concerns. 
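+# For illustration: as described in COMMANDS.md, this export is triggered explicitly with
+# "./../../scripts/analysis/analyze.sh --report DatabaseCsvExport" instead of being part of the default run.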
+ # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} -## Get this "scripts/reports" directory if not already set +## Get this "scripts/reports/compilations" directory if not already set. # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. # This way non-standard tools like readlink aren't needed. -REPORTS_SCRIPT_DIR=${REPORTS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} -echo "ExternalDependenciesCsv: REPORTS_SCRIPT_DIR=${REPORTS_SCRIPT_DIR}" +REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} +echo "DatabaseCsvExportReports: REPORT_COMPILATIONS_SCRIPT_DIR=${REPORT_COMPILATIONS_SCRIPT_DIR}" # Get the "scripts" directory by taking the path of this script and going one directory up. -SCRIPTS_DIR=${SCRIPTS_DIR:-"${REPORTS_SCRIPT_DIR}/.."} -echo "ExternalDependenciesCsv SCRIPTS_DIR=${SCRIPTS_DIR}" +SCRIPTS_DIR=${SCRIPTS_DIR:-"${REPORTS_SCRIPT_DIR}/../.."} +echo "DatabaseCsvExportReports: SCRIPTS_DIR=${SCRIPTS_DIR}" # Get the "cypher" directory by taking the path of this script and going two directory up and then to "cypher". CYPHER_DIR=${CYPHER_DIR:-"${SCRIPTS_DIR}/../cypher"} -echo "ExportDatabase: CYPHER_DIR=$CYPHER_DIR" +echo "DatabaseCsvExportReports: CYPHER_DIR=$CYPHER_DIR" # Define functions to execute a cypher query from within the given file (first and only argument) source "${SCRIPTS_DIR}/executeQueryFunctions.sh" # Execute Database Export Procedure in background # The exported file can then be found in the subdirectory "import" inside the tools/neo4j.. directory. -execute_cypher "${CYPHER_DIR}/Export_the_whole_database_as_CSV.cypher" & \ No newline at end of file +execute_cypher "${CYPHER_DIR}/Export_the_whole_database_as_CSV.cypher" \ No newline at end of file From 7816eb88737258dee45cb1edf48d9a1549886fe9 Mon Sep 17 00:00:00 2001 From: JohT Date: Mon, 10 Jul 2023 09:57:44 +0200 Subject: [PATCH 5/7] Add example download script to reference --- scripts/SCRIPTS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/SCRIPTS.md b/scripts/SCRIPTS.md index bd5a9cc47..ed0a4198d 100644 --- a/scripts/SCRIPTS.md +++ b/scripts/SCRIPTS.md @@ -10,6 +10,7 @@ Script | Directory | Description | [detectChangedArtifacts.sh](./detectChangedArtifacts.sh) | | Detect changed files in the artifacts directory with a text file containing the last hash code of the contents. | | [downloadMavenArtifact.sh](./downloadMavenArtifact.sh) | | Downloads an artifact from Maven Central (https://mvnrepository.com/repos/central) | | [downloadAxonFramework.sh](./downloader/downloadAxonFramework.sh) | downloader | Downloads AxonFramework (https://developer.axoniq.io/axon-framework) artifacts from Maven Central. | +| [analyzeAxonFramework.sh](./examples/analyzeAxonFramework.sh) | examples | This is an example for an analysis of AxonFramework | | [executeJupyterNotebook.sh](./executeJupyterNotebook.sh) | | Executes all steps in the given Jupyter Notebook (ipynb), stores it and converts it to Markdown (md) and PDF. | | [executeQuery.sh](./executeQuery.sh) | | Utilizes Neo4j's HTTP API to execute a Cypher query from an input file and provides the results in CSV format. 
| | [executeQueryFunctions.sh](./executeQueryFunctions.sh) | | Provides functions to execute Cypher queries using either "executeQuery.sh" or Neo4j's "cypher-shell". | From aa3d23a9c13eb451149fa7f0ad47b3b1bbb1e430 Mon Sep 17 00:00:00 2001 From: JohT Date: Mon, 17 Jul 2023 10:00:49 +0200 Subject: [PATCH 6/7] Standardize downloads with a shared script --- scripts/SCRIPTS.md | 1 + scripts/download.sh | 87 +++++++++++++++++++++ scripts/downloadMavenArtifact.sh | 30 +++---- scripts/downloader/downloadAxonFramework.sh | 6 +- scripts/setupJQAssistant.sh | 43 +++++----- scripts/setupNeo4j.sh | 69 +++------------- 6 files changed, 130 insertions(+), 106 deletions(-) create mode 100755 scripts/download.sh diff --git a/scripts/SCRIPTS.md b/scripts/SCRIPTS.md index ed0a4198d..c115c2fbd 100644 --- a/scripts/SCRIPTS.md +++ b/scripts/SCRIPTS.md @@ -8,6 +8,7 @@ Script | Directory | Description | [analyze.sh](./analysis/analyze.sh) | analysis | Coordinates the end-to-end analysis process, encompassing tool installation, graph generation, and report generation. | | [copyReportsIntoResults.sh](./copyReportsIntoResults.sh) | | Copies the results from the temp directory to the results directory grouped by the analysis name. | | [detectChangedArtifacts.sh](./detectChangedArtifacts.sh) | | Detect changed files in the artifacts directory with a text file containing the last hash code of the contents. | +| [download.sh](./download.sh) | | Downloads a file into the directory of the environment variable SHARED_DOWNLOADS_DIRECTORY (or default "../downloads"). | | [downloadMavenArtifact.sh](./downloadMavenArtifact.sh) | | Downloads an artifact from Maven Central (https://mvnrepository.com/repos/central) | | [downloadAxonFramework.sh](./downloader/downloadAxonFramework.sh) | downloader | Downloads AxonFramework (https://developer.axoniq.io/axon-framework) artifacts from Maven Central. | | [analyzeAxonFramework.sh](./examples/analyzeAxonFramework.sh) | examples | This is an example for an analysis of AxonFramework | diff --git a/scripts/download.sh b/scripts/download.sh new file mode 100755 index 000000000..b6287e98a --- /dev/null +++ b/scripts/download.sh @@ -0,0 +1,87 @@ +#!/usr/bin/env bash + +# Downloads a file into the directory of the environment variable SHARED_DOWNLOADS_DIRECTORY (or default "../downloads"). +# Does nothing if the file already exists. + +# Command line options: +# --url Download URL (required) +# --filename Target file name with extension without path (optional, default = basename of download URL) + +# Function to display script usage +usage() { + echo "Usage: $0 --url https://my.download.url [--filename download-file-name-without-path.ext> (default=url filename)]" + exit 1 +} + +# Default values +downloadUrl="" +filename="" + +# Parse command line arguments +while [[ $# -gt 0 ]]; do + key="$1" + case $key in + --url) + downloadUrl="$2" + shift + ;; + --filename) + filename="$2" + shift + ;; + *) + echo "download: Error: Unknown option: ${key}" + usage + ;; + esac + shift +done + +if [[ -z ${downloadUrl} ]]; then + echo "${USAGE}" + exit 1 +fi + +if ! curl --head --fail ${downloadUrl} >/dev/null 2>&1; then + echo "download: Error: Invalid URL: ${downloadUrl}" + exit 1 +fi + +if [[ -z ${filename} ]]; then + filename=$(basename -- "${downloadUrl}") +fi + +# Get shared download directory and create it if it doesn't exist +SHARED_DOWNLOADS_DIRECTORY="${SHARED_DOWNLOADS_DIRECTORY:-$(dirname "$( pwd )")/downloads}" +if [ ! 
-d "${SHARED_DOWNLOADS_DIRECTORY}" ] ; then + echo "download: Creating shared downloads directory ${SHARED_DOWNLOADS_DIRECTORY}" + mkdir -p ${SHARED_DOWNLOADS_DIRECTORY} +fi + +# Download the file if it doesn't exist in the shared downloads directory +if [ ! -f "${SHARED_DOWNLOADS_DIRECTORY}/${filename}" ] ; then + echo "download: Downloading ${filename} from ${downloadUrl} into ${SHARED_DOWNLOADS_DIRECTORY}" + + # Download the file + if ! curl -L --fail-with-body -o "${SHARED_DOWNLOADS_DIRECTORY}/${filename}" "${downloadUrl}"; then + echo "download: Error: Failed to download ${filename}" + rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${filename}" + exit 1 + fi +else + echo "download: ${filename} already downloaded" +fi + +# Check downloaded file size to be at least 600 bytes or otherwise delete the invalid file +downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${filename}" | awk '{print $1}') +if [[ "${downloaded_file_size}" -le 600 ]]; then + echo "download: Error: Failed to download ${filename}: Filesize: ${downloaded_file_size} < 600 bytes" + rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${filename}" + exit 1 +fi + +# Fail if download failed +if [ ! -f "${SHARED_DOWNLOADS_DIRECTORY}/${filename}" ] ; then + echo "download: Error: Failed to download ${filename}" + exit 1 +fi \ No newline at end of file diff --git a/scripts/downloadMavenArtifact.sh b/scripts/downloadMavenArtifact.sh index 466174b3a..25bf4ee7c 100755 --- a/scripts/downloadMavenArtifact.sh +++ b/scripts/downloadMavenArtifact.sh @@ -8,8 +8,11 @@ # -t Maven Artifact Type (defaults to jar) # -d Target directory for the downloaded file +# Requires download.sh + # Overrideable constants ARTIFACTS_DIRECTORY=${ARTIFACTS_DIRECTORY:-"artifacts"} +SHARED_DOWNLOADS_DIRECTORY="${SHARED_DOWNLOADS_DIRECTORY:-$(dirname "$( pwd )")/downloads}" # Default and initial values for command line options groupId="" @@ -50,6 +53,12 @@ if [[ -z ${groupId} || -z ${artifactId} || -z ${version} || -z ${artifactType} | exit 1 fi +## Get this "scripts" directory if not already set +# Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. +# CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. +# This way non-standard tools like readlink aren't needed. +SCRIPTS_DIR=${SCRIPTS_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} + # Internal constants BASE_URL="https://repo1.maven.org/maven2" ARTIFACT_FILENAME="${artifactId}-${version}.${artifactType}" @@ -58,22 +67,7 @@ DOWNLOAD_URL="${BASE_URL}/${GROUP_ID_FOR_API}/${artifactId}/${version}/${ARTIFAC # Download Maven Artifact into the "targetDirectory" if [ ! -f "./${targetDirectory}/${ARTIFACT_FILENAME}" ] ; then - echo "downloadMavenArtifact: Downloading ${DOWNLOAD_URL} into target directory ${targetDirectory}" - - # Download Maven Artifact - if ! 
curl -L --fail-with-body -O "${DOWNLOAD_URL}"; then - echo "downloadMavenArtifact: Error: Failed to download ${ARTIFACT_FILENAME}" - rm -f "${ARTIFACT_FILENAME}" - exit 1 - fi - - # Check downloaded file size to be at least 100 bytes - downloaded_file_size=$(wc -c "${ARTIFACT_FILENAME}" | awk '{print $1}') - if [[ "${downloaded_file_size}" -le 600 ]]; then - echo "downloadMavenArtifact: Error: Failed to download ${ARTIFACT_FILENAME}: Invalid Filesize: ${downloaded_file_size} bytes" - rm -f "${ARTIFACT_FILENAME}" - exit 1 - fi + source ${SCRIPTS_DIR}/download.sh --url "${DOWNLOAD_URL}" || exit 1 # Create artifacts targetDirectory if it doen't exist mkdir -p "./${targetDirectory}" || exit 1 @@ -81,8 +75,8 @@ if [ ! -f "./${targetDirectory}/${ARTIFACT_FILENAME}" ] ; then # Delete already existing older versions of the artifact rm -f "./${targetDirectory}/${artifactId}"* || exit 1 - # Move artifact to artifacts targetDirectory - mv "${ARTIFACT_FILENAME}" "./${targetDirectory}" || exit 1 + # Copy artifact into artifacts targetDirectory + cp -R "${SHARED_DOWNLOADS_DIRECTORY}/${ARTIFACT_FILENAME}" "./${targetDirectory}" || exit 1 else echo "downloadMavenArtifact: ${ARTIFACT_FILENAME} already downloaded into target directory ${targetDirectory}" fi diff --git a/scripts/downloader/downloadAxonFramework.sh b/scripts/downloader/downloadAxonFramework.sh index 92913ce42..209864a62 100755 --- a/scripts/downloader/downloadAxonFramework.sh +++ b/scripts/downloader/downloadAxonFramework.sh @@ -30,11 +30,11 @@ echo "download${ANALYSIS_NAME}: ARTIFACTS_VERSION=${ARTIFACTS_VERSION}" # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. # This way non-standard tools like readlink aren't needed. -ANALYSIS_SCRIPT_DIR=${ANALYSIS_SCRIPT_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} -echo "download${ANALYSIS_NAME}: ANALYSIS_SCRIPT_DIR=${ANALYSIS_SCRIPT_DIR}" +DOWNLOADER_SCRIPTS_DIR=${DOWNLOADER_SCRIPTS_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} +echo "download${ANALYSIS_NAME}: DOWNLOADER_SCRIPTS_DIR=${DOWNLOADER_SCRIPTS_DIR}" # Get the "scripts" directory by taking the path of this script and going one directory up. -SCRIPTS_DIR=${SCRIPTS_DIR:-$(dirname -- "${ANALYSIS_SCRIPT_DIR}")} +SCRIPTS_DIR=${SCRIPTS_DIR:-$(dirname -- "${DOWNLOADER_SCRIPTS_DIR}")} echo "download${ANALYSIS_NAME}: SCRIPTS_DIR=${SCRIPTS_DIR}" ################################################################ diff --git a/scripts/setupJQAssistant.sh b/scripts/setupJQAssistant.sh index 4519ad02a..cff6543dc 100755 --- a/scripts/setupJQAssistant.sh +++ b/scripts/setupJQAssistant.sh @@ -2,8 +2,10 @@ # Installs (download and unzip) jQAssistant (https://jqassistant.org/get-started). -# Be aware that this script runs in the current directory. -# If you want JQassistant to be installed in the "tools" directory, then create and change it beforehand. +# Requires download.sh + +# Note: This script runs in the current directory. If you want JQassistant to be installed in e.a. the "tools" directory, +# then create and change it beforehand. 
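+# For example (illustrative note): with the defaults below, the installation ends up in
+# "<current working directory>/tools/<JQASSISTANT_CLI_ARTIFACT>-<JQASSISTANT_CLI_VERSION>".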
JQASSISTANT_CLI_VERSION=${JQASSISTANT_CLI_VERSION:-"2.0.4"} # Neo4j v5: 2.0.3 (june 2023), Neo4j v4: 1.12.2 (april 2023) JQASSISTANT_CLI_DOWNLOAD_URL=${JQASSISTANT_CLI_DOWNLOAD_URL:-"https://repo1.maven.org/maven2/com/buschmais/jqassistant/cli"} @@ -12,6 +14,13 @@ JQASSISTANT_CLI_DISTRIBUTION=${JQASSISTANT_CLI_DISTRIBUTION:-"bin.zip"} # Neo4j TOOLS_DIRECTORY=${TOOLS_DIRECTORY:-"tools"} # Get the tools directory (defaults to "tools") SHARED_DOWNLOADS_DIRECTORY="${SHARED_DOWNLOADS_DIRECTORY:-$(dirname "$( pwd )")/downloads}" +## Get this "scripts" directory if not already set +# Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. +# CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. +# This way non-standard tools like readlink aren't needed. +SCRIPTS_DIR=${SCRIPTS_DIR:-$( CDPATH=. cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P )} +echo "setupJQAssistant: SCRIPTS_DIR=$SCRIPTS_DIR" + # Check if TOOLS_DIRECTORY variable is set if [ -z "${TOOLS_DIRECTORY}" ]; then echo "setupJQAssistant: Error: Requires variable TOOLS_DIRECTORY to be set. If it is the current directory, then use a dot to reflect that." @@ -19,7 +28,7 @@ if [ -z "${TOOLS_DIRECTORY}" ]; then else # Create tools directory if it doesn't exists echo "setupJQAssistant: Creating tools directory <${TOOLS_DIRECTORY}> if neccessary" - mkdir -p "${TOOLS_DIRECTORY}" + mkdir -p "${TOOLS_DIRECTORY}" || exit 1 fi # Check if SHARED_DOWNLOADS_DIRECTORY variable is set @@ -37,29 +46,13 @@ JQASSISTANT_INSTALLATION_NAME="${JQASSISTANT_CLI_ARTIFACT}-${JQASSISTANT_CLI_VER JQASSISTANT_INSTALLATION_DIRECTORY="${TOOLS_DIRECTORY}/${JQASSISTANT_INSTALLATION_NAME}" # Download and unpack jQAssistant -if [ ! -d "${JQASSISTANT_INSTALLATION_DIRECTORY}" ] ; then - - # Download jQAssistant - if [ ! -f "${SHARED_DOWNLOADS_DIRECTORY}/${JQASSISTANT_INSTALLATION_NAME}.zip" ] ; then - jqassistant_cli_fulldownload_url=${JQASSISTANT_CLI_DOWNLOAD_URL}/${JQASSISTANT_CLI_ARTIFACT}/${JQASSISTANT_CLI_VERSION}/${JQASSISTANT_CLI_ARTIFACT}-${JQASSISTANT_CLI_VERSION}-${JQASSISTANT_CLI_DISTRIBUTION} - echo "setupJQAssistant: Downloading ${JQASSISTANT_INSTALLATION_NAME}.zip from ${jqassistant_cli_fulldownload_url}" - - # Download jQAssistant - # With the option "-L" a redirection will be followed automatically - curl -L --fail-with-body -o "${SHARED_DOWNLOADS_DIRECTORY}/${JQASSISTANT_INSTALLATION_NAME}.zip" "${jqassistant_cli_fulldownload_url}" - else - echo "setupJQAssistant: ${JQASSISTANT_INSTALLATION_NAME} already downloaded" - fi - - downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${JQASSISTANT_INSTALLATION_NAME}.zip" | awk '{print $1}') - if [[ "${downloaded_file_size}" -le 1000 ]]; then - echo "setupJQAssistant: Error: Failed to download ${JQASSISTANT_INSTALLATION_NAME}. Invalid Filesize" - rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${JQASSISTANT_INSTALLATION_NAME}.zip" - exit 1 - fi +if [ ! 
-d "${JQASSISTANT_INSTALLATION_DIRECTORY}" ] ; then + jqassistant_cli_fulldownload_url=${JQASSISTANT_CLI_DOWNLOAD_URL}/${JQASSISTANT_CLI_ARTIFACT}/${JQASSISTANT_CLI_VERSION}/${JQASSISTANT_CLI_ARTIFACT}-${JQASSISTANT_CLI_VERSION}-${JQASSISTANT_CLI_DISTRIBUTION} + jqassistant_cli_fulldownload_file="${JQASSISTANT_INSTALLATION_NAME}.zip" + source ${SCRIPTS_DIR}/download.sh --url "${jqassistant_cli_fulldownload_url}" --filename "${jqassistant_cli_fulldownload_file}" || exit 2 # Unpack the ZIP file (-q option for less verbose output) - unzip -q "${SHARED_DOWNLOADS_DIRECTORY}/${JQASSISTANT_INSTALLATION_NAME}.zip" -d "${TOOLS_DIRECTORY}" + unzip -q "${SHARED_DOWNLOADS_DIRECTORY}/${jqassistant_cli_fulldownload_file}" -d "${TOOLS_DIRECTORY}" || exit 3 else - echo "setupJQAssistant: ${JQASSISTANT_INSTALLATION_NAME}.zip already installed" + echo "setupJQAssistant: ${jqassistant_cli_fulldownload_file} already installed" fi \ No newline at end of file diff --git a/scripts/setupNeo4j.sh b/scripts/setupNeo4j.sh index 42cd2aed7..40f64a99b 100755 --- a/scripts/setupNeo4j.sh +++ b/scripts/setupNeo4j.sh @@ -66,29 +66,11 @@ fi # Download and extract Neo4j if [ ! -d "${NEO4J_INSTALLATION_DIRECTORY}" ] ; then - # Download Neo4j - if [ ! -f "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_INSTALLATION_NAME}-unix.tar.gz" ] ; then - echo "setupNeo4j: Downloading ${NEO4J_INSTALLATION_NAME}" - - # Download Neo4j - # With the option "-L" a redirection will be followed automatically - curl -L --fail-with-body -o "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_INSTALLATION_NAME}-unix.tar.gz" "https://dist.neo4j.org/${NEO4J_INSTALLATION_NAME}-unix.tar.gz" || exit 1 - else - echo "setupNeo4j: ${NEO4J_INSTALLATION_NAME} already downloaded" - fi - - downloaded_neo4j_archive="${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_INSTALLATION_NAME}-unix.tar.gz" - - # Check downloaded file size to be at least 100 bytes - downloaded_file_size=$(wc -c "${downloaded_neo4j_archive}" | awk '{print $1}') - if [[ "$downloaded_file_size" -le 100 ]]; then - echo "setupNeo4j: Error: Failed to download ${NEO4J_INSTALLATION_NAME}: Invalid Filesize." - rm -f "${downloaded_neo4j_archive}" - exit 1 - fi + neo4jDownloadArchiveFileName="${NEO4J_INSTALLATION_NAME}-unix.tar.gz" + source ${SCRIPTS_DIR}/download.sh --url "https://dist.neo4j.org/${neo4jDownloadArchiveFileName}" || exit 1 # Extract the tar file - tar -xf "${downloaded_neo4j_archive}" --directory "${TOOLS_DIRECTORY}" + tar -xf "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jDownloadArchiveFileName}" --directory "${TOOLS_DIRECTORY}" || exit 1 # Fail if Neo4j hadn't been downloaded successfully if [ ! -d "${NEO4J_INSTALLATION_DIRECTORY}" ] ; then @@ -157,22 +139,7 @@ fi # Download and Install the Neo4j Plugin "Awesome Procedures for Neo4j" (APOC) if [ ! -f "${NEO4J_PLUGINS}/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then - # Download the Neo4j Plugin "Awesome Procedures for Neo4j" - if [ ! 
-f "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then - # Download the Neo4j Plugin "Awesome Procedures for Neo4j" - echo "setupNeo4j: Downloading ${NEO4J_APOC_PLUGIN_ARTIFACT}" - curl -L --fail-with-body -o "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" https://github.com/${NEO4J_APOC_PLUGIN_GITHUB}/releases/download/${NEO4J_APOC_PLUGIN_VERSION}/apoc-${NEO4J_APOC_PLUGIN_VERSION}-${NEO4J_APOC_PLUGIN_EDITION}.jar || exit 1 - else - echo "setupNeo4j: ${NEO4J_APOC_PLUGIN_ARTIFACT} already downloaded" - fi - - # Check downloaded file size to be at least 100 bytes - downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" | awk '{print $1}') - if [[ "$downloaded_file_size" -le 100 ]]; then - echo "setupNeo4j: Error: Failed to download ${NEO4J_APOC_PLUGIN_ARTIFACT}: Invalid Filesize." - rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${NEO4J_APOC_PLUGIN_ARTIFACT}" - exit 1 - fi + source ${SCRIPTS_DIR}/download.sh --url "https://github.com/${NEO4J_APOC_PLUGIN_GITHUB}/releases/download/${NEO4J_APOC_PLUGIN_VERSION}/${NEO4J_APOC_PLUGIN_ARTIFACT}" || exit 1 # Uninstall previously installed Neo4j Plugin "Awesome Procedures for Neo4j" (APOC) rm -f "${NEO4J_PLUGINS}/apoc*.jar" @@ -183,7 +150,7 @@ if [ ! -f "${NEO4J_PLUGINS}/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then # Fail if Neo4j Plugin "Awesome Procedures for Neo4j" (APOC) hadn't been downloaded successfully if [ ! -f "${NEO4J_PLUGINS}/${NEO4J_APOC_PLUGIN_ARTIFACT}" ] ; then - echo "setupNeo4j: Error: Failed to download and install ${NEO4J_APOC_PLUGIN_ARTIFACT}" + echo "setupNeo4j: Error: Failed to install ${NEO4J_APOC_PLUGIN_ARTIFACT}" exit 1 fi @@ -208,36 +175,18 @@ if [[ ${NEO4J_GDS_PLUGIN_EDITION} == "open" ]]; then else neo4jGraphDataScienceNeo4jVersion="4.4.23" fi - neo4jGraphDataScienceReleaseArtifactPrefix="open-graph-data-science" - neo4jGraphDataScienceReleaseArtifact="${neo4jGraphDataScienceReleaseArtifactPrefix}-${NEO4J_OPEN_GDS_PLUGIN_VERSION}-for-neo4j-${neo4jGraphDataScienceNeo4jVersion}.jar" + neo4jGraphDataScienceReleaseArtifact="open-graph-data-science-${NEO4J_OPEN_GDS_PLUGIN_VERSION}-for-neo4j-${neo4jGraphDataScienceNeo4jVersion}.jar" else neo4jGraphDataScienceDownloadUrl="https://github.com/neo4j/graph-data-science/releases/download/${NEO4J_GDS_PLUGIN_VERSION}" - neo4jGraphDataScienceReleaseArtifactPrefix="neo4j-graph-data-science" - neo4jGraphDataScienceReleaseArtifact="${neo4jGraphDataScienceReleaseArtifactPrefix}-${NEO4J_GDS_PLUGIN_VERSION}-${NEO4J_GDS_PLUGIN_EDITION}.jar" + neo4jGraphDataScienceReleaseArtifact="neo4j-graph-data-science-${NEO4J_GDS_PLUGIN_VERSION}-${NEO4J_GDS_PLUGIN_EDITION}.jar" fi if [ ! -f "${NEO4J_PLUGINS}/${neo4jGraphDataScienceReleaseArtifact}" ] ; then # Download the Neo4j Plugin "Graph Data Science" (GDS) - #source ${SCRIPTS_DIR}/download.sh --url "${neo4jGraphDataScienceDownloadUrl}/${neo4jGraphDataScienceReleaseArtifact}" - - # Download the Neo4j Plugin "Graph Data Science" (GDS) - if [ ! 
-f "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" ] ; then - echo "setupNeo4j: Downloading ${neo4jGraphDataScienceReleaseArtifact} from ${neo4jGraphDataScienceDownloadUrl}/${neo4jGraphDataScienceReleaseArtifact}" - curl -L --fail-with-body -o "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" ${neo4jGraphDataScienceDownloadUrl}/${neo4jGraphDataScienceReleaseArtifact} || exit 1 - else - echo "setupNeo4j: ${neo4jGraphDataScienceReleaseArtifact} already downloaded" - fi - - # Check downloaded file size to be at least 100 bytes - downloaded_file_size=$(wc -c "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" | awk '{print $1}') - if [[ "$downloaded_file_size" -le 100 ]]; then - echo "setupNeo4j: Error: Failed to download ${neo4jGraphDataScienceReleaseArtifact}. Invalid Filesize." - rm -f "${SHARED_DOWNLOADS_DIRECTORY}/${neo4jGraphDataScienceReleaseArtifact}" - exit 1 - fi + source ${SCRIPTS_DIR}/download.sh --url "${neo4jGraphDataScienceDownloadUrl}/${neo4jGraphDataScienceReleaseArtifact}" || exit 1 # Uninstall previously installed Neo4j Plugin "Graph Data Science" (GDS) - rm -f "${NEO4J_PLUGINS}/${neo4jGraphDataScienceReleaseArtifactPrefix}*.jar" + rm -f "${NEO4J_PLUGINS}/*graph-data-science*.jar" # Install the Neo4j Plugin "Graph Data Science" (GDS) echo "setupNeo4j: Installing ${neo4jGraphDataScienceReleaseArtifact}" From 518362987263e85a719ab61673dad5f0c06ab67b Mon Sep 17 00:00:00 2001 From: JohT Date: Mon, 17 Jul 2023 17:33:50 +0200 Subject: [PATCH 7/7] Add comment for dependencies to other scripts --- scripts/analysis/analyze.sh | 2 ++ scripts/copyReportsIntoResults.sh | 2 ++ scripts/downloader/downloadAxonFramework.sh | 2 ++ scripts/executeQueryFunctions.sh | 2 ++ scripts/prepareAnalysis.sh | 2 ++ scripts/reports/CentralityCsv.sh | 2 ++ scripts/reports/CommunityCsv.sh | 2 ++ scripts/reports/ExternalDependenciesCsv.sh | 2 ++ scripts/reports/ExternalDependenciesJupyter.sh | 2 ++ scripts/reports/InternalDependenciesCsv.sh | 2 ++ scripts/reports/InternalDependenciesJupyter.sh | 2 ++ scripts/reports/ObjectOrientedDesignMetricsCsv.sh | 2 ++ scripts/reports/ObjectOrientedDesignMetricsJupyter.sh | 2 ++ scripts/reports/OverviewCsv.sh | 2 ++ scripts/reports/OverviewJupyter.sh | 2 ++ scripts/reports/SimilarityCsv.sh | 2 ++ scripts/reports/VisibilityMetricsCsv.sh | 2 ++ scripts/reports/VisibilityMetricsJupyter.sh | 2 ++ scripts/reports/WordcloudJupyter.sh | 2 ++ scripts/reports/compilations/AllReports.sh | 2 ++ scripts/reports/compilations/CsvReports.sh | 2 ++ scripts/reports/compilations/DatabaseCsvExportReports.sh | 2 ++ scripts/reports/compilations/JupyterReports.sh | 2 ++ scripts/resetAndScanChanged.sh | 2 ++ scripts/setupNeo4j.sh | 2 ++ scripts/startNeo4j.sh | 2 ++ scripts/waitForNeo4jHttp.sh | 4 +++- 27 files changed, 55 insertions(+), 1 deletion(-) diff --git a/scripts/analysis/analyze.sh b/scripts/analysis/analyze.sh index 4f50c56d0..3d7bf1987 100755 --- a/scripts/analysis/analyze.sh +++ b/scripts/analysis/analyze.sh @@ -28,6 +28,8 @@ # when it comes to subsequent executions. # Existing downloads, installations, scans and processes will be detected. 
+# Requires setupNeo4j.sh,setupJQAssistant.sh,startNeo4j.sh,resetAndScanChanged.sh,prepareAnalysis.sh,stopNeo4j.sh,comilations/*.sh,profiles/*.sh + # Overrideable variables with directory names REPORTS_SCRIPTS_DIRECTORY=${REPORTS_SCRIPTS_DIRECTORY:-"reports"} REPORT_COMPILATIONS_SCRIPTS_DIRECTORY=${REPORT_COMPILATIONS_SCRIPTS_DIRECTORY:-"compilations"} diff --git a/scripts/copyReportsIntoResults.sh b/scripts/copyReportsIntoResults.sh index 3bb5509e6..b56789de8 100755 --- a/scripts/copyReportsIntoResults.sh +++ b/scripts/copyReportsIntoResults.sh @@ -6,6 +6,8 @@ # Notice that this scripts needs to be executed within the "temp" directory. +# Requires generateMarkdownReference.sh + ## Get this "scripts" directory if not already set # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. diff --git a/scripts/downloader/downloadAxonFramework.sh b/scripts/downloader/downloadAxonFramework.sh index 209864a62..22be1cad8 100755 --- a/scripts/downloader/downloadAxonFramework.sh +++ b/scripts/downloader/downloadAxonFramework.sh @@ -8,6 +8,8 @@ # Note: This script is meant to be started within the temporary analysis directory (e.g. "temp/AnalysisName/") +# Requires downloadMavenArtifact.sh + # Get the analysis name from the middle part of the current file name (without prefix "download" and without extension) SCRIPT_FILE_NAME="$(basename -- "${BASH_SOURCE[0]}")" SCRIPT_FILE_NAME_WITHOUT_EXTENSION="${SCRIPT_FILE_NAME%%.*}" diff --git a/scripts/executeQueryFunctions.sh b/scripts/executeQueryFunctions.sh index 472385634..26c09279e 100644 --- a/scripts/executeQueryFunctions.sh +++ b/scripts/executeQueryFunctions.sh @@ -2,6 +2,8 @@ # Provides functions to execute Cypher queries using either "executeQuery.sh" or Neo4j's "cypher-shell". +# Requires executeQuery.sh + ## Get this "scripts" directory if not already set # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. diff --git a/scripts/prepareAnalysis.sh b/scripts/prepareAnalysis.sh index 5925b65a9..fbe999375 100644 --- a/scripts/prepareAnalysis.sh +++ b/scripts/prepareAnalysis.sh @@ -2,6 +2,8 @@ # Prepares and validates the graph database before analysis +# Requires executeQueryFunctions.sh + ## Get this "scripts" directory if not already set # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. diff --git a/scripts/reports/CentralityCsv.sh b/scripts/reports/CentralityCsv.sh index 5a2a1d1d8..9da6a4495 100755 --- a/scripts/reports/CentralityCsv.sh +++ b/scripts/reports/CentralityCsv.sh @@ -5,6 +5,8 @@ # The reports (csv files) will be written into the sub directory reports/community. # Note that "scripts/prepareAnalysis.sh" is required to run prior to this script. 
+# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/CommunityCsv.sh b/scripts/reports/CommunityCsv.sh index bb44a3da8..a47b60b6b 100755 --- a/scripts/reports/CommunityCsv.sh +++ b/scripts/reports/CommunityCsv.sh @@ -5,6 +5,8 @@ # The reports (csv files) will be written into the sub directory reports/community. # Note that "scripts/prepareAnalysis.sh" is required to run prior to this script. +# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/ExternalDependenciesCsv.sh b/scripts/reports/ExternalDependenciesCsv.sh index a77eb1ff7..71a8f43ed 100755 --- a/scripts/reports/ExternalDependenciesCsv.sh +++ b/scripts/reports/ExternalDependenciesCsv.sh @@ -3,6 +3,8 @@ # Executes "Package_Usage" Cypher queries to get the "external-dependencies-csv" CSV reports. # They list external library package usage like how often a external package is called. +# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/ExternalDependenciesJupyter.sh b/scripts/reports/ExternalDependenciesJupyter.sh index e6d290efa..1204d1ab2 100755 --- a/scripts/reports/ExternalDependenciesJupyter.sh +++ b/scripts/reports/ExternalDependenciesJupyter.sh @@ -5,6 +5,8 @@ # how they relate to each other, distribution of Methods and their effective lines of code # and how the cyclomatic complexity is distributed across all Methods per artifact. +# Requires executeJupyterNotebook.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/InternalDependenciesCsv.sh b/scripts/reports/InternalDependenciesCsv.sh index 23af6afbe..a49c44ebf 100755 --- a/scripts/reports/InternalDependenciesCsv.sh +++ b/scripts/reports/InternalDependenciesCsv.sh @@ -4,6 +4,8 @@ # It contains lists of e.g. incoming and outgoing package dependencies, # abstractness, instability and the distance to the so called "main sequence". +# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/InternalDependenciesJupyter.sh b/scripts/reports/InternalDependenciesJupyter.sh index 4eae5802f..71b9ea125 100755 --- a/scripts/reports/InternalDependenciesJupyter.sh +++ b/scripts/reports/InternalDependenciesJupyter.sh @@ -4,6 +4,8 @@ # It contains lists of e.g. cyclic dependencies, dependencies that are only used by a few packages, # classes that are used by many different packages and some more. +# Requires executeJupyterNotebook.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/ObjectOrientedDesignMetricsCsv.sh b/scripts/reports/ObjectOrientedDesignMetricsCsv.sh index 7036f3514..aa37ca394 100755 --- a/scripts/reports/ObjectOrientedDesignMetricsCsv.sh +++ b/scripts/reports/ObjectOrientedDesignMetricsCsv.sh @@ -4,6 +4,8 @@ # It contains lists of e.g. incoming and outgoing package dependencies, # abstractness, instability and the distance to the so called "main sequence". 
+# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/ObjectOrientedDesignMetricsJupyter.sh b/scripts/reports/ObjectOrientedDesignMetricsJupyter.sh index 013bcca44..23bd77ce4 100755 --- a/scripts/reports/ObjectOrientedDesignMetricsJupyter.sh +++ b/scripts/reports/ObjectOrientedDesignMetricsJupyter.sh @@ -4,6 +4,8 @@ # It contains lists of e.g. incoming and outgoing package dependencies, # abstractness, instability and the distance to the so called "main sequence". +# Requires executeJupyterNotebook.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/OverviewCsv.sh b/scripts/reports/OverviewCsv.sh index 855541cc8..ce96f4045 100755 --- a/scripts/reports/OverviewCsv.sh +++ b/scripts/reports/OverviewCsv.sh @@ -3,6 +3,8 @@ # Executes "Overview" Cypher queries to get the "overview-csv" CSV reports. # It contains the numbers of packages, types, methods, cyclic complexity, etc. +# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/OverviewJupyter.sh b/scripts/reports/OverviewJupyter.sh index a7cb1471e..9d0b59de0 100755 --- a/scripts/reports/OverviewJupyter.sh +++ b/scripts/reports/OverviewJupyter.sh @@ -5,6 +5,8 @@ # how they relate to each other, distribution of Methods and their effective lines of code # and how the cyclomatic complexity is distributed across all Methods per artifact. +# Requires executeJupyterNotebook.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/SimilarityCsv.sh b/scripts/reports/SimilarityCsv.sh index 12f7a8c8f..4fdf88c9e 100755 --- a/scripts/reports/SimilarityCsv.sh +++ b/scripts/reports/SimilarityCsv.sh @@ -5,6 +5,8 @@ # The reports (csv files) will be written into the sub directory reports/community. # Note that "scripts/prepareAnalysis.sh" is required to run prior to this script. +# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/VisibilityMetricsCsv.sh b/scripts/reports/VisibilityMetricsCsv.sh index e51257b43..b96f83253 100755 --- a/scripts/reports/VisibilityMetricsCsv.sh +++ b/scripts/reports/VisibilityMetricsCsv.sh @@ -4,6 +4,8 @@ # It contains lists of packages with their relative visibility (public types divided by all types) # as well as the global statistics for every artifact. +# Requires executeQueryFunctions.sh + # Overrideable Constants (defaults also defined in sub scripts) REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"} diff --git a/scripts/reports/VisibilityMetricsJupyter.sh b/scripts/reports/VisibilityMetricsJupyter.sh index 809ed3e74..c558a85f9 100755 --- a/scripts/reports/VisibilityMetricsJupyter.sh +++ b/scripts/reports/VisibilityMetricsJupyter.sh @@ -3,6 +3,8 @@ # Creates the "visibility-metrics" report (ipynb, md, pdf) based on the Jupyter Notebook "VisibilityMetrics.ipynb". # It contains lists of how many components are visible everywhere in comparison to all (including internal) components. 
+# Requires executeJupyterNotebook.sh
+
 # Overrideable Constants (defaults also defined in sub scripts)
 REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"}

diff --git a/scripts/reports/WordcloudJupyter.sh b/scripts/reports/WordcloudJupyter.sh
index 2b28fd88a..58bf4a712 100755
--- a/scripts/reports/WordcloudJupyter.sh
+++ b/scripts/reports/WordcloudJupyter.sh
@@ -5,6 +5,8 @@
 # how they relate to each other, distribution of Methods and their effective lines of code
 # and how the cyclomatic complexity is distributed across all Methods per artifact.

+# Requires executeJupyterNotebook.sh
+
 # Overrideable Constants (defaults also defined in sub scripts)
 REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"}

diff --git a/scripts/reports/compilations/AllReports.sh b/scripts/reports/compilations/AllReports.sh
index f4e13f087..398b8bf72 100755
--- a/scripts/reports/compilations/AllReports.sh
+++ b/scripts/reports/compilations/AllReports.sh
@@ -3,6 +3,8 @@
 # Runs all report scripts.
 # It only consideres scripts in the "reports" directory (overridable with REPORTS_SCRIPT_DIR) one directory above this one.

+# Requires reports/*.sh
+
 ## Get this "scripts/reports/compilations" directory if not already set.
 # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution.
 # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes.
diff --git a/scripts/reports/compilations/CsvReports.sh b/scripts/reports/compilations/CsvReports.sh
index 17983f846..f66882bfe 100755
--- a/scripts/reports/compilations/CsvReports.sh
+++ b/scripts/reports/compilations/CsvReports.sh
@@ -3,6 +3,8 @@
 # Runs all CSV report scripts (no Python and Chromium required).
 # It only consideres scripts in the "reports" directory (overridable with REPORTS_SCRIPT_DIR) one directory above this one.

+# Requires reports/*.sh
+
 ## Get this "scripts/reports/compilations" directory if not already set.
 # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution.
 # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes.
diff --git a/scripts/reports/compilations/DatabaseCsvExportReports.sh b/scripts/reports/compilations/DatabaseCsvExportReports.sh
index e6754f623..2c6fe171f 100755
--- a/scripts/reports/compilations/DatabaseCsvExportReports.sh
+++ b/scripts/reports/compilations/DatabaseCsvExportReports.sh
@@ -7,6 +7,8 @@
 # The reason for that is that it exports the whole graph database. This should only be done intentionally
 # and not within a default "AllReports.sh" run because it is performance intense and could raise security concerns.

+# Requires executeQueryFunctions.sh
+
 # Overrideable Constants (defaults also defined in sub scripts)
 REPORTS_DIRECTORY=${REPORTS_DIRECTORY:-"reports"}

diff --git a/scripts/reports/compilations/JupyterReports.sh b/scripts/reports/compilations/JupyterReports.sh
index cc778195c..7c5dc9cec 100755
--- a/scripts/reports/compilations/JupyterReports.sh
+++ b/scripts/reports/compilations/JupyterReports.sh
@@ -6,6 +6,8 @@
 # For PDF generation chromium is required additionally.
 # Therefore these reports will take longer and require more ressources than just plain database queries/procedures.

+# Requires reports/*.sh
+
 ## Get this "scripts/reports/compilations" directory if not already set.
 # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution.
# CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. diff --git a/scripts/resetAndScanChanged.sh b/scripts/resetAndScanChanged.sh index 33056be73..3377d65b1 100755 --- a/scripts/resetAndScanChanged.sh +++ b/scripts/resetAndScanChanged.sh @@ -4,6 +4,8 @@ # Note: "resetAndScan" expects jQAssistant to be installed in the "tools" directory. +# Requires resetAndScan.sh + ## Get this "scripts" directory if not already set # Even if $BASH_SOURCE is made for Bourne-like shells it is also supported by others and therefore here the preferred solution. # CDPATH reduces the scope of the cd command to potentially prevent unintended directory changes. diff --git a/scripts/setupNeo4j.sh b/scripts/setupNeo4j.sh index 40f64a99b..5c26e61b2 100755 --- a/scripts/setupNeo4j.sh +++ b/scripts/setupNeo4j.sh @@ -4,6 +4,8 @@ # Note: The environment variable NEO4J_INITIAL_PASSWORD needs to be set. +# Requires download.sh,setupNeo4jInitialPassword.sh + NEO4J_EDITION=${NEO4J_EDITION:-"community"} # Choose "community" or "enterprise" NEO4J_VERSION=${NEO4J_VERSION:-"5.9.0"} NEO4J_APOC_PLUGIN_VERSION=${NEO4J_APOC_PLUGIN_VERSION:-"5.10.1"} #Awesome Procedures for Neo4j Plugin, Version needs to be compatible to Neo4j diff --git a/scripts/startNeo4j.sh b/scripts/startNeo4j.sh index 469d508a4..f7bfd5253 100755 --- a/scripts/startNeo4j.sh +++ b/scripts/startNeo4j.sh @@ -5,6 +5,8 @@ # Note: Does nothing if the database is already running. # Note: It requires Neo4j to be installed in the TOOLS_DIRECTORY. +# Requires waitForNeo4jHttp.sh + NEO4J_EDITION=${NEO4J_EDITION:-"community"} # Choose "community" or "enterprise" NEO4J_VERSION=${NEO4J_VERSION:-"5.9.0"} TOOLS_DIRECTORY=${TOOLS_DIRECTORY:-"tools"} # Get the tools directory (defaults to "tools") diff --git a/scripts/waitForNeo4jHttp.sh b/scripts/waitForNeo4jHttp.sh index ddf3df935..62c358085 100755 --- a/scripts/waitForNeo4jHttp.sh +++ b/scripts/waitForNeo4jHttp.sh @@ -3,6 +3,8 @@ # Waits until the HTTP Transactions API of Neo4j Graph Database is available. # It queries the number of nodes and relationships to assert the connection. +# Requires executeQueryFunctions.sh + NEO4J_HTTP_PORT=${NEO4J_HTTP_PORT:-"7474"} ## Get this "scripts" directory if not already set @@ -28,7 +30,7 @@ echo "${WAIT_TIMES}" | tr ' ' '\n' | while read -r waitTime; do echo "waitForNeo4jHttp: Waiting for ${waitTime} second(s)" sleep "${waitTime}" - # Calls the Neo4j HTTP API using cURL ( https://curl.se ) + # Queries node and relationship count as a basic validation if ! cyper_elements_query_result=$(execute_cypher "${CYPHER_DIR}/Count_nodes_and_relationships.cypher"); then continue; # query failed -> try again
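For illustration only, the wait-and-retry idea used by waitForNeo4jHttp.sh can be read as the following standalone sketch. The WAIT_TIMES example values and the plain HTTP probe are assumptions for this sketch; the actual script asserts the connection by running execute_cypher with Count_nodes_and_relationships.cypher.

    #!/usr/bin/env bash
    # Sketch (assumed values): retry probing Neo4j with increasing wait times between attempts.
    NEO4J_HTTP_PORT=${NEO4J_HTTP_PORT:-"7474"}
    WAIT_TIMES=${WAIT_TIMES:-"4 4 8 16"} # seconds to wait before each attempt (example values)

    for waitTime in ${WAIT_TIMES}; do
        echo "Waiting for ${waitTime} second(s) before probing Neo4j"
        sleep "${waitTime}"
        # Probe the HTTP endpoint; the real script runs a Cypher count query instead.
        if curl --silent --fail "http://localhost:${NEO4J_HTTP_PORT}" > /dev/null; then
            echo "Neo4j HTTP endpoint is available"
            exit 0
        fi
    done
    echo "Neo4j HTTP endpoint could not be reached" >&2
    exit 1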