diff --git a/.gitignore b/.gitignore index 00557a628..f91f8dbbf 100644 --- a/.gitignore +++ b/.gitignore @@ -24,3 +24,5 @@ local_tests/serverless-init/datadog-agent local_tests/serverless-init/logs.txt bottlecap/target bottlecap/proptest-regressions + +.gitlab/pipeline-** diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b59bcd75f..41a420938 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,134 +1,82 @@ variables: - GIT_DEPTH: 1 - REGION_TO_DEPLOY: - description: "use sa-east-1 for dev, us-east-1 for RC, all for all regions" - value: sa-east-1 + DOCKER_TARGET_IMAGE: registry.ddbuild.io/ci/datadog-lambda-extension + DOCKER_TARGET_VERSION: latest + # Manual trigger variables AGENT_BRANCH: - description: "datadog-agent branch you want to release" + description: "Branch of the datadog-agent repository to use." value: main LAYER_SUFFIX: - description: "Suffix to be appended to the layer name (default empty)" + description: "Suffix to be appended to the layer name (default empty)." value: "" -image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/docker:20.10-py3 - stages: - - build_tools_if_needed - - build_layer - - prepare_multi_region - - trigger + - generate + - build -build_tools: - stage: build_tools_if_needed - variables: - CI_ENABLE_CONTAINER_IMAGE_BUILDS: "true" - TARGET: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-lambda-extension +ci image: + stage: build + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:arm64"] rules: - - if: $CI_PIPELINE_SOURCE == "web" - when: never - - changes: - - build-tools/**/* - tags: ["runner:docker"] - script: - - cd build-tools && docker buildx build --tag ${TARGET} --push . - -build_and_deploy_layer: - stage: build_layer - rules: - - if: $CI_PIPELINE_SOURCE == "web" + - if: '$CI_COMMIT_BRANCH == "main" && $CI_PIPELINE_SOURCE == "push"' + changes: + - .gitlab/Dockerfile + when: on_success variables: - CI_ENABLE_CONTAINER_IMAGE_BUILDS: "true" - ROLE_TO_ASSUME: arn:aws:iam::425362996713:role/sandbox-layer-deployer - TARGET: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-lambda-extension - tags: ["runner:docker"] - artifacts: - paths: - - tmp/serverless/datadog_extension_signed.zip + DOCKER_TARGET: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} script: - - mkdir tmp - - git clone --branch ${AGENT_BRANCH} --depth=1 https://github.com/DataDog/datadog-agent.git - - dockerId=$(docker create --platform linux/amd64 ${TARGET}) - - docker cp $dockerId:/build_tools . - - EXTERNAL_ID=$(aws ssm get-parameter - --region us-east-1 - --name ci.datadog-lambda-extension.externalid - --with-decryption - --query "Parameter.Value" - --out text) - - # build - - ./build_tools - build - --version 1 - --agent-version 1 - --architecture amd64 - --context-path . - --destination-path tmp/serverless - --docker-path "scripts_v2/Dockerfile.build" - --artifact-name "datadog_extension.zip" + - docker buildx build --platform linux/amd64,linux/arm64 --no-cache --pull --push --tag ${DOCKER_TARGET} -f .gitlab/Dockerfile . 
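# Illustrative sketch (not part of the diff): the "ci image" job above amounts to the
# following local commands, assuming a buildx builder with amd64/arm64 emulation is
# available and you can push to registry.ddbuild.io ("ci-builder" is a hypothetical name).
docker buildx create --name ci-builder --use
docker buildx build \
    --platform linux/amd64,linux/arm64 \
    --no-cache --pull --push \
    --tag registry.ddbuild.io/ci/datadog-lambda-extension:latest \
    -f .gitlab/Dockerfile .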
- # sign - - ./build_tools - sign - --layer-path tmp/serverless/datadog_extension.zip - --destination-path tmp/serverless/datadog_extension_signed.zip - --assume-role "$ROLE_TO_ASSUME" - --external-id "$EXTERNAL_ID" +.go-cache: &go-cache + key: datadog-lambda-extension-go-cache + policy: pull - # ls artifacts - - ls tmp/serverless - - # deploy to single region if needed - - if [ "${REGION_TO_DEPLOY}" = "all" ]; then exit 0; fi - - ./build_tools - deploy - --layer-path tmp/serverless/datadog_extension_signed.zip - --architecture amd64 - --layer-name "Datadog-Extension" - --layer-suffix "$LAYER_SUFFIX" - --region "$REGION_TO_DEPLOY" - --assume-role "$ROLE_TO_ASSUME" - --external-id "$EXTERNAL_ID" - -prepare_multi_region: - stage: prepare_multi_region - tags: ["runner:docker"] +generator: + stage: generate + image: registry.ddbuild.io/images/mirror/golang:alpine + tags: ["arch:amd64"] + cache: *go-cache + script: + - apk add --no-cache gomplate + - gomplate --config .gitlab/config.yaml artifacts: paths: - - trigger_region.yaml - - tmp/serverless/datadog_extension_signed.zip + - .gitlab/pipeline-bottlecap.yaml + - .gitlab/pipeline-go-agent.yaml + - .gitlab/pipeline-lambda-extension.yaml + +bottlecap-only: + stage: build + trigger: + include: + - artifact: .gitlab/pipeline-bottlecap.yaml + job: generator + strategy: depend + rules: + - when: on_success + +go-agent-only: + stage: build + trigger: + include: + - artifact: .gitlab/pipeline-go-agent.yaml + job: generator + strategy: depend rules: - - if: $REGION_TO_DEPLOY != "all" - when: never - if: $CI_PIPELINE_SOURCE == "web" - - if: $CI_PIPELINE_SOURCE == "external" - - if: $CI_PIPELINE_SOURCE == "trigger" - - if: $CI_PIPELINE_SOURCE == "pipeline" - - if: $CI_PIPELINE_SOURCE == "parent_pipeline" variables: - TARGET: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-lambda-extension - ROLE_TO_ASSUME: arn:aws:iam::425362996713:role/sandbox-layer-deployer - script: - - echo $CI_PIPELINE_SOURCE - - if [ "${REGION_TO_DEPLOY}" != "all" ]; then exit 0; fi - - EXTERNAL_ID=$(aws ssm get-parameter - --region us-east-1 - --name ci.datadog-lambda-extension.externalid - --with-decryption - --query "Parameter.Value" - --out text) - - dockerId=$(docker create --platform linux/amd64 ${TARGET}) - - docker cp $dockerId:/build_tools . 
- - regions=$(./build_tools list_region --assume-role "$ROLE_TO_ASSUME" --external-id "$EXTERNAL_ID") - - sed "s/xxx_layer_sufix_xxx/${LAYER_SUFFIX}/" trigger_region.orig.yaml > trigger_region.tmp.yaml - - sed "s/xxx_aws_regions_xxx/${regions}/" trigger_region.tmp.yaml > trigger_region.yaml - - cat trigger_region.yaml + AGENT_BRANCH: $AGENT_BRANCH + LAYER_SUFFIX: $LAYER_SUFFIX -multi_region: - rules: - - if: $REGION_TO_DEPLOY == "all" - stage: trigger +lambda-extension: + stage: build trigger: include: - - artifact: trigger_region.yaml - job: prepare_multi_region \ No newline at end of file + - artifact: .gitlab/pipeline-lambda-extension.yaml + job: generator + strategy: depend + rules: + - if: $CI_PIPELINE_SOURCE == "web" + variables: + AGENT_BRANCH: $AGENT_BRANCH + LAYER_SUFFIX: $LAYER_SUFFIX diff --git a/.gitlab/Dockerfile b/.gitlab/Dockerfile new file mode 100644 index 000000000..d33cb7c69 --- /dev/null +++ b/.gitlab/Dockerfile @@ -0,0 +1,23 @@ +FROM registry.ddbuild.io/images/docker:24.0.5 + +RUN apt-get update && apt-get install -y --fix-missing --no-install-recommends \ + curl gcc gnupg g++ make cmake unzip openssl g++ uuid-runtime + +# Install AWS CLI +RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" +RUN unzip awscliv2.zip && ./aws/install + +# Install Protocol Buffers compiler by hand +COPY ./scripts/install-protoc.sh / +RUN chmod +x /install-protoc.sh && /install-protoc.sh + +# Install Rust +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | \ + sh -s -- --profile minimal --default-toolchain nightly -y + +RUN source $HOME/.cargo/env +ENV PATH /root/.cargo/bin/:$PATH + +RUN rustup component add rust-src --toolchain nightly + + diff --git a/.gitlab/config.yaml b/.gitlab/config.yaml new file mode 100644 index 000000000..3e3899556 --- /dev/null +++ b/.gitlab/config.yaml @@ -0,0 +1,21 @@ +# gomplate template generation pipeline + +inputFiles: + - .gitlab/templates/bottlecap.yaml.tpl + - .gitlab/templates/go-agent.yaml.tpl + - .gitlab/templates/lambda-extension.yaml.tpl + +outputFiles: + - .gitlab/pipeline-bottlecap.yaml + - .gitlab/pipeline-go-agent.yaml + - .gitlab/pipeline-lambda-extension.yaml + +datasources: + architectures: + url: .gitlab/datasources/architectures.yaml + + environments: + url: .gitlab/datasources/environments.yaml + + regions: + url: .gitlab/datasources/regions.yaml diff --git a/.gitlab/datasources/architectures.yaml b/.gitlab/datasources/architectures.yaml new file mode 100644 index 000000000..4b9b1a60b --- /dev/null +++ b/.gitlab/datasources/architectures.yaml @@ -0,0 +1,3 @@ +architectures: + - name: amd64 + - name: arm64 diff --git a/.gitlab/datasources/environments.yaml b/.gitlab/datasources/environments.yaml new file mode 100644 index 000000000..90056ab00 --- /dev/null +++ b/.gitlab/datasources/environments.yaml @@ -0,0 +1,9 @@ +environments: + - name: sandbox + external_id: sandbox-publish-externalid + role_to_assume: sandbox-layer-deployer + account: 425362996713 + - name: prod + external_id: prod-publish-externalid + role_to_assume: dd-serverless-layer-deployer-role + account: 464622532012 diff --git a/.gitlab/datasources/regions.yaml b/.gitlab/datasources/regions.yaml new file mode 100644 index 000000000..9f12a0118 --- /dev/null +++ b/.gitlab/datasources/regions.yaml @@ -0,0 +1,30 @@ +regions: + - code: "us-east-1" + - code: "us-east-2" + - code: "us-west-1" + - code: "us-west-2" + - code: "af-south-1" + - code: "ap-east-1" + - code: "ap-south-1" + - code: "ap-south-2" + - code: "ap-southeast-1" + - 
code: "ap-southeast-2" + - code: "ap-southeast-3" + - code: "ap-southeast-4" + - code: "ap-northeast-1" + - code: "ap-northeast-2" + - code: "ap-northeast-3" + - code: "ca-central-1" + - code: "ca-west-1" + - code: "eu-central-1" + - code: "eu-central-2" + - code: "eu-north-1" + - code: "eu-west-1" + - code: "eu-west-2" + - code: "eu-west-3" + - code: "eu-south-1" + - code: "eu-south-2" + - code: "il-central-1" + - code: "me-south-1" + - code: "me-central-1" + - code: "sa-east-1" diff --git a/.gitlab/scripts/build_bottlecap.sh b/.gitlab/scripts/build_bottlecap.sh new file mode 100755 index 000000000..bb774e665 --- /dev/null +++ b/.gitlab/scripts/build_bottlecap.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2024 Datadog, Inc. + +set -e + +if [ -z "$ARCHITECTURE" ]; then + printf "[ERROR]: ARCHITECTURE not specified\n" + exit 1 +fi + +if [ -z "$ALPINE" ]; then + printf "Building bottlecap" +else + echo "Building bottlecap for alpine" + BUILD_SUFFIX="-alpine" +fi + +prepare_folders() { + # Move into the root directory, so this script can be called from any directory + SCRIPTS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" + ROOT_DIR=$SCRIPTS_DIR/../.. + cd $ROOT_DIR + + echo $ROOT_DIR + + EXTENSION_DIR=".layers" + TARGET_DIR=$(pwd)/$EXTENSION_DIR + + rm -rf $EXTENSION_DIR/datadog_bottlecap-${ARCHITECTURE}${BUILD_SUFFIX} 2>/dev/null + rm -rf $EXTENSION_DIR/datadog_bottlecap-${ARCHITECTURE}${BUILD_SUFFIX}.zip 2>/dev/null + + cd $ROOT_DIR +} + + +docker_build() { + local arch=$1 + if [ "$arch" == "amd64" ]; then + PLATFORM="x86_64" + else + PLATFORM="aarch64" + fi + + docker buildx build --platform linux/${arch} \ + -t datadog/build-bottlecap-${arch} \ + -f ./scripts/Dockerfile.bottlecap.build \ + --build-arg PLATFORM=$PLATFORM \ + --build-arg GO_AGENT_PATH="datadog_extension-${arch}${BUILD_SUFFIX}" \ + . -o $TARGET_DIR/datadog_bottlecap-${arch}${BUILD_SUFFIX} + + cp $TARGET_DIR/datadog_bottlecap-${arch}${BUILD_SUFFIX}/datadog_extension.zip $TARGET_DIR/datadog_bottlecap-${arch}${BUILD_SUFFIX}.zip + + unzip $TARGET_DIR/datadog_bottlecap-${arch}${BUILD_SUFFIX}/datadog_extension.zip -d $TARGET_DIR/datadog_bottlecap-${arch}${BUILD_SUFFIX} + rm -rf $TARGET_DIR/datadog_bottlecap-${arch}${BUILD_SUFFIX}/datadog_extension.zip + rm -rf $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX} + rm -rf $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX}.zip +} + +prepare_folders +docker_build $ARCHITECTURE diff --git a/.gitlab/scripts/build_go_agent.sh b/.gitlab/scripts/build_go_agent.sh new file mode 100755 index 000000000..f3c9009c1 --- /dev/null +++ b/.gitlab/scripts/build_go_agent.sh @@ -0,0 +1,96 @@ +#!/bin/bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2024 Datadog, Inc. + +# Usage +# ARCHITECTURE=arm64 ./scripts/build_go_agent.sh + +set -e + +if [ -z "$ARCHITECTURE" ]; then + printf "[ERROR]: ARCHITECTURE not specified\n" + exit 1 +fi + + +if [ -z "$CI_COMMIT_TAG" ]; then + # Running on dev + printf "Running on dev environment\n" + VERSION="dev" +else + printf "Found version tag in environment\n" + VERSION=$(echo "${CI_COMMIT_TAG##*v}" | cut -d. 
-f2) +fi + +if [ -z "$SERVERLESS_INIT" ]; then + echo "Building Datadog Lambda Extension" + CMD_PATH="cmd/serverless" +else + echo "Building Serverless Init" + CMD_PATH="cmd/serverless-init" +fi + + +if [ -z "$ALPINE" ]; then + BUILD_FILE=Dockerfile.build +else + echo "Building for alpine" + BUILD_FILE=Dockerfile.alpine.build + BUILD_SUFFIX="-alpine" +fi + +# Allow override build tags +if [ -z "$BUILD_TAGS" ]; then + BUILD_TAGS="serverless otlp" +fi + +# Allow override agent path +if [ -z "$AGENT_PATH" ]; then + AGENT_PATH="../datadog-agent" +fi + +MAIN_DIR=$(pwd) # datadog-lambda-extension + +EXTENSION_DIR=".layers" +TARGET_DIR=$MAIN_DIR/$EXTENSION_DIR + +# Make sure the folder does not exist +rm -rf $EXTENSION_DIR 2>/dev/null + +mkdir -p $EXTENSION_DIR + +# Prepare folder with only *mod and *sum files to enable Docker caching capabilities +mkdir -p $MAIN_DIR/scripts/.src $MAIN_DIR/scripts/.cache +echo "Copy mod files to build a cache" +cp $AGENT_PATH/go.mod $MAIN_DIR/scripts/.cache +cp $AGENT_PATH/go.sum $MAIN_DIR/scripts/.cache + +# Compress all files to speed up docker copy +touch $MAIN_DIR/scripts/.src/datadog-agent.tgz +cd $AGENT_PATH/.. +tar --exclude=.git -czf $MAIN_DIR/scripts/.src/datadog-agent.tgz datadog-agent +cd $MAIN_DIR + +function docker_build { + arch=$1 + file=$2 + + docker buildx build --platform linux/${arch} \ + -t datadog/build-go-agent-${arch}:${VERSION} \ + -f ${MAIN_DIR}/scripts/${file} \ + --build-arg EXTENSION_VERSION="${VERSION}" \ + --build-arg AGENT_VERSION="${AGENT_VERSION}" \ + --build-arg CMD_PATH="${CMD_PATH}" \ + --build-arg BUILD_TAGS="${BUILD_TAGS}" \ + . -o $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX} + + cp $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX}/datadog_extension.zip $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX}.zip + unzip $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX}/datadog_extension.zip -d $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX} + rm -rf $TARGET_DIR/datadog_extension-${arch}${BUILD_SUFFIX}/datadog_extension.zip +} + +docker_build $ARCHITECTURE $BUILD_FILE + diff --git a/.gitlab/scripts/check_layer_size.sh b/.gitlab/scripts/check_layer_size.sh new file mode 100755 index 000000000..d54b1daa4 --- /dev/null +++ b/.gitlab/scripts/check_layer_size.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2024 Datadog, Inc. 
+ +# Compares layer size to threshold, and fails if below that threshold + +set -e + +if [ -z "$LAYER_FILE" ]; then + echo "[ERROR]: LAYER_FILE not specified" + exit 1 +fi + +MAX_LAYER_COMPRESSED_SIZE_KB=$(expr 17 \* 1024) # 17 MB, amd64 is 17, while arm64 is 15 +MAX_LAYER_UNCOMPRESSED_SIZE_KB=$(expr 46 \* 1024) # 46 MB, amd is 46, while arm64 is 45 + +LAYERS_DIR=".layers" + +FILE=$LAYERS_DIR/$LAYER_FILE +FILE_SIZE=$(stat --printf="%s" $FILE) +FILE_SIZE_KB="$(( ${FILE_SIZE%% *} / 1024))" +echo "Layer file ${FILE} has zipped size ${FILE_SIZE_KB} kb" +if [ "$FILE_SIZE_KB" -gt "$MAX_LAYER_COMPRESSED_SIZE_KB" ]; then + echo "Zipped size exceeded limit $MAX_LAYER_COMPRESSED_SIZE_KB kb" + exit 1 +fi +mkdir tmp +unzip -q $FILE -d tmp +UNZIPPED_FILE_SIZE=$(du -shb tmp/ | cut -f1) +UNZIPPED_FILE_SIZE_KB="$(( ${UNZIPPED_FILE_SIZE%% *} / 1024))" +rm -rf tmp +echo "Layer file ${FILE} has unzipped size ${UNZIPPED_FILE_SIZE_KB} kb" +if [ "$UNZIPPED_FILE_SIZE_KB" -gt "$MAX_LAYER_UNCOMPRESSED_SIZE_KB" ]; then + echo "Unzipped size exceeded limit $MAX_LAYER_UNCOMPRESSED_SIZE_KB kb" + exit 1 +fi diff --git a/.gitlab/scripts/get_secrets.sh b/.gitlab/scripts/get_secrets.sh new file mode 100755 index 000000000..dd019061a --- /dev/null +++ b/.gitlab/scripts/get_secrets.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2024 Datadog, Inc. + +set -e + +if [ -z "$EXTERNAL_ID_NAME" ]; then + printf "[Error] No EXTERNAL_ID_NAME found.\n" + printf "Exiting script...\n" + exit 1 +fi + +if [ -z "$ROLE_TO_ASSUME" ]; then + printf "[Error] No ROLE_TO_ASSUME found.\n" + printf "Exiting script...\n" + exit 1 +fi + +printf "Getting AWS External ID...\n" + +EXTERNAL_ID=$(aws ssm get-parameter \ + --region us-east-1 \ + --name "ci.datadog-lambda-extension.$EXTERNAL_ID_NAME" \ + --with-decryption \ + --query "Parameter.Value" \ + --out text) + +printf "Getting DD API KEY...\n" + +export DD_API_KEY=$(aws ssm get-parameter \ + --region us-east-1 \ + --name ci.datadog-lambda-extension.dd-api-key \ + --with-decryption \ + --query "Parameter.Value" \ + --out text) + +printf "Assuming role...\n" + +export $(printf "AWS_ACCESS_KEY_ID=%s AWS_SECRET_ACCESS_KEY=%s AWS_SESSION_TOKEN=%s" \ + $(aws sts assume-role \ + --role-arn "arn:aws:iam::$AWS_ACCOUNT:role/$ROLE_TO_ASSUME" \ + --role-session-name "ci.datadog-lambda-extension-$CI_JOB_ID-$CI_JOB_STAGE" \ + --query "Credentials.[AccessKeyId,SecretAccessKey,SessionToken]" \ + --external-id $EXTERNAL_ID \ + --output text)) diff --git a/.gitlab/scripts/publish_layers.sh b/.gitlab/scripts/publish_layers.sh new file mode 100755 index 000000000..336593d95 --- /dev/null +++ b/.gitlab/scripts/publish_layers.sh @@ -0,0 +1,150 @@ +#!/bin/bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2021 Datadog, Inc. 
+ +set -e + +LAYER_DIR=".layers" +VALID_ACCOUNTS=("sandbox" "prod") + +publish_layer() { + region=$1 + layer=$2 + file=$3 + compatible_architectures=$4 + + version_nbr=$(aws lambda publish-layer-version --layer-name $layer \ + --description "Datadog Lambda Extension" \ + --compatible-architectures $compatible_architectures \ + --zip-file "fileb://${file}" \ + --region $region \ + | jq -r '.Version' + ) + + # Add permissions only for prod + if [ "$STAGE" == "prod" ]; then + permission=$(aws lambda add-layer-version-permission --layer-name $layer \ + --version-number $version_nbr \ + --statement-id "release-$version_nbr" \ + --action lambda:GetLayerVersion \ + --principal "*" \ + --region $region + ) + fi + + echo $version_nbr +} + + +if [ -z "$ARCHITECTURE" ]; then + printf "[ERROR]: ARCHITECTURE not specified." + exit 1 +fi + + +if [ -z "$LAYER_FILE" ]; then + printf "[ERROR]: LAYER_FILE not specified." + exit 1 +fi + +LAYER_PATH="${LAYER_DIR}/${LAYER_FILE}" +# Check that the layer files exist +if [ ! -f $LAYER_PATH ]; then + printf "[ERROR]: Could not find ${LAYER_PATH}." + exit 1 +fi + +if [ "$ARCHITECTURE" == "amd64" ]; then + LAYER_NAME="Datadog-Extension" +else + LAYER_NAME="Datadog-Extension-ARM" +fi + +if [ -z "$LAYER_NAME" ]; then + printf "[ERROR]: LAYER_NAME not specified." + exit 1 +fi + +AVAILABLE_REGIONS=$(aws ec2 describe-regions | jq -r '.[] | .[] | .RegionName') + +if [ -z "$REGION" ]; then + printf "[ERROR]: REGION not specified." + exit 1 +else + echo "Region specified: $REGION" + if [[ ! "$AVAILABLE_REGIONS" == *"$REGION"* ]]; then + printf "Could not find $REGION in available regions: $AVAILABLE_REGIONS" + exit 1 + fi +fi + +if [ -z "$STAGE" ]; then + printf "[ERROR]: STAGE not specified.\n" + exit 1 +fi + +printf "[$REGION] Starting publishing layers...\n" + +if [ -z "$LAYER_SUFFIX" ]; then + printf "[$REGION] Deploying layers without suffix\n" +else + printf "[$REGION] Deploying layers with specified suffix: ${LAYER_SUFFIX}\n" + LAYER_NAME="${LAYER_NAME}-${LAYER_SUFFIX}" +fi + +if [[ "$STAGE" =~ ^(staging|sandbox)$ ]]; then + # Deploy latest version + latest_version=$(aws lambda list-layer-versions --region $REGION --layer-name $LAYER_NAME --query 'LayerVersions[0].Version || `0`') + VERSION=$(($latest_version + 1)) +else + # Running on prod + if [ -z "$CI_COMMIT_TAG" ]; then + printf "[Error] No CI_COMMIT_TAG found.\n" + printf "Exiting script...\n" + exit 1 + else + printf "Tag found in environment: $CI_COMMIT_TAG\n" + fi + + VERSION=$(echo "${CI_COMMIT_TAG##*v}" | cut -d. 
-f2) +fi + +if [ -z "$VERSION" ]; then + printf "[ERROR]: Layer VERSION not specified" + exit 1 +else + echo "Layer version parsed: $VERSION" +fi + +# Compatible Architectures +if [ "$ARCHITECTURE" == "amd64" ]; then + architectures="x86_64" +else + architectures="arm64" +fi + +latest_version=$(aws lambda list-layer-versions --region $REGION --layer-name $LAYER_NAME --query 'LayerVersions[0].Version || `0`') +if [ $latest_version -ge $VERSION ]; then + printf "[$REGION] Layer $layer version $VERSION already exists in region $REGION, skipping...\n" + exit 1 +elif [ $latest_version -lt $((VERSION-1)) ]; then + printf "[$REGION][WARNING] The latest version of layer $layer in region $REGION is $latest_version, this will publish all the missing versions including $VERSION\n" +fi + +while [ $latest_version -lt $VERSION ]; do + latest_version=$(publish_layer $REGION $LAYER_NAME $LAYER_PATH $architectures) + printf "[$REGION] Published version $latest_version for layer $LAYER_NAME in region $REGION\n" + + # This shouldn't happen unless someone manually deleted the latest version, say 28, and + # then tries to republish 28 again. The published version would actually be 29, because + # Lambda layers are immutable and AWS will skip deleted version and use the next number. + if [ $latest_version -gt $VERSION ]; then + printf "[$REGION] Published version $latest_version is greater than the desired version $VERSION!" + exit 1 + fi +done + +printf "[$REGION] Finished publishing layers...\n" diff --git a/.gitlab/scripts/sign_layers.sh b/.gitlab/scripts/sign_layers.sh new file mode 100755 index 000000000..7792fbc72 --- /dev/null +++ b/.gitlab/scripts/sign_layers.sh @@ -0,0 +1,121 @@ +#!/bin/bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2024 Datadog, Inc. +# +# Usage +# +# LAYER_FILE= ./scripts/sign_layers.sh +# +# LAYER_FILE=datadog_extension-amd64.zip ./scripts/sign_layers.sh sandbox + +set -e + +LAYER_DIR=".layers" +SIGNING_PROFILE_NAME="DatadogLambdaSigningProfile" +VALID_ACCOUNTS=("sandbox" "prod") + +if [ -z "$LAYER_FILE" ]; then + echo "[ERROR]: $LAYER_FILE not specified." + exit 1 +fi + +# Check account parameter +if [ -z "$1" ]; then + echo "[ERROR]: Account parameter not specified." + exit 1 +fi + +if [[ ! "${VALID_ACCOUNTS[@]}" =~ $1 ]]; then + echo "[ERROR]: Account parameter is invalid. Not in `sandbox` or `prod`." + exit 1 +fi + +if [ "$1" = "sandbox" ]; then + REGION="sa-east-1" + S3_BUCKET_NAME="dd-lambda-signing-bucket-serverless-sandbox" +fi +if [ "$1" = "prod" ]; then + REGION="us-east-1" + S3_BUCKET_NAME="dd-lambda-signing-bucket" +fi + +echo "---" +echo "Signing layer for $LAYER_FILE" + +LAYER_LOCAL_PATH="${LAYER_DIR}/${LAYER_FILE}" + +# Upload the layer to S3 for signing +echo "---" +echo "Uploading layer to S3 for signing..." + +UUID=$(uuidgen) +S3_UNSIGNED_ZIP_KEY="${UUID}.zip" +S3_UNSIGNED_ZIP_URI="s3://${S3_BUCKET_NAME}/${S3_UNSIGNED_ZIP_KEY}" + +aws s3 cp $LAYER_LOCAL_PATH $S3_UNSIGNED_ZIP_URI + +# Start a signing job +echo "---" +echo "Starting the signing job..." 
+SIGNING_JOB_ID=$(aws signer start-signing-job \ + --source "s3={bucketName=${S3_BUCKET_NAME},key=${S3_UNSIGNED_ZIP_KEY},version=null}" \ + --destination "s3={bucketName=${S3_BUCKET_NAME}}" \ + --profile-name $SIGNING_PROFILE_NAME \ + --region $REGION \ + | jq -r '.jobId'\ +) + +# Wait for the signing job to complete +echo "---" +echo "Waiting for the signing job to complete..." +SECONDS_WAITED_SO_FAR=0 +while : +do + sleep 3 + SECONDS_WAITED_SO_FAR=$((SECONDS_WAITED_SO_FAR + 3)) + + SIGNING_JOB_DESCRIPTION=$(aws signer describe-signing-job \ + --job-id $SIGNING_JOB_ID \ + --region $REGION\ + ) + SIGNING_JOB_STATUS=$(echo $SIGNING_JOB_DESCRIPTION | jq -r '.status') + SIGNING_JOB_STATUS_REASON=$(echo $SIGNING_JOB_DESCRIPTION | jq -r '.statusReason') + + echo "---" + if [ $SIGNING_JOB_STATUS = "Succeeded" ]; then + echo "Signing job succeeded!" + break + fi + + if [ $SIGNING_JOB_STATUS = "Failed" ]; then + echo "[ERROR]: Signing job failed" + echo $SIGNING_JOB_STATUS_REASON + exit 1 + fi + + if [ $SECONDS_WAITED_SO_FAR -ge 60 ]; then + echo "[ERROR]: Timed out waiting for the signing job to complete" + exit 1 + fi + + echo "Signing job still in progress..." +done + +# Download the signed ZIP, overwriting the original ZIP +echo "---" +echo "Replacing the local layer with the signed layer from S3..." +S3_SIGNED_ZIP_KEY="${SIGNING_JOB_ID}.zip" +S3_SIGNED_ZIP_URI="s3://${S3_BUCKET_NAME}/${S3_SIGNED_ZIP_KEY}" +aws s3 cp $S3_SIGNED_ZIP_URI $LAYER_LOCAL_PATH + +# Delete the signed and unsigned ZIPs in S3 +echo "Cleaning up the S3 bucket..." +aws s3api delete-object --bucket $S3_BUCKET_NAME --key $S3_UNSIGNED_ZIP_KEY +aws s3api delete-object --bucket $S3_BUCKET_NAME --key $S3_SIGNED_ZIP_KEY + + +echo "---" +echo "Successfully signed layer ${LAYER_FILE}!" diff --git a/.gitlab/templates/bottlecap.yaml.tpl b/.gitlab/templates/bottlecap.yaml.tpl new file mode 100644 index 000000000..3898a5b02 --- /dev/null +++ b/.gitlab/templates/bottlecap.yaml.tpl @@ -0,0 +1,103 @@ +stages: + - build + - test + - sign + - publish + +default: + retry: + max: 1 + when: + - runner_system_failure + +variables: + DOCKER_TARGET_IMAGE: registry.ddbuild.io/ci/datadog-lambda-extension + DOCKER_TARGET_VERSION: latest + +{{ range $architecture := (ds "architectures").architectures }} + +build layer ({{ $architecture.name }}): + stage: build + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:amd64"] + artifacts: + expire_in: 1 hr + paths: + - .layers/datadog_bottlecap-{{ $architecture.name }}.zip + variables: + ARCHITECTURE: {{ $architecture.name }} + script: + - ./scripts/build_bottlecap_layer.sh + +check layer size ({{ $architecture.name }}): + stage: test + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:amd64"] + needs: + - build layer ({{ $architecture.name }}) + dependencies: + - build layer ({{ $architecture.name }}) + variables: + LAYER_FILE: datadog_bottlecap-{{ $architecture.name }}.zip + script: + - .gitlab/scripts/check_layer_size.sh + +fmt ({{ $architecture.name }}): + stage: test + tags: ["arch:{{ $architecture.name }}"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + needs: [] + script: + - cd bottlecap && cargo fmt + +check ({{ $architecture.name }}): + stage: test + tags: ["arch:{{ $architecture.name }}"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + needs: [] + script: + - cd bottlecap && cargo check + +clippy ({{ $architecture.name }}): + stage: test + tags: ["arch:{{ $architecture.name }}"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + needs: [] + 
script: + - cd bottlecap && cargo clippy --all-features + +{{ range $environment := (ds "environments").environments }} + +publish layer {{ $environment.name }} ({{ $architecture.name }}): + stage: publish + tags: ["arch:amd64"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + rules: + - if: '"{{ $environment.name }}" =~ /^(sandbox|staging)/' + when: manual + allow_failure: true + needs: + - build layer ({{ $architecture.name }}) + - check layer size ({{ $architecture.name }}) + - fmt ({{ $architecture.name }}) + - check ({{ $architecture.name }}) + - clippy ({{ $architecture.name }}) + dependencies: + - build layer ({{ $architecture.name }}) + parallel: + matrix: + - REGION: {{ range (ds "regions").regions }} + - {{ .code }} + {{- end}} + variables: + ARCHITECTURE: {{ $architecture.name }} + LAYER_FILE: datadog_bottlecap-{{ $architecture.name }}.zip + STAGE: {{ $environment.name }} + before_script: + - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source .gitlab/scripts/get_secrets.sh + script: + - .gitlab/scripts/publish_layers.sh + +{{- end }} # environments end + +{{- end }} # architectures end diff --git a/.gitlab/templates/go-agent.yaml.tpl b/.gitlab/templates/go-agent.yaml.tpl new file mode 100644 index 000000000..bc2ed612e --- /dev/null +++ b/.gitlab/templates/go-agent.yaml.tpl @@ -0,0 +1,77 @@ +stages: + - build + - test + - sign + - publish + +default: + retry: + max: 1 + when: + - runner_system_failure + +variables: + DOCKER_TARGET_IMAGE: registry.ddbuild.io/ci/datadog-lambda-extension + DOCKER_TARGET_VERSION: latest + GIT_DEPTH: 1 + +{{ range $architecture := (ds "architectures").architectures }} + +build layer ({{ $architecture.name }}): + stage: build + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:amd64"] + artifacts: + expire_in: 1 hr + paths: + - .layers/datadog_extension-{{ $architecture.name }}.zip + variables: + ARCHITECTURE: {{ $architecture.name }} + script: + - cd .. 
&& git clone -b $AGENT_BRANCH --single-branch https://github.com/DataDog/datadog-agent.git && cd datadog-lambda-extension + - .gitlab/scripts/build_go_agent.sh + +check layer size ({{ $architecture.name }}): + stage: test + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:amd64"] + needs: + - build layer ({{ $architecture.name }}) + dependencies: + - build layer ({{ $architecture.name }}) + variables: + LAYER_FILE: datadog_extension-{{ $architecture.name }}.zip + script: + - .gitlab/scripts/check_layer_size.sh + +{{ range $environment := (ds "environments").environments }} + +publish layer {{ $environment.name }} ({{ $architecture.name }}): + stage: publish + tags: ["arch:amd64"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + rules: + - if: '"{{ $environment.name }}" =~ /^(sandbox|staging)/' + when: manual + allow_failure: true + needs: + - build layer ({{ $architecture.name }}) + dependencies: + - build layer ({{ $architecture.name }}) + parallel: + matrix: + - REGION: {{ range (ds "regions").regions }} + - {{ .code }} + {{- end}} + variables: + ARCHITECTURE: {{ $architecture.name }} + LAYER_FILE: datadog_extension-{{ $architecture.name }}.zip + STAGE: {{ $environment.name }} + before_script: + - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source .gitlab/scripts/get_secrets.sh + script: + - .gitlab/scripts/publish_layers.sh + +{{- end }} # environments end + +{{- end }} # architectures end diff --git a/.gitlab/templates/lambda-extension.yaml.tpl b/.gitlab/templates/lambda-extension.yaml.tpl new file mode 100644 index 000000000..5e44bd33e --- /dev/null +++ b/.gitlab/templates/lambda-extension.yaml.tpl @@ -0,0 +1,159 @@ +stages: + - build + - test + - sign + - publish + +default: + retry: + max: 1 + when: + - runner_system_failure + +variables: + DOCKER_TARGET_IMAGE: registry.ddbuild.io/ci/datadog-lambda-extension + DOCKER_TARGET_VERSION: latest + +{{ range $architecture := (ds "architectures").architectures }} + +build go agent ({{ $architecture.name }}): + stage: build + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:amd64"] + artifacts: + expire_in: 1 hr + paths: + - .layers/datadog_extension-{{ $architecture.name }}.zip + - .layers/datadog_extension-{{ $architecture.name }}/* + variables: + ARCHITECTURE: {{ $architecture.name }} + script: + - cd .. 
&& git clone -b $AGENT_BRANCH --single-branch https://github.com/DataDog/datadog-agent.git && cd datadog-lambda-extension + - .gitlab/scripts/build_go_agent.sh + +build bottlecap ({{ $architecture.name }}): + stage: build + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:amd64"] + needs: + - build go agent ({{ $architecture.name }}) + dependencies: + - build go agent ({{ $architecture.name }}) + artifacts: + expire_in: 1 hr + paths: + - .layers/datadog_bottlecap-{{ $architecture.name }}.zip + variables: + ARCHITECTURE: {{ $architecture.name }} + script: + - .gitlab/scripts/build_bottlecap.sh + +check layer size ({{ $architecture.name }}): + stage: test + image: registry.ddbuild.io/images/docker:20.10 + tags: ["arch:amd64"] + needs: + - build bottlecap ({{ $architecture.name }}) + dependencies: + - build bottlecap ({{ $architecture.name }}) + variables: + LAYER_FILE: datadog_bottlecap-{{ $architecture.name }}.zip + script: + - .gitlab/scripts/check_layer_size.sh + +fmt ({{ $architecture.name }}): + stage: test + tags: ["arch:{{ $architecture.name }}"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + needs: [] + script: + - cd bottlecap && cargo fmt + +check ({{ $architecture.name }}): + stage: test + tags: ["arch:{{ $architecture.name }}"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + needs: [] + script: + - cd bottlecap && cargo check + +clippy ({{ $architecture.name }}): + stage: test + tags: ["arch:{{ $architecture.name }}"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + needs: [] + script: + - cd bottlecap && cargo clippy --all-features + +{{ range $environment := (ds "environments").environments }} + +{{ if or (eq $environment.name "prod") }} +sign layer ({{ $architecture.name }}): + stage: sign + tags: ["arch:amd64"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + rules: + - if: '$CI_COMMIT_TAG =~ /^v.*/' + when: manual + needs: + - build bottlecap ({{ $architecture.name }}) + - check layer size ({{ $architecture.name }}) + - fmt ({{ $architecture.name }}) + - check ({{ $architecture.name }}) + - clippy ({{ $architecture.name }}) + dependencies: + - build bottlecap ({{ $architecture.name }}) + artifacts: # Re specify artifacts so the modified signed file is passed + expire_in: 1 day # Signed layers should expire after 1 day + paths: + - .layers/datadog_bottlecap-{{ $architecture.name }}.zip + variables: + LAYER_FILE: datadog_bottlecap-{{ $architecture.name }}.zip + before_script: + - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source .gitlab/scripts/get_secrets.sh + script: + - .gitlab/scripts/sign_layers.sh {{ $environment.name }} +{{ end }} + +publish layer {{ $environment.name }} ({{ $architecture.name }}): + stage: publish + tags: ["arch:amd64"] + image: ${DOCKER_TARGET_IMAGE}:${DOCKER_TARGET_VERSION} + rules: + - if: '"{{ $environment.name }}" =~ /^(sandbox|staging)/' + when: manual + allow_failure: true + - if: '$CI_COMMIT_TAG =~ /^v.*/' + needs: +{{ if or (eq $environment.name "prod") }} + - sign layer ({{ $architecture.name }}) +{{ else }} + - build bottlecap ({{ $architecture.name }}) + - check layer size ({{ $architecture.name }}) + - fmt ({{ $architecture.name }}) + - check ({{ $architecture.name }}) + - clippy ({{ $architecture.name }}) +{{ end }} + dependencies: +{{ if or (eq $environment.name "prod") }} + - sign layer ({{ $architecture.name }}) +{{ else }} + - build bottlecap ({{ $architecture.name }}) +{{ end }} + 
parallel: + matrix: + - REGION: {{ range (ds "regions").regions }} + - {{ .code }} + {{- end}} + variables: + ARCHITECTURE: {{ $architecture.name }} + LAYER_FILE: datadog_bottlecap-{{ $architecture.name }}.zip + STAGE: {{ $environment.name }} + before_script: + - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source .gitlab/scripts/get_secrets.sh + script: + - .gitlab/scripts/publish_layers.sh + +{{- end }} # environments end + +{{- end }} # architectures end diff --git a/scripts/Dockerfile.bottlecap.build b/scripts/Dockerfile.bottlecap.build index 78d7d9400..cf4e765e0 100644 --- a/scripts/Dockerfile.bottlecap.build +++ b/scripts/Dockerfile.bottlecap.build @@ -2,14 +2,19 @@ FROM public.ecr.aws/lambda/provided:al2 as bottlecap-builder ARG PLATFORM -RUN yum install -y curl gcc gcc-c++ make unzip openssl openssl-devel +RUN yum install -y curl gcc gcc-c++ make unzip + # Install Protocol Buffers compiler by hand, since AL2 does not have a recent enough version. COPY ./scripts/install-protoc.sh / RUN chmod +x /install-protoc.sh && /install-protoc.sh + +# Install Rust Toolchain RUN curl https://sh.rustup.rs -sSf | \ sh -s -- --profile minimal --default-toolchain nightly-$PLATFORM-unknown-linux-gnu -y ENV PATH=/root/.cargo/bin:$PATH RUN rustup component add rust-src --toolchain nightly-$PLATFORM-unknown-linux-gnu + +# Build Bottlecap RUN mkdir -p /tmp/dd COPY ./bottlecap/src /tmp/dd/bottlecap/src COPY ./bottlecap/Cargo.toml /tmp/dd/bottlecap/Cargo.toml @@ -19,13 +24,17 @@ WORKDIR /tmp/dd/bottlecap RUN --mount=type=cache,target=/usr/local/cargo/registry cargo +nightly build -Z build-std=std,panic_abort -Z build-std-features=panic_immediate_abort --release --target $PLATFORM-unknown-linux-gnu RUN cp /tmp/dd/bottlecap/target/$PLATFORM-unknown-linux-gnu/release/bottlecap /tmp/dd/bottlecap/bottlecap -# zip the extension +# Zip Extension FROM ubuntu:latest as compresser ARG DATADOG_WRAPPER=datadog_wrapper +ARG GO_AGENT_PATH RUN apt-get update RUN apt-get install -y zip binutils -COPY --from=public.ecr.aws/datadog/lambda-extension:57 /opt/extensions/datadog-agent /datadog-agent-go + +COPY .layers/$GO_AGENT_PATH/extensions/datadog-agent /datadog-agent-go +RUN strip /datadog-agent-go # just in case + RUN mkdir /extensions WORKDIR /extensions diff --git a/scripts/Dockerfile.bottlecap.dev b/scripts/Dockerfile.bottlecap.dev new file mode 100644 index 000000000..78d7d9400 --- /dev/null +++ b/scripts/Dockerfile.bottlecap.dev @@ -0,0 +1,41 @@ +# syntax = docker/dockerfile:experimental + +FROM public.ecr.aws/lambda/provided:al2 as bottlecap-builder +ARG PLATFORM +RUN yum install -y curl gcc gcc-c++ make unzip openssl openssl-devel +# Install Protocol Buffers compiler by hand, since AL2 does not have a recent enough version. 
+COPY ./scripts/install-protoc.sh / +RUN chmod +x /install-protoc.sh && /install-protoc.sh +RUN curl https://sh.rustup.rs -sSf | \ + sh -s -- --profile minimal --default-toolchain nightly-$PLATFORM-unknown-linux-gnu -y +ENV PATH=/root/.cargo/bin:$PATH +RUN rustup component add rust-src --toolchain nightly-$PLATFORM-unknown-linux-gnu +RUN mkdir -p /tmp/dd +COPY ./bottlecap/src /tmp/dd/bottlecap/src +COPY ./bottlecap/Cargo.toml /tmp/dd/bottlecap/Cargo.toml +COPY ./bottlecap/Cargo.lock /tmp/dd/bottlecap/Cargo.lock +ENV RUSTFLAGS="-C panic=abort -Zlocation-detail=none" +WORKDIR /tmp/dd/bottlecap +RUN --mount=type=cache,target=/usr/local/cargo/registry cargo +nightly build -Z build-std=std,panic_abort -Z build-std-features=panic_immediate_abort --release --target $PLATFORM-unknown-linux-gnu +RUN cp /tmp/dd/bottlecap/target/$PLATFORM-unknown-linux-gnu/release/bottlecap /tmp/dd/bottlecap/bottlecap + +# zip the extension +FROM ubuntu:latest as compresser +ARG DATADOG_WRAPPER=datadog_wrapper + +RUN apt-get update +RUN apt-get install -y zip binutils +COPY --from=public.ecr.aws/datadog/lambda-extension:57 /opt/extensions/datadog-agent /datadog-agent-go +RUN mkdir /extensions +WORKDIR /extensions + +COPY --from=bottlecap-builder /tmp/dd/bottlecap/bottlecap /extensions/datadog-agent + +COPY ./scripts/$DATADOG_WRAPPER /$DATADOG_WRAPPER +RUN chmod +x /$DATADOG_WRAPPER +RUN zip -r datadog_extension.zip /extensions /$DATADOG_WRAPPER /datadog-agent-go + +# keep the smallest possible docker image +FROM scratch +COPY --from=compresser /extensions/datadog_extension.zip / +ENTRYPOINT ["/datadog_extension.zip"] diff --git a/scripts/build_bottlecap_layer.sh b/scripts/build_bottlecap_layer.sh index 3e1f40fd4..cf8cd0d22 100755 --- a/scripts/build_bottlecap_layer.sh +++ b/scripts/build_bottlecap_layer.sh @@ -37,15 +37,16 @@ _docker_build_bottlecap_zip() { PLATFORM="aarch64" fi - docker build --platform linux/${arch} \ + docker buildx build --platform linux/${arch} \ -t datadog/build-bottlecap-${arch} \ - -f ./scripts/Dockerfile.bottlecap.build \ + -f ./scripts/Dockerfile.bottlecap.dev \ --build-arg PLATFORM=$PLATFORM \ - . --load - local dockerId=$(docker create datadog/build-bottlecap-${arch}) - docker cp $dockerId:/datadog_extension.zip $TARGET_DIR/datadog_bottlecap-${arch}.zip - docker rm $dockerId - unzip $TARGET_DIR/datadog_bottlecap-${arch}.zip -d $TARGET_DIR/datadog_bottlecap-${arch} + . -o $TARGET_DIR/datadog_bottlecap-${arch} + + cp $TARGET_DIR/datadog_bottlecap-${arch}/datadog_extension.zip $TARGET_DIR/datadog_bottlecap-${arch}.zip + + unzip $TARGET_DIR/datadog_bottlecap-${arch}/datadog_extension.zip -d $TARGET_DIR/datadog_bottlecap-${arch} + rm -rf $TARGET_DIR/datadog_bottlecap-${arch}/datadog_extension.zip } build_for_arch() {
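# Note on the buildx "-o" usage above (a sketch, not part of the diff): passing a bare
# directory to -o is shorthand for --output type=local,dest=<dir>, so buildx exports the
# final stage's filesystem to the host instead of loading an image. Since the last stage of
# Dockerfile.bottlecap.dev is FROM scratch and holds only datadog_extension.zip, the zip
# lands directly in the export directory, which removes the docker create / docker cp step.
# Example invocation mirroring build_bottlecap_layer.sh (arm64 shown):
docker buildx build \
    --platform linux/arm64 \
    -t datadog/build-bottlecap-arm64 \
    -f ./scripts/Dockerfile.bottlecap.dev \
    --build-arg PLATFORM=aarch64 \
    . -o .layers/datadog_bottlecap-arm64
ls .layers/datadog_bottlecap-arm64/datadog_extension.zip    # the exported layer zip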