diff --git a/README.md b/README.md
index 0c4d9d00..14d2d8cb 100644
--- a/README.md
+++ b/README.md
@@ -103,6 +103,7 @@ Validate parameters and permissions without any function code or configuration m
          code-artifacts-dir: my-code-artifacts-dir
          dry-run: true
 ```
+**Note**: Dry run still calls `GetFunctionConfiguration` to check that the function exists and to diff the requested configuration against what is currently deployed.
 
 ## Build from Source
 To automate building your source code, add a build step based on your runtime and build process. This build step should be performed before the AWS Lambda Deploy step, and AWS Lambda Deploy's `code-artifacts-dir` parameter will typically be set to the build step's code artifact output directory.
diff --git a/__tests__/dry_run_mode.test.js b/__tests__/dry_run_mode.test.js
index 5d72b197..f0466f62 100644
--- a/__tests__/dry_run_mode.test.js
+++ b/__tests__/dry_run_mode.test.js
@@ -38,4 +38,73 @@ describe('Dry Run Mode Tests', () => {
 
     await mainModule.run();
   });
-});
\ No newline at end of file
+
+  test('should check if function exists even in dry run mode', async () => {
+    const mockSend = jest.fn()
+      // First call: checkFunctionExists - function exists
+      .mockResolvedValueOnce({ Runtime: 'nodejs18.x' })
+      // Second call: GetFunctionConfigurationCommand for config check
+      .mockResolvedValueOnce({ Runtime: 'nodejs18.x', MemorySize: 256 })
+      // Third call: UpdateFunctionCodeCommand dry run
+      .mockResolvedValueOnce({
+        FunctionArn: 'arn:aws:lambda:us-east-1:123456789012:function:test-function',
+        Version: '$LATEST'
+      });
+
+    const mockClient = { send: mockSend };
+    LambdaClient.mockImplementation(() => mockClient);
+
+    validations.validateAllInputs = jest.fn().mockReturnValue({
+      valid: true,
+      functionName: 'test-function',
+      parsedEnvironment: {},
+      dryRun: true,
+      codeArtifactsDir: './code'
+    });
+
+    jest.spyOn(mainModule, 'packageCodeArtifacts').mockResolvedValue('/tmp/test.zip');
+    const fs = require('fs/promises');
+    fs.readFile = jest.fn().mockResolvedValue(Buffer.from('test'));
+
+    await mainModule.run();
+
+    // Verify that checkFunctionExists was called (first send call should be GetFunctionConfigurationCommand)
+    expect(mockSend).toHaveBeenCalled();
+    expect(mockSend.mock.calls[0][0].constructor.name).toEqual('GetFunctionConfigurationCommand');
+
+    // Verify dry run proceeded successfully
+    expect(core.info).toHaveBeenCalledWith('Checking if test-function exists');
+    expect(core.info).toHaveBeenCalledWith('DRY RUN MODE: No AWS resources will be created or modified');
+    expect(core.setFailed).not.toHaveBeenCalledWith('DRY RUN MODE can only be used for updating function code of existing functions');
+  });
+
+  test('should fail dry run mode when function does not exist', async () => {
+    const mockSend = jest.fn()
+      // checkFunctionExists - function does not exist (ResourceNotFoundException)
+      .mockRejectedValueOnce({ name: 'ResourceNotFoundException' });
+
+    const mockClient = { send: mockSend };
+    LambdaClient.mockImplementation(() => mockClient);
+
+    validations.validateAllInputs = jest.fn().mockReturnValue({
+      valid: true,
+      functionName: 'non-existent-function',
+      parsedEnvironment: {},
+      dryRun: true,
+      codeArtifactsDir: './code'
+    });
+
+    jest.spyOn(mainModule, 'packageCodeArtifacts').mockResolvedValue('/tmp/test.zip');
+
+    await mainModule.run();
+
+    // Verify that checkFunctionExists was called (first send call should be GetFunctionConfigurationCommand)
+    expect(mockSend).toHaveBeenCalled();
+    expect(mockSend.mock.calls[0][0].constructor.name).toEqual('GetFunctionConfigurationCommand');
+
+    // Verify dry run failed with correct error message
+    expect(core.info).toHaveBeenCalledWith('Checking if non-existent-function exists');
+    expect(core.info).toHaveBeenCalledWith('DRY RUN MODE: No AWS resources will be created or modified');
+    expect(core.setFailed).toHaveBeenCalledWith('DRY RUN MODE can only be used for updating function code of existing functions');
+  });
+});
diff --git a/__tests__/s3_key_generation.test.js b/__tests__/s3_key_generation.test.js
index 7b101e4b..96840a29 100644
--- a/__tests__/s3_key_generation.test.js
+++ b/__tests__/s3_key_generation.test.js
@@ -37,17 +37,26 @@ describe('S3 Key Generation and Dry Run Tests', () => {
   });
 
   test('should log dry run message and fail when function does not exist', async () => {
+    const mockSend = jest.fn()
+      // checkFunctionExists - function does not exist (ResourceNotFoundException)
+      .mockRejectedValueOnce({ name: 'ResourceNotFoundException' });
+
+    const mockClient = { send: mockSend };
+    LambdaClient.mockImplementation(() => mockClient);
+
     validations.validateAllInputs = jest.fn().mockReturnValue({
       valid: true,
       functionName: 'test-function',
-      dryRun: true
+      parsedEnvironment: {},
+      dryRun: true,
+      codeArtifactsDir: './code'
     });
 
-    jest.spyOn(mainModule, 'checkFunctionExists').mockResolvedValue(false);
+    jest.spyOn(mainModule, 'packageCodeArtifacts').mockResolvedValue('/tmp/test.zip');
 
     await mainModule.run();
 
     expect(core.info).toHaveBeenCalledWith('DRY RUN MODE: No AWS resources will be created or modified');
     expect(core.setFailed).toHaveBeenCalledWith('DRY RUN MODE can only be used for updating function code of existing functions');
   });
-});
\ No newline at end of file
+});
diff --git a/dist/index.js b/dist/index.js
index fd602467..7f002737 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -8,13 +8,13 @@ const core = __nccwpck_require__(2186);
 const { LambdaClient, CreateFunctionCommand, GetFunctionConfigurationCommand, UpdateFunctionConfigurationCommand, UpdateFunctionCodeCommand, waitUntilFunctionUpdated } = __nccwpck_require__(6584);
 const { S3Client, PutObjectCommand, CreateBucketCommand, HeadBucketCommand, PutBucketEncryptionCommand, PutPublicAccessBlockCommand, PutBucketVersioningCommand} = __nccwpck_require__(9250);
 const { STSClient, GetCallerIdentityCommand } = __nccwpck_require__(2209);
-const fs = __nccwpck_require__(3292); 
+const fs = __nccwpck_require__(3292);
 const path = __nccwpck_require__(1017);
 const AdmZip = __nccwpck_require__(6761);
 const validations = __nccwpck_require__(5764);
 const { version } = __nccwpck_require__(4147);
 
 async function run() {
-  try { 
+  try {
     // Receiving and validating inputs
     const inputs = validations.validateAllInputs();
@@ -26,11 +26,11 @@ async function run() {
       functionName, codeArtifactsDir, ephemeralStorage, parsedMemorySize,
       timeout, role, codeSigningConfigArn, kmsKeyArn, sourceKmsKeyArn,
-      environment, vpcConfig, deadLetterConfig, tracingConfig, 
-      layers, fileSystemConfigs, imageConfig, snapStart, 
+      environment, vpcConfig, deadLetterConfig, tracingConfig,
+      layers, fileSystemConfigs, imageConfig, snapStart,
      loggingConfig, tags,
-      parsedEnvironment, parsedVpcConfig, parsedDeadLetterConfig, 
-      parsedTracingConfig, parsedLayers, parsedFileSystemConfigs, 
+      parsedEnvironment, parsedVpcConfig, parsedDeadLetterConfig,
+      parsedTracingConfig, parsedLayers, parsedFileSystemConfigs,
      parsedImageConfig, parsedSnapStart, parsedLoggingConfig, parsedTags,
      functionDescription, dryRun,
publish, revisionId, runtime, handler, architectures @@ -47,7 +47,7 @@ async function run() { region, customUserAgent: customUserAgentString }); - + // Handling S3 Buckets const { s3Bucket, useS3Method } = inputs; let s3Key = inputs.s3Key; @@ -57,19 +57,17 @@ async function run() { } // Determine if function exists - let functionExists; - if (!dryRun) { - core.info(`Checking if ${functionName} exists`); - functionExists = await checkFunctionExists(client, functionName); - } + core.info(`Checking if ${functionName} exists`); + let functionExists = await checkFunctionExists(client, functionName); + if (dryRun) { core.info('DRY RUN MODE: No AWS resources will be created or modified'); if (!functionExists) { core.setFailed('DRY RUN MODE can only be used for updating function code of existing functions'); - return; + return; } } - + // Creating zip file core.info(`Packaging code artifacts from ${codeArtifactsDir}`); let finalZipPath = await packageCodeArtifacts(codeArtifactsDir); @@ -117,7 +115,7 @@ async function run() { if (dryRun) { core.info('[DRY RUN] Configuration updates are not simulated in dry run mode'); return; - } + } await updateFunctionConfiguration(client, { functionName, @@ -168,7 +166,7 @@ async function run() { }); core.info('Lambda function deployment completed successfully'); - + } catch (error) { if (error.name === 'ThrottlingException' || error.name === 'TooManyRequestsException' || error.$metadata?.httpStatusCode === 429) { core.setFailed(`Rate limit exceeded and maximum retries reached: ${error.message}`); @@ -189,45 +187,45 @@ async function run() { async function packageCodeArtifacts(artifactsDir) { const tempDir = path.join((__nccwpck_require__(2037).tmpdir)(), `lambda-temp-${Date.now()}`); const zipPath = path.join((__nccwpck_require__(2037).tmpdir)(), `lambda-function-${Date.now()}.zip`); - + try { try { await fs.rm(tempDir, { recursive: true, force: true }); } catch (error) { } - + await fs.mkdir(tempDir, { recursive: true }); const workingDir = process.cwd(); - + if (!artifactsDir) { throw new Error('Code artifacts directory path must be provided'); } - + const resolvedArtifactsDir = validations.validateAndResolvePath(artifactsDir, workingDir); - + core.info(`Copying artifacts from ${resolvedArtifactsDir} to ${tempDir}`); - + try { await fs.access(resolvedArtifactsDir); } catch (error) { throw new Error(`Code artifacts directory '${resolvedArtifactsDir}' does not exist or is not accessible: ${error.message}`); } - + const sourceFiles = await fs.readdir(resolvedArtifactsDir); - + if (sourceFiles.length === 0) { throw new Error(`Code artifacts directory '${resolvedArtifactsDir}' is empty, no files to package`); } - + core.info(`Found ${sourceFiles.length} files/directories to copy`); - + for (const file of sourceFiles) { const sourcePath = path.join(resolvedArtifactsDir, file); const destPath = path.join(tempDir, file); - + core.info(`Copying ${sourcePath} to ${destPath}`); - + await fs.cp( sourcePath, destPath, @@ -237,12 +235,12 @@ async function packageCodeArtifacts(artifactsDir) { core.info('Creating ZIP file with standard options'); const zip = new AdmZip(); - + const tempFiles = await fs.readdir(tempDir, { withFileTypes: true }); - + for (const file of tempFiles) { const fullPath = path.join(tempDir, file.name); - + if (file.isDirectory()) { core.info(`Adding directory: ${file.name}`); zip.addLocalFolder(fullPath, file.name); @@ -251,17 +249,17 @@ async function packageCodeArtifacts(artifactsDir) { zip.addLocalFile(fullPath); } } - + core.info('Writing ZIP 
file with standard options'); zip.writeZip(zipPath); - + try { const stats = await fs.stat(zipPath); core.info(`Generated ZIP file size: ${stats.size} bytes`); - + const verifyZip = new AdmZip(zipPath); const entries = verifyZip.getEntries(); - + core.info(`ZIP verification passed - contains ${entries.length} entries:`); for (let i = 0; i < entries.length; i++) { core.info(` ${i+1}. ${entries[i].entryName} (${entries[i].header.size} bytes)`); @@ -298,7 +296,7 @@ async function checkFunctionExists(client, functionName) { // Helper functions for creating Lambda function async function createFunction(client, inputs, functionExists) { const { - functionName, region, finalZipPath, dryRun, role, s3Bucket, s3Key, + functionName, region, finalZipPath, dryRun, role, s3Bucket, s3Key, sourceKmsKeyArn, runtime, handler, functionDescription, parsedMemorySize, timeout, publish, architectures, ephemeralStorage, revisionId, vpcConfig, parsedEnvironment, deadLetterConfig, tracingConfig, @@ -307,11 +305,11 @@ async function createFunction(client, inputs, functionExists) { parsedTracingConfig, parsedLayers, parsedFileSystemConfigs, parsedImageConfig, parsedSnapStart, parsedLoggingConfig, parsedTags } = inputs; - + if (!functionExists) { if (dryRun) { core.setFailed('DRY RUN MODE can only be used for updating function code of existing functions'); - return; + return; } core.info(`Function ${functionName} doesn't exist, creating new function`); @@ -330,7 +328,7 @@ async function createFunction(client, inputs, functionExists) { try { await uploadToS3(finalZipPath, s3Bucket, s3Key, region); core.info(`Successfully uploaded package to S3: s3://${s3Bucket}/${s3Key}`); - + codeParameter = { S3Bucket: s3Bucket, S3Key: s3Key, @@ -341,13 +339,13 @@ async function createFunction(client, inputs, functionExists) { if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } else { try { const zipFileContent = await fs.readFile(finalZipPath); core.info(`Zip file read successfully, size: ${zipFileContent.length} bytes`); - + codeParameter = { ZipFile: zipFileContent, ...(sourceKmsKeyArn && { SourceKmsKeyArn: sourceKmsKeyArn }) @@ -362,7 +360,7 @@ async function createFunction(client, inputs, functionExists) { if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } @@ -396,14 +394,14 @@ async function createFunction(client, inputs, functionExists) { core.info(`Creating new Lambda function: ${functionName}`); const command = new CreateFunctionCommand(input); const response = await client.send(command); - + core.setOutput('function-arn', response.FunctionArn); if (response.Version) { core.setOutput('version', response.Version); } - + core.info('Lambda function created successfully'); - + core.info(`Waiting for function ${functionName} to become active before proceeding`); await waitForFunctionActive(client, functionName); } catch (error) { @@ -416,49 +414,49 @@ async function createFunction(client, inputs, functionExists) { } else { core.setFailed(`Failed to create function: ${error.message}`); } - + if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } } async function waitForFunctionActive(client, functionName, waitForMinutes = 5) { const MAX_WAIT_MINUTES = 30; - + if (waitForMinutes > MAX_WAIT_MINUTES) { waitForMinutes = MAX_WAIT_MINUTES; core.info(`Wait time capped to maximum of ${MAX_WAIT_MINUTES} minutes`); } - + core.info(`Waiting for function ${functionName} to become active. 
Will wait for up to ${waitForMinutes} minutes`); - + const startTime = Date.now(); const maxWaitTimeMs = waitForMinutes * 60 * 1000; - const DELAY_BETWEEN_CHECKS_MS = 5000; + const DELAY_BETWEEN_CHECKS_MS = 5000; let lastState = null; - + while (Date.now() - startTime < maxWaitTimeMs) { try { const command = new GetFunctionConfigurationCommand({ FunctionName: functionName }); const response = await client.send(command); - + const currentState = response.State; - + if (currentState !== lastState) { core.info(`Function ${functionName} is in state: ${currentState}`); lastState = currentState; } - + if (currentState === 'Active') { core.info(`Function ${functionName} is now active`); return; } else if (currentState === 'Failed') { throw new Error(`Function ${functionName} deployment failed with reason: ${response.StateReason || 'Unknown reason'}`); } - + await new Promise(resolve => setTimeout(resolve, DELAY_BETWEEN_CHECKS_MS)); } catch (error) { if (error.name === 'ResourceNotFoundException') { @@ -471,7 +469,7 @@ async function waitForFunctionActive(client, functionName, waitForMinutes = 5) { } } } - + throw new Error(`Timed out waiting for function ${functionName} to become active after ${waitForMinutes} minutes`); } @@ -523,33 +521,33 @@ async function updateFunctionConfiguration(client, params) { } else { core.setFailed(`Failed to update function configuration: ${error.message}`); } - + if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } async function waitForFunctionUpdated(client, functionName, waitForMinutes = 5) { const MAX_WAIT_MINUTES = 30; - + if (waitForMinutes > MAX_WAIT_MINUTES) { waitForMinutes = MAX_WAIT_MINUTES; core.info(`Wait time capped to maximum of ${MAX_WAIT_MINUTES} minutes`); } - + core.info(`Waiting for function update to complete. Will wait for ${waitForMinutes} minutes`); - + try { await waitUntilFunctionUpdated({ client: client, - minDelay: 2, - maxWaitTime: waitForMinutes * 60, + minDelay: 2, + maxWaitTime: waitForMinutes * 60, }, { FunctionName: functionName }); - + core.info('Function update completed successfully'); } catch (error) { if (error.name === 'TimeoutError') { @@ -578,7 +576,7 @@ async function updateFunctionCode(client, params) { } = params; core.info(`Updating function code for ${functionName} with ${finalZipPath}`); - + try { const commonCodeParams = { FunctionName: functionName, @@ -587,15 +585,15 @@ async function updateFunctionCode(client, params) { ...(revisionId && { RevisionId: revisionId }), ...(sourceKmsKeyArn && { SourceKmsKeyArn: sourceKmsKeyArn }) }; - + let codeInput; - + if (useS3Method) { core.info(`Using S3 deployment method with bucket: ${s3Bucket}, key: ${s3Key}`); await uploadToS3(finalZipPath, s3Bucket, s3Key, region); core.info(`Successfully uploaded package to S3: s3://${s3Bucket}/${s3Key}`); - + codeInput = { ...commonCodeParams, S3Bucket: s3Bucket, @@ -603,7 +601,7 @@ async function updateFunctionCode(client, params) { }; } else { let zipFileContent; - + try { zipFileContent = await fs.readFile(finalZipPath); } catch (error) { @@ -614,22 +612,22 @@ async function updateFunctionCode(client, params) { } else if (error.code === 'EACCES') { core.error('Permission denied. 
Check file access permissions.'); } - + if (error.stack) { core.debug(error.stack); } - + throw error; } - + codeInput = { ...commonCodeParams, ZipFile: zipFileContent }; - + core.info(`Original buffer length: ${zipFileContent.length} bytes`); } - + if (dryRun) { core.info(`[DRY RUN] Performing dry-run function code update with parameters:`); const logInput = {...codeInput}; @@ -638,10 +636,10 @@ async function updateFunctionCode(client, params) { } core.info(JSON.stringify(logInput, null, 2)); codeInput.DryRun = true; - + const command = new UpdateFunctionCodeCommand(codeInput); const response = await client.send(command); - + core.info('[DRY RUN] Function code validation passed'); core.setOutput('function-arn', response.FunctionArn || `arn:aws:lambda:${region}:000000000000:function:${functionName}`); core.setOutput('version', response.Version || '$LATEST'); @@ -664,7 +662,7 @@ async function updateFunctionCode(client, params) { } else { core.setFailed(`Failed to update function code: ${error.message}`); } - + if (error.stack) { core.debug(error.stack); } @@ -680,7 +678,7 @@ async function hasConfigurationChanged(currentConfig, updatedConfig) { const cleanedUpdated = cleanNullKeys(updatedConfig) || {}; let hasChanged = false; - + for (const [key, value] of Object.entries(cleanedUpdated)) { if (value !== undefined) { if (!(key in currentConfig)) { @@ -688,7 +686,7 @@ async function hasConfigurationChanged(currentConfig, updatedConfig) { hasChanged = true; continue; } - + if (typeof value === 'object' && value !== null) { if (!deepEqual(currentConfig[key] || {}, value)) { core.info(`Configuration difference detected in ${key}`); @@ -717,11 +715,11 @@ function isEmptyValue(value) { if ('SubnetIds' in value || 'SecurityGroupIds' in value) { return false; } - return Object.keys(value).length === 0 || + return Object.keys(value).length === 0 || Object.values(value).every(val => isEmptyValue(val)); } - return false; + return false; } function cleanNullKeys(obj) { @@ -734,7 +732,7 @@ function cleanNullKeys(obj) { } const isVpcConfig = obj && typeof obj === 'object' && ('SubnetIds' in obj || 'SecurityGroupIds' in obj); - + if (Array.isArray(obj)) { const filtered = obj.filter(item => !isEmptyValue(item)); return filtered.length > 0 ? filtered : undefined; @@ -752,7 +750,7 @@ function cleanNullKeys(obj) { } if (value === null || value === undefined || value === '') { - continue; + continue; } const cleaned = cleanNullKeys(value); @@ -765,45 +763,45 @@ function cleanNullKeys(obj) { return hasProperties ? 
result : undefined; } - return obj; + return obj; } function deepEqual(obj1, obj2) { if (obj1 === null || obj2 === null || typeof obj1 !== 'object' || typeof obj2 !== 'object') { return obj1 === obj2; } - + if (Array.isArray(obj1) && Array.isArray(obj2)) { if (obj1.length !== obj2.length) { return false; } - + for (let i = 0; i < obj1.length; i++) { if (!deepEqual(obj1[i], obj2[i])) { return false; } } - + return true; } - + if (Array.isArray(obj1) !== Array.isArray(obj2)) { return false; } - + const keys1 = Object.keys(obj1); const keys2 = Object.keys(obj2); - + if (keys1.length !== keys2.length) { return false; } - + for (const key of keys1) { if (!keys2.includes(key) || !deepEqual(obj1[key], obj2[key])) { return false; } } - + return true; } @@ -811,18 +809,18 @@ function deepEqual(obj1, obj2) { function generateS3Key(functionName) { const date = new Date(); const timestamp = date.toISOString().replace(/[:.]/g, '-').replace('T', '-').split('Z')[0]; - + let commitHash = ''; if (process.env.GITHUB_SHA) { commitHash = `-${process.env.GITHUB_SHA.substring(0, 7)}`; } - + return `lambda-deployments/${functionName}/${timestamp}${commitHash}.zip`; } async function checkBucketExists(s3Client, bucketName) { try { - const command = new HeadBucketCommand({ + const command = new HeadBucketCommand({ Bucket: bucketName }); await s3Client.send(command); @@ -842,7 +840,7 @@ async function checkBucketExists(s3Client, bucketName) { statusCode: error.$metadata?.httpStatusCode, requestId: error.$metadata?.requestId })}`); - + if (error.$metadata?.httpStatusCode === 301) { core.error(`REGION MISMATCH ERROR: The bucket "${bucketName}" exists but in a different region than specified (${s3Client.config.region}). S3 buckets are global but region-specific. `); throw new Error(`Bucket "${bucketName}" exists in a different region than ${s3Client.config.region}`); @@ -855,12 +853,12 @@ async function checkBucketExists(s3Client, bucketName) { async function createBucket(s3Client, bucketName, region) { core.info(`Creating S3 bucket: ${bucketName}`); - + try { if (!validateBucketName(bucketName)) { throw new Error(`Invalid bucket name: "${bucketName}". 
Bucket names must be 3-63 characters, lowercase, start/end with a letter/number, and contain only letters, numbers, dots, and hyphens.`); } - + const input = { Bucket: bucketName, }; @@ -873,12 +871,12 @@ async function createBucket(s3Client, bucketName, region) { core.info(`Sending CreateBucket request for bucket: ${bucketName} in region: ${region || 'default'}`); const command = new CreateBucketCommand(input); - + try { const response = await s3Client.send(command); core.info(`Successfully created S3 bucket: ${bucketName}`); core.info(`Bucket location: ${response.Location}`); - + // Apply security configurations after bucket creation try { core.info(`Configuring public access block for bucket: ${bucketName}`); @@ -891,7 +889,7 @@ async function createBucket(s3Client, bucketName, region) { RestrictPublicBuckets: true } })); - + core.info(`Enabling default encryption for bucket: ${bucketName}`); await s3Client.send(new PutBucketEncryptionCommand({ Bucket: bucketName, @@ -906,7 +904,7 @@ async function createBucket(s3Client, bucketName, region) { ] } })); - + core.info(`Enabling versioning for bucket: ${bucketName}`); await s3Client.send(new PutBucketVersioningCommand({ Bucket: bucketName, @@ -914,13 +912,13 @@ async function createBucket(s3Client, bucketName, region) { Status: 'Enabled' } })); - + core.info(`Security configurations successfully applied to bucket: ${bucketName}`); } catch (securityError) { core.warning(`Applied partial security settings to bucket. Some security features couldn't be enabled: ${securityError.message}`); core.debug(securityError.stack); } - + return true; } catch (sendError) { core.error(`Error creating bucket: ${sendError.name} - ${sendError.message}`); @@ -931,7 +929,7 @@ async function createBucket(s3Client, bucketName, region) { statusCode: sendError.$metadata?.httpStatusCode, requestId: sendError.$metadata?.requestId })}`); - + if (sendError.name === 'BucketAlreadyExists' || sendError.name === 'BucketAlreadyOwnedByYou') { core.error(`Bucket name ${bucketName} is already taken but may be owned by another account.`); throw sendError; @@ -951,33 +949,33 @@ async function createBucket(s3Client, bucketName, region) { function validateBucketName(name) { if (!name || typeof name !== 'string') return false; - + if (name.length < 3 || name.length > 63) return false; - + if (!/^[a-z0-9.-]+$/.test(name)) return false; - + if (!/^[a-z0-9].*[a-z0-9]$/.test(name)) return false; - + if (/^(\d{1,3}\.){3}\d{1,3}$/.test(name)) return false; - + if (/\.\./.test(name)) return false; - + if (/^xn--/.test(name)) return false; - + if (/^sthree-/.test(name)) return false; - + if (/^sthree-configurator/.test(name)) return false; - + if (/^amzn-s3-demo-bucket/.test(name)) return false; - + return true; } async function uploadToS3(zipFilePath, bucketName, s3Key, region) { core.info(`Uploading Lambda deployment package to S3: s3://${bucketName}/${s3Key}`); - + try { - const s3Client = new S3Client({ + const s3Client = new S3Client({ region, customUserAgent: `LambdaGitHubAction/${version}` }); @@ -987,14 +985,14 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { } catch (checkError) { core.error(`Failed to check if bucket exists: ${checkError.name} - ${checkError.message}`); core.error(`Error type: ${checkError.name}, Code: ${checkError.code}`); - + if (checkError.$metadata?.httpStatusCode === 403) { throw new Error(`Access denied to S3 bucket`); } else { throw checkError; } } - + if (!bucketExists) { core.info(`Bucket ${bucketName} does not exist. 
Attempting to create it...`); try { @@ -1009,7 +1007,7 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { message: bucketError.message, statusCode: bucketError.$metadata?.httpStatusCode })}`); - + if (bucketError.name === 'BucketAlreadyExists' || bucketError.name === 'BucketAlreadyOwnedByYou') { core.info(`Bucket name ${bucketName} is already taken. Please try a different name.`); } else if (bucketError.$metadata?.httpStatusCode === 403) { @@ -1028,10 +1026,10 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { } throw new Error(`Cannot access deployment package at ${zipFilePath}: ${fileError.message}`); } - + const fileContent = await fs.readFile(zipFilePath); core.info(`Read deployment package, size: ${fileContent.length} bytes`); - + try { expectedBucketOwner = await getAwsAccountId(region); @@ -1046,13 +1044,13 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { Body: fileContent, ExpectedBucketOwner: expectedBucketOwner }; - + core.info(`Sending PutObject request to S3 (bucket: ${bucketName}, key: ${s3Key})`); const command = new PutObjectCommand(input); const response = await s3Client.send(command); - + core.info(`S3 upload successful, file size: ${fileContent.length} bytes`); - + return { bucket: bucketName, key: s3Key, @@ -1067,16 +1065,16 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { statusCode: uploadError.$metadata?.httpStatusCode, requestId: uploadError.$metadata?.requestId })}`); - + if (uploadError.$metadata?.httpStatusCode === 403) { throw new Error('Access denied when uploading to S3. Ensure your IAM policy includes s3:PutObject permission.'); } throw uploadError; } - + } catch (error) { core.error(`S3 upload failed: ${error.name} - ${error.message}`); - + if (error.code === 'NoSuchBucket') { core.error(`Bucket ${bucketName} does not exist and could not be created automatically. Please create it manually or check your permissions.`); } else if (error.code === 'AccessDenied' || error.name === 'AccessDenied' || error.$metadata?.httpStatusCode === 403) { @@ -1091,7 +1089,7 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { core.error(`Invalid bucket name: ${bucketName}. 
Bucket names must follow S3 naming rules.`); core.error('See s3-troubleshooting.md for S3 bucket naming rules.'); } - + throw error; } } @@ -1099,7 +1097,7 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { // Helper function for retrieving AWS account ID async function getAwsAccountId(region) { try { - const stsClient = new STSClient({ + const stsClient = new STSClient({ region, customUserAgent: `LambdaGitHubAction/${version}` }); diff --git a/index.js b/index.js index e07cf12c..d61c9281 100644 --- a/index.js +++ b/index.js @@ -2,13 +2,13 @@ const core = require('@actions/core'); const { LambdaClient, CreateFunctionCommand, GetFunctionConfigurationCommand, UpdateFunctionConfigurationCommand, UpdateFunctionCodeCommand, waitUntilFunctionUpdated } = require('@aws-sdk/client-lambda'); const { S3Client, PutObjectCommand, CreateBucketCommand, HeadBucketCommand, PutBucketEncryptionCommand, PutPublicAccessBlockCommand, PutBucketVersioningCommand} = require('@aws-sdk/client-s3'); const { STSClient, GetCallerIdentityCommand } = require('@aws-sdk/client-sts'); -const fs = require('fs/promises'); +const fs = require('fs/promises'); const path = require('path'); const AdmZip = require('adm-zip'); const validations = require('./validations'); const { version } = require('./package.json'); async function run() { - try { + try { // Receiving and validating inputs const inputs = validations.validateAllInputs(); @@ -20,11 +20,11 @@ async function run() { functionName, codeArtifactsDir, ephemeralStorage, parsedMemorySize, timeout, role, codeSigningConfigArn, kmsKeyArn, sourceKmsKeyArn, - environment, vpcConfig, deadLetterConfig, tracingConfig, - layers, fileSystemConfigs, imageConfig, snapStart, + environment, vpcConfig, deadLetterConfig, tracingConfig, + layers, fileSystemConfigs, imageConfig, snapStart, loggingConfig, tags, - parsedEnvironment, parsedVpcConfig, parsedDeadLetterConfig, - parsedTracingConfig, parsedLayers, parsedFileSystemConfigs, + parsedEnvironment, parsedVpcConfig, parsedDeadLetterConfig, + parsedTracingConfig, parsedLayers, parsedFileSystemConfigs, parsedImageConfig, parsedSnapStart, parsedLoggingConfig, parsedTags, functionDescription, dryRun, publish, revisionId, runtime, handler, architectures @@ -41,7 +41,7 @@ async function run() { region, customUserAgent: customUserAgentString }); - + // Handling S3 Buckets const { s3Bucket, useS3Method } = inputs; let s3Key = inputs.s3Key; @@ -51,19 +51,17 @@ async function run() { } // Determine if function exists - let functionExists; - if (!dryRun) { - core.info(`Checking if ${functionName} exists`); - functionExists = await checkFunctionExists(client, functionName); - } + core.info(`Checking if ${functionName} exists`); + let functionExists = await checkFunctionExists(client, functionName); + if (dryRun) { core.info('DRY RUN MODE: No AWS resources will be created or modified'); if (!functionExists) { core.setFailed('DRY RUN MODE can only be used for updating function code of existing functions'); - return; + return; } } - + // Creating zip file core.info(`Packaging code artifacts from ${codeArtifactsDir}`); let finalZipPath = await packageCodeArtifacts(codeArtifactsDir); @@ -111,7 +109,7 @@ async function run() { if (dryRun) { core.info('[DRY RUN] Configuration updates are not simulated in dry run mode'); return; - } + } await updateFunctionConfiguration(client, { functionName, @@ -162,7 +160,7 @@ async function run() { }); core.info('Lambda function deployment completed successfully'); - + } catch (error) { if (error.name 
=== 'ThrottlingException' || error.name === 'TooManyRequestsException' || error.$metadata?.httpStatusCode === 429) { core.setFailed(`Rate limit exceeded and maximum retries reached: ${error.message}`); @@ -183,45 +181,45 @@ async function run() { async function packageCodeArtifacts(artifactsDir) { const tempDir = path.join(require('os').tmpdir(), `lambda-temp-${Date.now()}`); const zipPath = path.join(require('os').tmpdir(), `lambda-function-${Date.now()}.zip`); - + try { try { await fs.rm(tempDir, { recursive: true, force: true }); } catch (error) { } - + await fs.mkdir(tempDir, { recursive: true }); const workingDir = process.cwd(); - + if (!artifactsDir) { throw new Error('Code artifacts directory path must be provided'); } - + const resolvedArtifactsDir = validations.validateAndResolvePath(artifactsDir, workingDir); - + core.info(`Copying artifacts from ${resolvedArtifactsDir} to ${tempDir}`); - + try { await fs.access(resolvedArtifactsDir); } catch (error) { throw new Error(`Code artifacts directory '${resolvedArtifactsDir}' does not exist or is not accessible: ${error.message}`); } - + const sourceFiles = await fs.readdir(resolvedArtifactsDir); - + if (sourceFiles.length === 0) { throw new Error(`Code artifacts directory '${resolvedArtifactsDir}' is empty, no files to package`); } - + core.info(`Found ${sourceFiles.length} files/directories to copy`); - + for (const file of sourceFiles) { const sourcePath = path.join(resolvedArtifactsDir, file); const destPath = path.join(tempDir, file); - + core.info(`Copying ${sourcePath} to ${destPath}`); - + await fs.cp( sourcePath, destPath, @@ -231,12 +229,12 @@ async function packageCodeArtifacts(artifactsDir) { core.info('Creating ZIP file with standard options'); const zip = new AdmZip(); - + const tempFiles = await fs.readdir(tempDir, { withFileTypes: true }); - + for (const file of tempFiles) { const fullPath = path.join(tempDir, file.name); - + if (file.isDirectory()) { core.info(`Adding directory: ${file.name}`); zip.addLocalFolder(fullPath, file.name); @@ -245,17 +243,17 @@ async function packageCodeArtifacts(artifactsDir) { zip.addLocalFile(fullPath); } } - + core.info('Writing ZIP file with standard options'); zip.writeZip(zipPath); - + try { const stats = await fs.stat(zipPath); core.info(`Generated ZIP file size: ${stats.size} bytes`); - + const verifyZip = new AdmZip(zipPath); const entries = verifyZip.getEntries(); - + core.info(`ZIP verification passed - contains ${entries.length} entries:`); for (let i = 0; i < entries.length; i++) { core.info(` ${i+1}. 
${entries[i].entryName} (${entries[i].header.size} bytes)`); @@ -292,7 +290,7 @@ async function checkFunctionExists(client, functionName) { // Helper functions for creating Lambda function async function createFunction(client, inputs, functionExists) { const { - functionName, region, finalZipPath, dryRun, role, s3Bucket, s3Key, + functionName, region, finalZipPath, dryRun, role, s3Bucket, s3Key, sourceKmsKeyArn, runtime, handler, functionDescription, parsedMemorySize, timeout, publish, architectures, ephemeralStorage, revisionId, vpcConfig, parsedEnvironment, deadLetterConfig, tracingConfig, @@ -301,11 +299,11 @@ async function createFunction(client, inputs, functionExists) { parsedTracingConfig, parsedLayers, parsedFileSystemConfigs, parsedImageConfig, parsedSnapStart, parsedLoggingConfig, parsedTags } = inputs; - + if (!functionExists) { if (dryRun) { core.setFailed('DRY RUN MODE can only be used for updating function code of existing functions'); - return; + return; } core.info(`Function ${functionName} doesn't exist, creating new function`); @@ -324,7 +322,7 @@ async function createFunction(client, inputs, functionExists) { try { await uploadToS3(finalZipPath, s3Bucket, s3Key, region); core.info(`Successfully uploaded package to S3: s3://${s3Bucket}/${s3Key}`); - + codeParameter = { S3Bucket: s3Bucket, S3Key: s3Key, @@ -335,13 +333,13 @@ async function createFunction(client, inputs, functionExists) { if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } else { try { const zipFileContent = await fs.readFile(finalZipPath); core.info(`Zip file read successfully, size: ${zipFileContent.length} bytes`); - + codeParameter = { ZipFile: zipFileContent, ...(sourceKmsKeyArn && { SourceKmsKeyArn: sourceKmsKeyArn }) @@ -356,7 +354,7 @@ async function createFunction(client, inputs, functionExists) { if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } @@ -390,14 +388,14 @@ async function createFunction(client, inputs, functionExists) { core.info(`Creating new Lambda function: ${functionName}`); const command = new CreateFunctionCommand(input); const response = await client.send(command); - + core.setOutput('function-arn', response.FunctionArn); if (response.Version) { core.setOutput('version', response.Version); } - + core.info('Lambda function created successfully'); - + core.info(`Waiting for function ${functionName} to become active before proceeding`); await waitForFunctionActive(client, functionName); } catch (error) { @@ -410,49 +408,49 @@ async function createFunction(client, inputs, functionExists) { } else { core.setFailed(`Failed to create function: ${error.message}`); } - + if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } } async function waitForFunctionActive(client, functionName, waitForMinutes = 5) { const MAX_WAIT_MINUTES = 30; - + if (waitForMinutes > MAX_WAIT_MINUTES) { waitForMinutes = MAX_WAIT_MINUTES; core.info(`Wait time capped to maximum of ${MAX_WAIT_MINUTES} minutes`); } - + core.info(`Waiting for function ${functionName} to become active. 
Will wait for up to ${waitForMinutes} minutes`); - + const startTime = Date.now(); const maxWaitTimeMs = waitForMinutes * 60 * 1000; - const DELAY_BETWEEN_CHECKS_MS = 5000; + const DELAY_BETWEEN_CHECKS_MS = 5000; let lastState = null; - + while (Date.now() - startTime < maxWaitTimeMs) { try { const command = new GetFunctionConfigurationCommand({ FunctionName: functionName }); const response = await client.send(command); - + const currentState = response.State; - + if (currentState !== lastState) { core.info(`Function ${functionName} is in state: ${currentState}`); lastState = currentState; } - + if (currentState === 'Active') { core.info(`Function ${functionName} is now active`); return; } else if (currentState === 'Failed') { throw new Error(`Function ${functionName} deployment failed with reason: ${response.StateReason || 'Unknown reason'}`); } - + await new Promise(resolve => setTimeout(resolve, DELAY_BETWEEN_CHECKS_MS)); } catch (error) { if (error.name === 'ResourceNotFoundException') { @@ -465,7 +463,7 @@ async function waitForFunctionActive(client, functionName, waitForMinutes = 5) { } } } - + throw new Error(`Timed out waiting for function ${functionName} to become active after ${waitForMinutes} minutes`); } @@ -517,33 +515,33 @@ async function updateFunctionConfiguration(client, params) { } else { core.setFailed(`Failed to update function configuration: ${error.message}`); } - + if (error.stack) { core.debug(error.stack); } - throw error; + throw error; } } async function waitForFunctionUpdated(client, functionName, waitForMinutes = 5) { const MAX_WAIT_MINUTES = 30; - + if (waitForMinutes > MAX_WAIT_MINUTES) { waitForMinutes = MAX_WAIT_MINUTES; core.info(`Wait time capped to maximum of ${MAX_WAIT_MINUTES} minutes`); } - + core.info(`Waiting for function update to complete. Will wait for ${waitForMinutes} minutes`); - + try { await waitUntilFunctionUpdated({ client: client, - minDelay: 2, - maxWaitTime: waitForMinutes * 60, + minDelay: 2, + maxWaitTime: waitForMinutes * 60, }, { FunctionName: functionName }); - + core.info('Function update completed successfully'); } catch (error) { if (error.name === 'TimeoutError') { @@ -572,7 +570,7 @@ async function updateFunctionCode(client, params) { } = params; core.info(`Updating function code for ${functionName} with ${finalZipPath}`); - + try { const commonCodeParams = { FunctionName: functionName, @@ -581,15 +579,15 @@ async function updateFunctionCode(client, params) { ...(revisionId && { RevisionId: revisionId }), ...(sourceKmsKeyArn && { SourceKmsKeyArn: sourceKmsKeyArn }) }; - + let codeInput; - + if (useS3Method) { core.info(`Using S3 deployment method with bucket: ${s3Bucket}, key: ${s3Key}`); await uploadToS3(finalZipPath, s3Bucket, s3Key, region); core.info(`Successfully uploaded package to S3: s3://${s3Bucket}/${s3Key}`); - + codeInput = { ...commonCodeParams, S3Bucket: s3Bucket, @@ -597,7 +595,7 @@ async function updateFunctionCode(client, params) { }; } else { let zipFileContent; - + try { zipFileContent = await fs.readFile(finalZipPath); } catch (error) { @@ -608,22 +606,22 @@ async function updateFunctionCode(client, params) { } else if (error.code === 'EACCES') { core.error('Permission denied. 
Check file access permissions.'); } - + if (error.stack) { core.debug(error.stack); } - + throw error; } - + codeInput = { ...commonCodeParams, ZipFile: zipFileContent }; - + core.info(`Original buffer length: ${zipFileContent.length} bytes`); } - + if (dryRun) { core.info(`[DRY RUN] Performing dry-run function code update with parameters:`); const logInput = {...codeInput}; @@ -632,10 +630,10 @@ async function updateFunctionCode(client, params) { } core.info(JSON.stringify(logInput, null, 2)); codeInput.DryRun = true; - + const command = new UpdateFunctionCodeCommand(codeInput); const response = await client.send(command); - + core.info('[DRY RUN] Function code validation passed'); core.setOutput('function-arn', response.FunctionArn || `arn:aws:lambda:${region}:000000000000:function:${functionName}`); core.setOutput('version', response.Version || '$LATEST'); @@ -658,7 +656,7 @@ async function updateFunctionCode(client, params) { } else { core.setFailed(`Failed to update function code: ${error.message}`); } - + if (error.stack) { core.debug(error.stack); } @@ -674,7 +672,7 @@ async function hasConfigurationChanged(currentConfig, updatedConfig) { const cleanedUpdated = cleanNullKeys(updatedConfig) || {}; let hasChanged = false; - + for (const [key, value] of Object.entries(cleanedUpdated)) { if (value !== undefined) { if (!(key in currentConfig)) { @@ -682,7 +680,7 @@ async function hasConfigurationChanged(currentConfig, updatedConfig) { hasChanged = true; continue; } - + if (typeof value === 'object' && value !== null) { if (!deepEqual(currentConfig[key] || {}, value)) { core.info(`Configuration difference detected in ${key}`); @@ -711,11 +709,11 @@ function isEmptyValue(value) { if ('SubnetIds' in value || 'SecurityGroupIds' in value) { return false; } - return Object.keys(value).length === 0 || + return Object.keys(value).length === 0 || Object.values(value).every(val => isEmptyValue(val)); } - return false; + return false; } function cleanNullKeys(obj) { @@ -728,7 +726,7 @@ function cleanNullKeys(obj) { } const isVpcConfig = obj && typeof obj === 'object' && ('SubnetIds' in obj || 'SecurityGroupIds' in obj); - + if (Array.isArray(obj)) { const filtered = obj.filter(item => !isEmptyValue(item)); return filtered.length > 0 ? filtered : undefined; @@ -746,7 +744,7 @@ function cleanNullKeys(obj) { } if (value === null || value === undefined || value === '') { - continue; + continue; } const cleaned = cleanNullKeys(value); @@ -759,45 +757,45 @@ function cleanNullKeys(obj) { return hasProperties ? 
result : undefined; } - return obj; + return obj; } function deepEqual(obj1, obj2) { if (obj1 === null || obj2 === null || typeof obj1 !== 'object' || typeof obj2 !== 'object') { return obj1 === obj2; } - + if (Array.isArray(obj1) && Array.isArray(obj2)) { if (obj1.length !== obj2.length) { return false; } - + for (let i = 0; i < obj1.length; i++) { if (!deepEqual(obj1[i], obj2[i])) { return false; } } - + return true; } - + if (Array.isArray(obj1) !== Array.isArray(obj2)) { return false; } - + const keys1 = Object.keys(obj1); const keys2 = Object.keys(obj2); - + if (keys1.length !== keys2.length) { return false; } - + for (const key of keys1) { if (!keys2.includes(key) || !deepEqual(obj1[key], obj2[key])) { return false; } } - + return true; } @@ -805,18 +803,18 @@ function deepEqual(obj1, obj2) { function generateS3Key(functionName) { const date = new Date(); const timestamp = date.toISOString().replace(/[:.]/g, '-').replace('T', '-').split('Z')[0]; - + let commitHash = ''; if (process.env.GITHUB_SHA) { commitHash = `-${process.env.GITHUB_SHA.substring(0, 7)}`; } - + return `lambda-deployments/${functionName}/${timestamp}${commitHash}.zip`; } async function checkBucketExists(s3Client, bucketName) { try { - const command = new HeadBucketCommand({ + const command = new HeadBucketCommand({ Bucket: bucketName }); await s3Client.send(command); @@ -836,7 +834,7 @@ async function checkBucketExists(s3Client, bucketName) { statusCode: error.$metadata?.httpStatusCode, requestId: error.$metadata?.requestId })}`); - + if (error.$metadata?.httpStatusCode === 301) { core.error(`REGION MISMATCH ERROR: The bucket "${bucketName}" exists but in a different region than specified (${s3Client.config.region}). S3 buckets are global but region-specific. `); throw new Error(`Bucket "${bucketName}" exists in a different region than ${s3Client.config.region}`); @@ -849,12 +847,12 @@ async function checkBucketExists(s3Client, bucketName) { async function createBucket(s3Client, bucketName, region) { core.info(`Creating S3 bucket: ${bucketName}`); - + try { if (!validateBucketName(bucketName)) { throw new Error(`Invalid bucket name: "${bucketName}". 
Bucket names must be 3-63 characters, lowercase, start/end with a letter/number, and contain only letters, numbers, dots, and hyphens.`); } - + const input = { Bucket: bucketName, }; @@ -867,12 +865,12 @@ async function createBucket(s3Client, bucketName, region) { core.info(`Sending CreateBucket request for bucket: ${bucketName} in region: ${region || 'default'}`); const command = new CreateBucketCommand(input); - + try { const response = await s3Client.send(command); core.info(`Successfully created S3 bucket: ${bucketName}`); core.info(`Bucket location: ${response.Location}`); - + // Apply security configurations after bucket creation try { core.info(`Configuring public access block for bucket: ${bucketName}`); @@ -885,7 +883,7 @@ async function createBucket(s3Client, bucketName, region) { RestrictPublicBuckets: true } })); - + core.info(`Enabling default encryption for bucket: ${bucketName}`); await s3Client.send(new PutBucketEncryptionCommand({ Bucket: bucketName, @@ -900,7 +898,7 @@ async function createBucket(s3Client, bucketName, region) { ] } })); - + core.info(`Enabling versioning for bucket: ${bucketName}`); await s3Client.send(new PutBucketVersioningCommand({ Bucket: bucketName, @@ -908,13 +906,13 @@ async function createBucket(s3Client, bucketName, region) { Status: 'Enabled' } })); - + core.info(`Security configurations successfully applied to bucket: ${bucketName}`); } catch (securityError) { core.warning(`Applied partial security settings to bucket. Some security features couldn't be enabled: ${securityError.message}`); core.debug(securityError.stack); } - + return true; } catch (sendError) { core.error(`Error creating bucket: ${sendError.name} - ${sendError.message}`); @@ -925,7 +923,7 @@ async function createBucket(s3Client, bucketName, region) { statusCode: sendError.$metadata?.httpStatusCode, requestId: sendError.$metadata?.requestId })}`); - + if (sendError.name === 'BucketAlreadyExists' || sendError.name === 'BucketAlreadyOwnedByYou') { core.error(`Bucket name ${bucketName} is already taken but may be owned by another account.`); throw sendError; @@ -945,33 +943,33 @@ async function createBucket(s3Client, bucketName, region) { function validateBucketName(name) { if (!name || typeof name !== 'string') return false; - + if (name.length < 3 || name.length > 63) return false; - + if (!/^[a-z0-9.-]+$/.test(name)) return false; - + if (!/^[a-z0-9].*[a-z0-9]$/.test(name)) return false; - + if (/^(\d{1,3}\.){3}\d{1,3}$/.test(name)) return false; - + if (/\.\./.test(name)) return false; - + if (/^xn--/.test(name)) return false; - + if (/^sthree-/.test(name)) return false; - + if (/^sthree-configurator/.test(name)) return false; - + if (/^amzn-s3-demo-bucket/.test(name)) return false; - + return true; } async function uploadToS3(zipFilePath, bucketName, s3Key, region) { core.info(`Uploading Lambda deployment package to S3: s3://${bucketName}/${s3Key}`); - + try { - const s3Client = new S3Client({ + const s3Client = new S3Client({ region, customUserAgent: `LambdaGitHubAction/${version}` }); @@ -981,14 +979,14 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { } catch (checkError) { core.error(`Failed to check if bucket exists: ${checkError.name} - ${checkError.message}`); core.error(`Error type: ${checkError.name}, Code: ${checkError.code}`); - + if (checkError.$metadata?.httpStatusCode === 403) { throw new Error(`Access denied to S3 bucket`); } else { throw checkError; } } - + if (!bucketExists) { core.info(`Bucket ${bucketName} does not exist. 
Attempting to create it...`); try { @@ -1003,7 +1001,7 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { message: bucketError.message, statusCode: bucketError.$metadata?.httpStatusCode })}`); - + if (bucketError.name === 'BucketAlreadyExists' || bucketError.name === 'BucketAlreadyOwnedByYou') { core.info(`Bucket name ${bucketName} is already taken. Please try a different name.`); } else if (bucketError.$metadata?.httpStatusCode === 403) { @@ -1022,10 +1020,10 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { } throw new Error(`Cannot access deployment package at ${zipFilePath}: ${fileError.message}`); } - + const fileContent = await fs.readFile(zipFilePath); core.info(`Read deployment package, size: ${fileContent.length} bytes`); - + try { expectedBucketOwner = await getAwsAccountId(region); @@ -1040,13 +1038,13 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { Body: fileContent, ExpectedBucketOwner: expectedBucketOwner }; - + core.info(`Sending PutObject request to S3 (bucket: ${bucketName}, key: ${s3Key})`); const command = new PutObjectCommand(input); const response = await s3Client.send(command); - + core.info(`S3 upload successful, file size: ${fileContent.length} bytes`); - + return { bucket: bucketName, key: s3Key, @@ -1061,16 +1059,16 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { statusCode: uploadError.$metadata?.httpStatusCode, requestId: uploadError.$metadata?.requestId })}`); - + if (uploadError.$metadata?.httpStatusCode === 403) { throw new Error('Access denied when uploading to S3. Ensure your IAM policy includes s3:PutObject permission.'); } throw uploadError; } - + } catch (error) { core.error(`S3 upload failed: ${error.name} - ${error.message}`); - + if (error.code === 'NoSuchBucket') { core.error(`Bucket ${bucketName} does not exist and could not be created automatically. Please create it manually or check your permissions.`); } else if (error.code === 'AccessDenied' || error.name === 'AccessDenied' || error.$metadata?.httpStatusCode === 403) { @@ -1085,7 +1083,7 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { core.error(`Invalid bucket name: ${bucketName}. Bucket names must follow S3 naming rules.`); core.error('See s3-troubleshooting.md for S3 bucket naming rules.'); } - + throw error; } } @@ -1093,7 +1091,7 @@ async function uploadToS3(zipFilePath, bucketName, s3Key, region) { // Helper function for retrieving AWS account ID async function getAwsAccountId(region) { try { - const stsClient = new STSClient({ + const stsClient = new STSClient({ region, customUserAgent: `LambdaGitHubAction/${version}` });
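
Reviewer note: most of the hunks in `index.js` and the bundled `dist/index.js` above only strip trailing whitespace; the behavioral change is confined to the existence check near the top of `run()`. The sketch below condenses that new flow for quick reference. The wrapper name `dryRunPreflight` is purely illustrative and does not exist in the codebase; `core`, `checkFunctionExists`, and the log/error strings are taken directly from the diff.

```js
const core = require('@actions/core');

// Hypothetical wrapper used only for this sketch; in the actual code these
// statements live inline in run(), and checkFunctionExists is the helper
// shown in the diff above.
async function dryRunPreflight(client, functionName, dryRun) {
  // The existence check now runs unconditionally, even in dry run mode.
  core.info(`Checking if ${functionName} exists`);
  const functionExists = await checkFunctionExists(client, functionName);

  if (dryRun) {
    core.info('DRY RUN MODE: No AWS resources will be created or modified');
    if (!functionExists) {
      // Dry run can only validate a code update against an existing function.
      core.setFailed('DRY RUN MODE can only be used for updating function code of existing functions');
      return false;
    }
  }
  return functionExists;
}
```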