2 changes: 1 addition & 1 deletion NOTICE-binary
@@ -66,7 +66,7 @@ available from http://www.digip.org/jansson/.


AWS SDK for Java
-Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+Copyright 2010-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.

This product includes software developed by
Amazon Technologies, Inc (http://www.amazon.com/).
2 changes: 1 addition & 1 deletion hadoop-project/pom.xml
@@ -185,7 +185,7 @@
<exec-maven-plugin.version>1.3.1</exec-maven-plugin.version>
<make-maven-plugin.version>1.0-beta-1</make-maven-plugin.version>
<surefire.fork.timeout>900</surefire.fork.timeout>
-    <aws-java-sdk.version>1.11.1026</aws-java-sdk.version>
+    <aws-java-sdk.version>1.12.132</aws-java-sdk.version>
<hsqldb.version>2.3.4</hsqldb.version>
<frontend-maven-plugin.version>1.11.2</frontend-maven-plugin.version>
<jasmine-maven-plugin.version>2.1</jasmine-maven-plugin.version>
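As a reviewer aid (not part of this change), the sketch below is one way to confirm which SDK version actually ends up on the classpath after this bump. It assumes the v1 SDK's `com.amazonaws.util.VersionInfoUtils` is visible, which should also hold for the `aws-java-sdk-bundle` jar, since its `com.amazonaws` packages are not relocated.

```java
// Reviewer aid, not part of this PR: report the AWS SDK v1 version visible on the classpath.
// Assumes com.amazonaws.util.VersionInfoUtils is reachable (unshaded SDK or the bundle jar).
import com.amazonaws.util.VersionInfoUtils;

public class PrintAwsSdkVersion {
  public static void main(String[] args) {
    // Expected to print 1.12.132 once the updated hadoop-project/pom.xml property is in effect.
    System.out.println("aws-java-sdk version: " + VersionInfoUtils.getVersion());
  }
}
```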
@@ -1662,6 +1662,7 @@ to AWS services.
* Try and get other people, especially anyone with their own endpoints,
apps or different deployment environments, to run their own tests.
* Run the load tests, especially `ILoadTestS3ABulkDeleteThrottling`.
+* Check out cloudstore, build it against your version of Hadoop, then use its CLI to run some commands (`storediag` etc.); see the sketch below.
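cloudstore itself is the tool to use here; purely as a rough stand-in (this is not cloudstore's code), the sketch below shows the kind of binding check `storediag` performs, using only standard Hadoop `FileSystem` APIs. The bucket URI is a placeholder.

```java
// Rough stand-in for a storediag-style check; not cloudstore's implementation.
// Loads the configuration, verifies the AWS SDK is on the classpath, and probes the bucket root.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class S3ABindingCheck {
  public static void main(String[] args) throws Exception {
    String bucket = args.length > 0 ? args[0] : "s3a://example-bucket/";   // placeholder bucket URI
    Configuration conf = new Configuration();                              // picks up core-default.xml / core-site.xml
    Class.forName("com.amazonaws.services.s3.AmazonS3");                   // fails fast if no SDK on the classpath
    try (FileSystem fs = FileSystem.newInstance(new URI(bucket), conf)) {
      System.out.println("Filesystem implementation: " + fs.getClass().getName());
      System.out.println("Root status: " + fs.getFileStatus(new Path(bucket)));
    }
  }
}
```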

### Dealing with Deprecated APIs and New Features

@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.s3a;

import com.amazonaws.regions.Regions;
+import org.assertj.core.api.Assertions;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -27,6 +28,14 @@

import static org.apache.hadoop.test.LambdaTestUtils.intercept;

+/**
+ * Verifies the mapping of an ARN resource declaration to the associated
+ * access point.
+ * The region mapping assertions have proven brittle to changes across
+ * AWS SDK versions, so they only verify partial matches, rather than
+ * the FQDN of the endpoints.
+ *
+ */
public class TestArnResource extends HadoopTestBase {
private final static Logger LOG = LoggerFactory.getLogger(TestArnResource.class);

@@ -37,10 +46,10 @@ public void parseAccessPointFromArn() throws IllegalArgumentException {
String accessPoint = "testAp";
String accountId = "123456789101";
String[][] regionPartitionEndpoints = new String[][] {
-    {Regions.EU_WEST_1.getName(), "aws", "s3-accesspoint.eu-west-1.amazonaws.com"},
+    {Regions.EU_WEST_1.getName(), "aws", "eu-west-1.amazonaws.com"},
     {Regions.US_GOV_EAST_1.getName(), "aws-us-gov",
-        "s3-accesspoint.us-gov-east-1.amazonaws.com"},
-    {Regions.CN_NORTH_1.getName(), "aws-cn", "s3-accesspoint.cn-north-1.amazonaws.com.cn"},
+        "us-gov-east-1.amazonaws.com"},
+    {Regions.CN_NORTH_1.getName(), "aws-cn", "cn-north-1.amazonaws.com"},
};

for (String[] testPair : regionPartitionEndpoints) {
@@ -57,7 +66,9 @@ public void parseAccessPointFromArn() throws IllegalArgumentException {
assertEquals("Access Point name does not match", accessPoint, resource.getName());
assertEquals("Account Id does not match", accountId, resource.getOwnerAccountId());
assertEquals("Region does not match", region, resource.getRegion());
assertEquals("Endpoint does not match", endpoint, resource.getEndpoint());
Assertions.assertThat(resource.getEndpoint())
.describedAs("Endpoint does not match")
.contains(endpoint);
}
}

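The move above from `assertEquals` to an AssertJ `contains` check is what makes the endpoint assertions tolerant of SDK changes: only the region/partition suffix is pinned, not the full access-point FQDN. A standalone illustration of the same pattern (the endpoint string is a sample, not a value taken from any SDK):

```java
// Illustration of the partial-match style used above; the endpoint string is only a sample.
import static org.assertj.core.api.Assertions.assertThat;

public class EndpointSuffixCheckExample {
  public static void main(String[] args) {
    String endpoint = "s3-accesspoint.eu-west-1.amazonaws.com";  // one possible SDK rendering
    assertThat(endpoint)
        .describedAs("endpoint should resolve within the expected region and partition")
        .contains("eu-west-1.amazonaws.com");                    // pins the suffix only, not the FQDN
  }
}
```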