From b124f0591b1490d50825c8eee692c5681a1ab68a Mon Sep 17 00:00:00 2001 From: Manika Joshi Date: Wed, 19 Mar 2025 02:50:40 -0700 Subject: [PATCH 1/7] UDS support for FNS-Blob --- .../fs/azurebfs/AzureBlobFileSystem.java | 2 + .../azurebfs/extensions/SASTokenProvider.java | 6 ++- .../fs/azurebfs/services/AbfsBlobClient.java | 39 ++++++++++--------- .../azurebfs/services/AbfsRestOperation.java | 4 ++ ...ITestAzureBlobFileSystemDelegationSAS.java | 10 ++++- .../utils/DelegationSASGenerator.java | 7 ++++ 6 files changed, 47 insertions(+), 21 deletions(-) diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java index e45603c278d25..c676b6f9f7a03 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java @@ -120,6 +120,7 @@ import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_STANDARD_OPTIONS; import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.*; import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.CPK_IN_NON_HNS_ACCOUNT_ERROR_MESSAGE; +import static org.apache.hadoop.fs.azurebfs.constants.AbfsServiceType.DFS; import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.DATA_BLOCKS_BUFFER; import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_IS_HNS_ENABLED; import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_BLOCK_UPLOAD_ACTIVE_BLOCKS; @@ -250,6 +251,7 @@ public void initialize(URI uri, Configuration configuration) try { if (abfsConfiguration.getAuthType(abfsConfiguration.getAccountName()) == AuthType.SAS && // Auth type is SAS !tryGetIsNamespaceEnabled(new TracingContext(initFSTracingContext)) && // Account is FNS + abfsConfiguration.getFsConfiguredServiceType() == DFS && // Service type is DFS !abfsConfiguration.isFixedSASTokenProviderConfigured()) { // Fixed SAS Token Provider is not configured throw new InvalidConfigurationValueException(FS_AZURE_SAS_FIXED_TOKEN, UNAUTHORIZED_SAS); } diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/extensions/SASTokenProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/extensions/SASTokenProvider.java index 6b506e53227a0..50d7df3159026 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/extensions/SASTokenProvider.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/extensions/SASTokenProvider.java @@ -33,6 +33,8 @@ public interface SASTokenProvider { String CHECK_ACCESS_OPERATION = "check-access"; + String COPY_BLOB_DST_OPERATION = "copy-blob-dst"; + String COPY_BLOB_SRC_OPERATION = "copy-blob-src"; String CREATE_DIRECTORY_OPERATION = "create-directory"; String CREATE_FILE_OPERATION = "create-file"; String DELETE_OPERATION = "delete"; @@ -40,7 +42,9 @@ public interface SASTokenProvider { String GET_ACL_OPERATION = "get-acl"; String GET_STATUS_OPERATION = "get-status"; String GET_PROPERTIES_OPERATION = "get-properties"; + String LEASE_BLOB_OPERATION = "lease-blob"; String LIST_OPERATION = "list"; + String LIST_OPERATION_BLOB = "list-blob"; String READ_OPERATION = "read"; String RENAME_SOURCE_OPERATION = "rename-source"; String RENAME_DESTINATION_OPERATION = "rename-destination"; @@ -49,8 +53,6 @@ public interface 
SASTokenProvider {
 String SET_PERMISSION_OPERATION = "set-permission";
 String SET_PROPERTIES_OPERATION = "set-properties";
 String WRITE_OPERATION = "write";
- // Generic HTTP operation can be used with FixedSASTokenProvider.
- String FIXED_SAS_STORE_OPERATION = "fixed-sas";
 /**
 * Initialize authorizer for Azure Blob File System.
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java
index 2bc6397c369fd..0334345bcfebd 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java
@@ -364,7 +364,7 @@ public AbfsRestOperation listPath(final String relativePath, final boolean recur
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_DELIMITER, FORWARD_SLASH);
 }
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_MAX_RESULTS, String.valueOf(listMaxResults));
- appendSASTokenToQuery(relativePath, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+ appendSASTokenToQuery(relativePath, SASTokenProvider.LIST_OPERATION_BLOB, abfsUriQueryBuilder);
 final URL url = createRequestUrl(abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
@@ -539,11 +539,14 @@ public AbfsRestOperation createPathRestOp(final String path,
 final ContextEncryptionAdapter contextEncryptionAdapter,
 final TracingContext tracingContext) throws AzureBlobFileSystemException {
 final List<AbfsHttpHeader> requestHeaders = createDefaultHeaders();
+ final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 if (isFile) {
 addEncryptionKeyRequestHeaders(path, requestHeaders, true,
 contextEncryptionAdapter, tracingContext);
+ appendSASTokenToQuery(path, SASTokenProvider.CREATE_FILE_OPERATION, abfsUriQueryBuilder);
 } else {
 requestHeaders.add(new AbfsHttpHeader(X_MS_META_HDI_ISFOLDER, TRUE));
+ appendSASTokenToQuery(path, SASTokenProvider.CREATE_DIRECTORY_OPERATION, abfsUriQueryBuilder);
 }
 requestHeaders.add(new AbfsHttpHeader(CONTENT_LENGTH, ZERO));
 if (isAppendBlob) {
@@ -558,9 +561,6 @@ public AbfsRestOperation createPathRestOp(final String path,
 requestHeaders.add(new AbfsHttpHeader(HttpHeaderConfigurations.IF_MATCH, eTag));
 }
- final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
- appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
-
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
 AbfsRestOperationType.PutBlob,
@@ -682,7 +682,7 @@ public AbfsRestOperation acquireLease(final String path,
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, LEASE);
- appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+ appendSASTokenToQuery(path, SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
@@ -710,7 +710,7 @@ public AbfsRestOperation renewLease(final String path, final String leaseId,
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, LEASE);
- appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+
appendSASTokenToQuery(path, org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
@@ -738,7 +738,7 @@ public AbfsRestOperation releaseLease(final String path, final String leaseId,
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, LEASE);
- appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+ appendSASTokenToQuery(path, org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
@@ -765,7 +765,7 @@ public AbfsRestOperation breakLease(final String path,
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, LEASE);
- appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+ appendSASTokenToQuery(path, org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
@@ -813,12 +813,15 @@ destination, sourceEtag, isAtomicRenameKey(source), tracingContext
 if (blobRenameHandler.execute()) {
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
+ appendSASTokenToQuery(source, SASTokenProvider.RENAME_SOURCE_OPERATION,
+ abfsUriQueryBuilder);
 final URL url = createRequestUrl(destination, abfsUriQueryBuilder.toString());
 final List<AbfsHttpHeader> requestHeaders = createDefaultHeaders();
 final AbfsRestOperation successOp = getSuccessOp(
 AbfsRestOperationType.RenamePath, HTTP_METHOD_PUT,
 url, requestHeaders);
+ successOp.setMask();
 return new AbfsClientRenameResult(successOp, true, false);
 } else {
 throw new AbfsRestOperationException(HTTP_INTERNAL_ERROR,
@@ -886,7 +889,7 @@ public AbfsRestOperation append(final String path,
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, BLOCK);
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_BLOCKID, reqParams.getBlockId());
- String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION,
+ String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.WRITE_OPERATION,
 abfsUriQueryBuilder, cachedSasToken);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
@@ -959,7 +962,7 @@ public AbfsRestOperation appendBlock(final String path,
 }
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, APPEND_BLOCK);
- String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+ String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.WRITE_OPERATION, abfsUriQueryBuilder);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
@@ -1051,7 +1054,7 @@ public AbfsRestOperation flush(byte[] buffer,
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, BLOCKLIST);
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_CLOSE, String.valueOf(isClose));
- String sasTokenForReuse = appendSASTokenToQuery(path,
SASTokenProvider.FIXED_SAS_STORE_OPERATION,
+ String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.WRITE_OPERATION,
 abfsUriQueryBuilder, cachedSasToken);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
@@ -1113,7 +1116,7 @@ public AbfsRestOperation setPathProperties(final String path,
 AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, METADATA);
- appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+ appendSASTokenToQuery(path, SASTokenProvider.SET_PROPERTIES_OPERATION, abfsUriQueryBuilder);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
 final AbfsRestOperation op = getAbfsRestOperation(
@@ -1194,7 +1197,7 @@ public AbfsRestOperation getPathStatus(final String path,
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 abfsUriQueryBuilder.addQuery(HttpQueryParams.QUERY_PARAM_UPN,
 String.valueOf(getAbfsConfiguration().isUpnUsed()));
- appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION,
+ appendSASTokenToQuery(path, SASTokenProvider.GET_PROPERTIES_OPERATION,
 abfsUriQueryBuilder);
 final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
@@ -1271,7 +1274,7 @@ public AbfsRestOperation read(final String path,
 }
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
- String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.FIXED_SAS_STORE_OPERATION,
+ String sasTokenForReuse = appendSASTokenToQuery(path, SASTokenProvider.READ_OPERATION,
 abfsUriQueryBuilder, cachedSasToken);
 URL url = createRequestUrl(path, abfsUriQueryBuilder.toString());
@@ -1433,7 +1436,7 @@ public AbfsRestOperation getBlockList(final String path,
 final List<AbfsHttpHeader> requestHeaders = createDefaultHeaders();
 final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
- String operation = SASTokenProvider.FIXED_SAS_STORE_OPERATION;
+ String operation = SASTokenProvider.READ_OPERATION;
 appendSASTokenToQuery(path, operation, abfsUriQueryBuilder);
 abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, BLOCKLIST);
@@ -1471,9 +1474,9 @@ public AbfsRestOperation copyBlob(Path sourceBlobPath,
 String dstBlobRelativePath = destinationBlobPath.toUri().getPath();
 String srcBlobRelativePath = sourceBlobPath.toUri().getPath();
 appendSASTokenToQuery(dstBlobRelativePath,
- SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilderDst);
+ SASTokenProvider.COPY_BLOB_DST_OPERATION, abfsUriQueryBuilderDst);
 appendSASTokenToQuery(srcBlobRelativePath,
- SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilderSrc);
+ SASTokenProvider.COPY_BLOB_SRC_OPERATION, abfsUriQueryBuilderSrc);
 final URL url = createRequestUrl(dstBlobRelativePath,
 abfsUriQueryBuilderDst.toString());
 final String sourcePathUrl = createRequestUrl(srcBlobRelativePath,
@@ -1507,7 +1510,7 @@ public AbfsRestOperation deleteBlobPath(final Path blobPath,
 AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder();
 String blobRelativePath = blobPath.toUri().getPath();
 appendSASTokenToQuery(blobRelativePath,
- SASTokenProvider.FIXED_SAS_STORE_OPERATION, abfsUriQueryBuilder);
+ SASTokenProvider.DELETE_OPERATION, abfsUriQueryBuilder);
 final URL url = createRequestUrl(blobRelativePath, abfsUriQueryBuilder.toString());
 final List<AbfsHttpHeader> requestHeaders = createDefaultHeaders();
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java
b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java index c019fcbc3d3a7..6520050f06c44 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java @@ -139,6 +139,10 @@ public AbfsHttpOperation getResult() { return result; } + public void setMask() { + result.setMaskForSAS(); + } + public void hardSetResult(int httpStatus) { result = AbfsHttpOperation.getAbfsHttpOperationWithFixedResult(this.url, this.method, httpStatus); diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java index 70e5b23eadd83..7a506ff31d680 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java @@ -91,7 +91,9 @@ public ITestAzureBlobFileSystemDelegationSAS() throws Exception { public void setup() throws Exception { isHNSEnabled = this.getConfiguration().getBoolean( TestConfigurationKeys.FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT, false); - Assume.assumeTrue(isHNSEnabled); + if(!isHNSEnabled){ + assumeBlobServiceType(); + } createFilesystemForSASTests(); super.setup(); } @@ -99,6 +101,7 @@ public void setup() throws Exception { @Test // Test filesystem operations access, create, mkdirs, setOwner, getFileStatus public void testCheckAccess() throws Exception { + assumeHnsEnabled(); final AzureBlobFileSystem fs = getFileSystem(); Path rootPath = new Path("/"); @@ -217,6 +220,7 @@ public void testReadAndWrite() throws Exception { @Test public void checkExceptionForRenameOverwrites() throws Exception { + assumeHnsEnabled(); final AzureBlobFileSystem fs = getFileSystem(); Path src = new Path("a/b/f1.txt"); @@ -315,6 +319,7 @@ public void testList() throws Exception { // Test filesystem operations setAcl, getAclStatus, removeAcl // setPermissions and getFileStatus public void testAcl() throws Exception { + assumeHnsEnabled(); final AzureBlobFileSystem fs = getFileSystem(); Path reqPath = new Path(UUID.randomUUID().toString()); @@ -344,6 +349,7 @@ public void testAcl() throws Exception { @Test // Test getFileStatus and getAclStatus operations on root path public void testRootPath() throws Exception { + assumeHnsEnabled(); final AzureBlobFileSystem fs = getFileSystem(); Path rootPath = new Path(AbfsHttpConstants.ROOT_PATH); @@ -443,6 +449,7 @@ null, getTestTracingContext(getFileSystem(), false), @Test // SetPermission should fail when saoid is not the owner and succeed when it is. public void testSetPermissionForNonOwner() throws Exception { + assumeHnsEnabled(); final AzureBlobFileSystem fs = getFileSystem(); Path rootPath = new Path("/"); @@ -478,6 +485,7 @@ public void testSetPermissionForNonOwner() throws Exception { @Test // Without saoid or suoid, setPermission should succeed with sp=p for a non-owner. 
public void testSetPermissionWithoutAgentForNonOwner() throws Exception { + assumeHnsEnabled(); final AzureBlobFileSystem fs = getFileSystem(); Path path = new Path(MockDelegationSASTokenProvider.NO_AGENT_PATH); fs.create(path).close(); diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java index 6f2209a6e8ced..2d5ab5070d54f 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java @@ -60,6 +60,8 @@ public String getDelegationSAS(String accountName, String containerName, String case SASTokenProvider.CREATE_DIRECTORY_OPERATION: case SASTokenProvider.WRITE_OPERATION: case SASTokenProvider.SET_PROPERTIES_OPERATION: + case SASTokenProvider.LEASE_BLOB_OPERATION: + case SASTokenProvider.COPY_BLOB_DST_OPERATION: sp = "w"; break; case SASTokenProvider.DELETE_OPERATION: @@ -75,11 +77,16 @@ public String getDelegationSAS(String accountName, String containerName, String case SASTokenProvider.GET_STATUS_OPERATION: sp = "e"; break; + case SASTokenProvider.LIST_OPERATION_BLOB: + sp = "l"; + sr="c"; + break; case SASTokenProvider.LIST_OPERATION: sp = "l"; break; case SASTokenProvider.GET_PROPERTIES_OPERATION: case SASTokenProvider.READ_OPERATION: + case SASTokenProvider.COPY_BLOB_SRC_OPERATION: sp = "r"; break; case SASTokenProvider.RENAME_DESTINATION_OPERATION: From dab40ab4812c44f83c67b3bae327e60f8e3032f9 Mon Sep 17 00:00:00 2001 From: Manika Joshi Date: Wed, 19 Mar 2025 03:34:04 -0700 Subject: [PATCH 2/7] changing error msg --- .../java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java index 511848c4d0a50..500f8d870bfbf 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java @@ -62,7 +62,7 @@ public final class AbfsErrors { /** * Exception message on filesystem init if token-provider-auth-type configs are provided */ - public static final String UNAUTHORIZED_SAS = "Incorrect SAS token provider configured for non-hierarchical namespace account."; + public static final String UNAUTHORIZED_SAS = "Incorrect SAS token provider configured for non-hierarchical namespace account with DFS service type."; public static final String ERR_RENAME_BLOB = "FNS-Blob rename was not successful for source and destination path: "; public static final String ERR_DELETE_BLOB = From 48b46cf273d7d2f003ca68d2b30aeb00b8e278e8 Mon Sep 17 00:00:00 2001 From: Manika Joshi Date: Wed, 19 Mar 2025 22:01:06 -0700 Subject: [PATCH 3/7] changing .md file --- hadoop-tools/hadoop-azure/src/site/markdown/abfs.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md index fdf366f95d34b..d5021291b8cdc 100644 --- a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md +++ b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md @@ -663,13 +663,13 @@ To know more about how SAS 
Authentication works refer to [Grant limited access to Azure Storage resources using shared access signatures (SAS)](https://learn.microsoft.com/en-us/azure/storage/common/storage-sas-overview) There are three types of SAS supported by Azure Storage: -- [User Delegation SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas): Recommended for use with ABFS Driver with HNS Enabled ADLS Gen2 accounts. It is Identity based SAS that works at blob/directory level) +- [User Delegation SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas): Recommended for use with ABFS Driver with HNS Enabled ADLS Gen2 accounts or HNS-Disabled Blob Storage accounts. It is Identity based SAS that works at blob/directory level) - [Service SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-service-sas): Global and works at container level. - [Account SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas): Global and works at account level. #### Known Issues With SAS -- SAS Based Authentication works only with HNS Enabled ADLS Gen2 Accounts which -is a recommended account type to be used with ABFS. +- SAS Based Authentication works with HNS Enabled ADLS Gen2 Accounts (which +is a recommended account type to be used with ABFS) and HNS-Disabled Blob Storage accounts. - Certain root level operations are known to fail with SAS Based Authentication. #### Using User Delegation SAS with ABFS @@ -737,7 +737,7 @@ the following configurations apart from above two: - **Security**: More secure than Shared Key and allows granting limited access to data without exposing the access key. Recommended to be used only with HNS Enabled, -ADLS Gen 2 storage accounts. +ADLS Gen 2 storage accounts or HNS-Disabled Blob Storage accounts. 
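Note for implementors: the per-operation constants added to `SASTokenProvider` in this series are the `operation` values a custom provider receives for Blob endpoint requests. A minimal sketch of a provider keyed on them follows, assuming the standard `initialize`/`getSASToken` extension points; the class name and the `issueUserDelegationSas()` helper are hypothetical stand-ins for real token-issuing code:

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider;
import org.apache.hadoop.security.AccessControlException;

/**
 * Sketch only: maps the per-operation constants introduced in this series
 * to narrowly scoped user delegation SAS tokens.
 */
public class BlobScopedSasTokenProvider implements SASTokenProvider {

  @Override
  public void initialize(Configuration configuration, String accountName)
      throws IOException {
    // Acquire credentials for signing user delegation SAS here.
  }

  @Override
  public String getSASToken(String account, String fileSystem, String path,
      String operation) throws IOException, AccessControlException {
    switch (operation) {
      case LIST_OPERATION_BLOB:       // Blob listing is container-scoped.
        return issueUserDelegationSas(fileSystem, null, "l");
      case LEASE_BLOB_OPERATION:      // acquire/renew/release/break lease
      case COPY_BLOB_DST_OPERATION:   // copy destination needs write
        return issueUserDelegationSas(fileSystem, path, "w");
      case COPY_BLOB_SRC_OPERATION:   // copy source only needs read
        return issueUserDelegationSas(fileSystem, path, "r");
      default:                        // read/write/delete/... elided here
        return issueUserDelegationSas(fileSystem, path, "r");
    }
  }

  // Hypothetical helper: build and sign a user delegation SAS for the scope.
  private String issueUserDelegationSas(String container, String path,
      String signedPermissions) {
    throw new UnsupportedOperationException("wire up a token service here");
  }
}
```

Scoping each token to a single operation and path keeps the issued SAS close to least privilege, which is the point of introducing distinct constants in place of the removed generic `FIXED_SAS_STORE_OPERATION`.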
#### Using Account/Service SAS with ABFS From e1e66c8dcf7b766303759775b2a1002edf4e5844 Mon Sep 17 00:00:00 2001 From: Manika Joshi Date: Tue, 25 Mar 2025 01:27:35 -0700 Subject: [PATCH 4/7] changes --- .../apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java index 0334345bcfebd..37f926f1461c7 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java @@ -710,7 +710,7 @@ public AbfsRestOperation renewLease(final String path, final String leaseId, final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder(); abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, LEASE); - appendSASTokenToQuery(path, org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder); + appendSASTokenToQuery(path, SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder); final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString()); final AbfsRestOperation op = getAbfsRestOperation( @@ -738,7 +738,7 @@ public AbfsRestOperation releaseLease(final String path, final String leaseId, final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder(); abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, LEASE); - appendSASTokenToQuery(path, org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder); + appendSASTokenToQuery(path, SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder); final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString()); final AbfsRestOperation op = getAbfsRestOperation( @@ -765,7 +765,7 @@ public AbfsRestOperation breakLease(final String path, final AbfsUriQueryBuilder abfsUriQueryBuilder = createDefaultUriQueryBuilder(); abfsUriQueryBuilder.addQuery(QUERY_PARAM_COMP, LEASE); - appendSASTokenToQuery(path, org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder); + appendSASTokenToQuery(path, SASTokenProvider.LEASE_BLOB_OPERATION, abfsUriQueryBuilder); final URL url = createRequestUrl(path, abfsUriQueryBuilder.toString()); final AbfsRestOperation op = getAbfsRestOperation( From 021abb2f7327f6f6e482f63c334fd559f5acb7a7 Mon Sep 17 00:00:00 2001 From: Manika Joshi Date: Mon, 14 Apr 2025 00:40:04 -0700 Subject: [PATCH 5/7] tests addition --- .../fs/azurebfs/AzureBlobFileSystem.java | 26 +++- .../fs/azurebfs/services/AbfsBlobClient.java | 1 - .../fs/azurebfs/services/AbfsErrors.java | 3 +- .../azurebfs/services/AbfsRestOperation.java | 4 - .../hadoop-azure/src/site/markdown/abfs.md | 10 +- ...ITestAzureBlobFileSystemDelegationSAS.java | 146 ++++++++++++++++-- .../utils/DelegationSASGenerator.java | 8 +- 7 files changed, 171 insertions(+), 27 deletions(-) diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java index c676b6f9f7a03..0f70781927124 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java @@ -18,6 +18,7 @@ 
package org.apache.hadoop.fs.azurebfs; +import javax.annotation.Nullable; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; @@ -41,7 +42,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -118,7 +118,22 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.IOSTATISTICS_LOGGING_LEVEL; import static org.apache.hadoop.fs.CommonConfigurationKeys.IOSTATISTICS_LOGGING_LEVEL_DEFAULT; import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_STANDARD_OPTIONS; -import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.*; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_APPEND; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_CREATE; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_CREATE_NON_RECURSIVE; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_DELETE; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_EXIST; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_GET_DELEGATION_TOKEN; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_GET_FILE_STATUS; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_LIST_STATUS; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_MKDIRS; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_OPEN; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_RENAME; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.DIRECTORIES_CREATED; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.DIRECTORIES_DELETED; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.ERROR_IGNORED; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.FILES_CREATED; +import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.FILES_DELETED; import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.CPK_IN_NON_HNS_ACCOUNT_ERROR_MESSAGE; import static org.apache.hadoop.fs.azurebfs.constants.AbfsServiceType.DFS; import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.DATA_BLOCKS_BUFFER; @@ -241,9 +256,10 @@ public void initialize(URI uri, Configuration configuration) /* * Validates if the correct SAS Token provider is configured for non-HNS accounts. - * For non-HNS accounts, if the authentication type is set to SAS, only a fixed SAS Token is supported as of now. - * A custom SAS Token Provider should not be configured in such cases, as it will override the FixedSASTokenProvider and render it unused. - * If the namespace is not enabled and the FixedSASTokenProvider is not configured, + * For non-HNS accounts with Blob endpoint, both fixed SAS Token and custom SAS Token provider are supported. + * For non-HNS accounts with DFS endpoint, if the authentication type is set to SAS, only fixed SAS Token is supported as of now. + * A custom SAS Token Provider should not be configured in this case as it will override the FixedSASTokenProvider and render it unused. + * If the namespace is not enabled and the FixedSASTokenProvider is not configured for non-HNS accounts with DFS endpoint, * an InvalidConfigurationValueException will be thrown. * * @throws InvalidConfigurationValueException if account is not namespace enabled and FixedSASTokenProvider is not configured. 
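The javadoc above reduces to a small decision table. A sketch of the two supported FNS + SAS combinations follows; the auth-type, fixed-token, and provider-type keys correspond to constants referenced in this series, while `fs.azure.fns.account.service.type` is an assumed name for the service-type switch read by `getFsConfiguredServiceType()`:

```java
import org.apache.hadoop.conf.Configuration;

// Sketch of the two FNS + SAS combinations the validation above admits.
public final class FnsSasConfigExamples {

  // FNS account on the Blob endpoint: a custom provider is now allowed.
  public static Configuration fnsBlobWithCustomProvider() {
    Configuration conf = new Configuration();
    conf.set("fs.azure.account.auth.type", "SAS");
    // Assumed key name for the configured service type.
    conf.set("fs.azure.fns.account.service.type", "BLOB");
    conf.set("fs.azure.sas.token.provider.type",
        "org.example.BlobScopedSasTokenProvider"); // hypothetical provider
    return conf;
  }

  // FNS account on the DFS endpoint: only a fixed SAS token is accepted;
  // leaving fs.azure.sas.fixed.token unset makes initialize() throw
  // InvalidConfigurationValueException(UNAUTHORIZED_SAS).
  public static Configuration fnsDfsWithFixedSas(String fixedSas) {
    Configuration conf = new Configuration();
    conf.set("fs.azure.account.auth.type", "SAS");
    conf.set("fs.azure.fns.account.service.type", "DFS");
    conf.set("fs.azure.sas.fixed.token", fixedSas);
    return conf;
  }
}
```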
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java index 37f926f1461c7..d1e26193c3fa4 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlobClient.java @@ -821,7 +821,6 @@ destination, sourceEtag, isAtomicRenameKey(source), tracingContext final AbfsRestOperation successOp = getSuccessOp( AbfsRestOperationType.RenamePath, HTTP_METHOD_PUT, url, requestHeaders); - successOp.setMask(); return new AbfsClientRenameResult(successOp, true, false); } else { throw new AbfsRestOperationException(HTTP_INTERNAL_ERROR, diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java index 500f8d870bfbf..0ff3ee3d1246d 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsErrors.java @@ -62,7 +62,8 @@ public final class AbfsErrors { /** * Exception message on filesystem init if token-provider-auth-type configs are provided */ - public static final String UNAUTHORIZED_SAS = "Incorrect SAS token provider configured for non-hierarchical namespace account with DFS service type."; + public static final String UNAUTHORIZED_SAS + = "Incorrect SAS token provider configured for non-hierarchical namespace account with DFS service type."; public static final String ERR_RENAME_BLOB = "FNS-Blob rename was not successful for source and destination path: "; public static final String ERR_DELETE_BLOB = diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java index 6520050f06c44..c019fcbc3d3a7 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java @@ -139,10 +139,6 @@ public AbfsHttpOperation getResult() { return result; } - public void setMask() { - result.setMaskForSAS(); - } - public void hardSetResult(int httpStatus) { result = AbfsHttpOperation.getAbfsHttpOperationWithFixedResult(this.url, this.method, httpStatus); diff --git a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md index d5021291b8cdc..baf3bc1a6b626 100644 --- a/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md +++ b/hadoop-tools/hadoop-azure/src/site/markdown/abfs.md @@ -663,13 +663,17 @@ To know more about how SAS Authentication works refer to [Grant limited access to Azure Storage resources using shared access signatures (SAS)](https://learn.microsoft.com/en-us/azure/storage/common/storage-sas-overview) There are three types of SAS supported by Azure Storage: -- [User Delegation SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas): Recommended for use with ABFS Driver with HNS Enabled ADLS Gen2 accounts or HNS-Disabled Blob Storage accounts. 
It is Identity based SAS that works at blob/directory level) + +- [User Delegation SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas): + Recommended for use with ABFS Driver with HNS Enabled ADLS Gen2 accounts. It + is Identity based SAS that works at blob/directory level) - [Service SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-service-sas): Global and works at container level. - [Account SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas): Global and works at account level. #### Known Issues With SAS - SAS Based Authentication works with HNS Enabled ADLS Gen2 Accounts (which -is a recommended account type to be used with ABFS) and HNS-Disabled Blob Storage accounts. + is a recommended account type to be used with ABFS). It is also supported with + non-HNS (FNS) Blob accounts. It is **NOT SUPPORTED** with FNS-DFS accounts. - Certain root level operations are known to fail with SAS Based Authentication. #### Using User Delegation SAS with ABFS @@ -737,7 +741,7 @@ the following configurations apart from above two: - **Security**: More secure than Shared Key and allows granting limited access to data without exposing the access key. Recommended to be used only with HNS Enabled, -ADLS Gen 2 storage accounts or HNS-Disabled Blob Storage accounts. +ADLS Gen 2 storage accounts. #### Using Account/Service SAS with ABFS diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java index 7a506ff31d680..ec5b8b93013a3 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java @@ -23,12 +23,14 @@ import java.nio.file.AccessDeniedException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Hashtable; import java.util.List; import java.util.UUID; import org.assertj.core.api.Assertions; import org.junit.Assume; import org.junit.Test; +import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,10 +42,14 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants; import org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys; +import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultEntrySchema; import org.apache.hadoop.fs.azurebfs.extensions.MockDelegationSASTokenProvider; +import org.apache.hadoop.fs.azurebfs.services.AbfsBlobClient; +import org.apache.hadoop.fs.azurebfs.services.AbfsClient; +import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation; import org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation; import org.apache.hadoop.fs.azurebfs.services.AuthType; -import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation; +import org.apache.hadoop.fs.azurebfs.utils.TracingContext; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclStatus; @@ -417,16 +423,75 @@ public void testProperties() throws Exception { } @Test - public void testSignatureMask() throws Exception { + // FileSystemProperties are not supported by delegation SAS and should throw exception + public void testSetFileSystemProperties() throws Exception { final 
AzureBlobFileSystem fs = getFileSystem();
- String src = String.format("/testABC/test%s.xt", UUID.randomUUID());
- fs.create(new Path(src)).close();
- AbfsRestOperation abfsHttpRestOperation = fs.getAbfsClient()
- .renamePath(src, "/testABC" + "/abc.txt", null,
- getTestTracingContext(fs, false), null,
- false)
- .getOp();
- AbfsHttpOperation result = abfsHttpRestOperation.getResult();
+ final Hashtable<String, String>
+ properties = new Hashtable<>();
+ properties.put("FileSystemProperties", "true");
+ TracingContext tracingContext = getTestTracingContext(fs, true);
+ assertThrows(IOException.class, () -> fs.getAbfsStore().setFilesystemProperties(properties, tracingContext));
+ assertThrows(IOException.class, () -> fs.getAbfsStore().getFilesystemProperties(tracingContext));
+ }
+
+ @Test
+ // Test list and delete operation on implicit paths
+ public void testListAndDeleteImplicitPaths() throws Exception {
+ AzureBlobFileSystem fs = getFileSystem();
+ AbfsBlobClient client = ((AbfsBlobClient) getFileSystem().getAbfsClient());
+ assumeBlobServiceType();
+
+ Path file1 = new Path("/testDir/dir1/file1");
+ Path file2 = new Path("/testDir/dir1/file2");
+ Path implicitDir = file1.getParent();
+
+ createAzCopyFolder(implicitDir);
+ createAzCopyFile(file1);
+ createAzCopyFile(file2);
+
+ AbfsRestOperation op = client.listPath(
+ implicitDir.toString(), false, 2, null,
+ getTestTracingContext(getFileSystem(), true));
+ List<? extends ListResultEntrySchema> list = op.getResult()
+ .getListResultSchema()
+ .paths();
+ Assertions.assertThat(list).hasSize(2);
+
+ client.deletePath(implicitDir.toString(), true, "",
+ getTestTracingContext(fs, false));
+
+ Assertions.assertThat(fs.exists(file1))
+ .describedAs("Deleted file1 should not exist.").isFalse();
+ Assertions.assertThat(fs.exists(file2))
+ .describedAs("Deleted file2 should not exist.").isFalse();
+ Assertions.assertThat(fs.exists(implicitDir))
+ .describedAs("The parent dir should not exist.")
+ .isFalse();
+ }
+
+
+ /**
+ * Spies on the AzureBlobFileSystem's store and client to enable mocking and verification
+ * of client interactions in tests. It replaces the actual store and client with mocked versions.
+ *
+ * @param fs the AzureBlobFileSystem instance
+ * @return the spied AbfsClient for interaction verification
+ */
+ private AbfsClient addSpyHooksOnClient(final AzureBlobFileSystem fs) {
+ AzureBlobFileSystemStore store = Mockito.spy(fs.getAbfsStore());
+ Mockito.doReturn(store).when(fs).getAbfsStore();
+ AbfsClient client = Mockito.spy(store.getClient());
+ Mockito.doReturn(client).when(store).getClient();
+ return client;
+ }
+
+ /**
+ * Asserts the signature masking in the URL and encoded URL of the AbfsRestOperation.
+ * + * @param op the AbfsRestOperation + */ + private void checkSignatureMaskAssertions(AbfsRestOperation op){ + AbfsHttpOperation result = op.getResult(); String url = result.getMaskedUrl(); String encodedUrl = result.getMaskedEncodedUrl(); Assertions.assertThat(url.substring(url.indexOf("sig="))) @@ -437,6 +502,67 @@ public void testSignatureMask() throws Exception { .startsWith("sig%3DXXXXX"); } + @Test + // Test masking of signature for rename operation for Blob + public void testSignatureMaskforBlob() throws Exception { + assumeBlobServiceType(); + final AzureBlobFileSystem fs = Mockito.spy(this.getFileSystem()); + AbfsBlobClient client = (AbfsBlobClient) addSpyHooksOnClient(fs); + + fs.getAbfsStore().setClient(client); + String src = String.format("/testABC/test%s.xt", UUID.randomUUID()); + String dest = "/testABC" + "/abc.txt"; + fs.create(new Path(src)).close(); + + Mockito.doAnswer(answer -> { + Path srcCopy = answer.getArgument(0); + Path dstCopy = answer.getArgument(1); + String leaseId = answer.getArgument(2); + TracingContext tracingContext = answer.getArgument(3); + AbfsRestOperation op + = ((AbfsBlobClient) getFileSystem().getAbfsClient()).copyBlob(srcCopy, + dstCopy, leaseId, tracingContext); + checkSignatureMaskAssertions(op); + return answer.callRealMethod(); + }) + .when(client) + .copyBlob(Mockito.any(Path.class), Mockito.any(Path.class), + Mockito.any(String.class), Mockito.any(TracingContext.class)); + + Mockito.doAnswer(answer -> { + Path blobPath = answer.getArgument(0); + String leaseId = answer.getArgument(1); + TracingContext tracingContext = answer.getArgument(2); + AbfsRestOperation op + = ((AbfsBlobClient) getFileSystem().getAbfsClient()).deleteBlobPath(blobPath, + leaseId, tracingContext); + checkSignatureMaskAssertions(op); + return answer.callRealMethod(); + }) + .when(client) + .deleteBlobPath(Mockito.any(Path.class), Mockito.any(String.class), + Mockito.any(TracingContext.class)); + + client.renamePath(src, dest, null, + getTestTracingContext(fs, false), null, + false); + } + + // Test masking of signature for rename operation for DFS + @Test + public void testSignatureMask() throws Exception { + assumeDfsServiceType(); + final AzureBlobFileSystem fs = getFileSystem(); + String src = String.format("/testABC/test%s.xt", UUID.randomUUID()); + fs.create(new Path(src)).close(); + AbfsRestOperation abfsHttpRestOperation = fs.getAbfsClient() + .renamePath(src, "/testABC" + "/abc.txt", null, + getTestTracingContext(fs, false), null, + false) + .getOp(); + checkSignatureMaskAssertions(abfsHttpRestOperation); + } + @Test public void testSignatureMaskOnExceptionMessage() throws Exception { intercept(IOException.class, "sig=XXXX", diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java index 2d5ab5070d54f..4af6025be9d9b 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java @@ -70,7 +70,7 @@ public String getDelegationSAS(String accountName, String containerName, String case SASTokenProvider.DELETE_RECURSIVE_OPERATION: sp = "d"; sr = "d"; - sdd = Integer.toString(StringUtils.countMatches(path, "/")); + sdd = path.equals("/")? 
"0": Integer.toString(StringUtils.countMatches(path, "/")); break; case SASTokenProvider.CHECK_ACCESS_OPERATION: case SASTokenProvider.GET_ACL_OPERATION: @@ -79,10 +79,12 @@ public String getDelegationSAS(String accountName, String containerName, String break; case SASTokenProvider.LIST_OPERATION_BLOB: sp = "l"; - sr="c"; + sr = "c"; break; case SASTokenProvider.LIST_OPERATION: sp = "l"; + sr = "d"; + sdd = path.equals("/")? "0": Integer.toString(StringUtils.countMatches(path, "/")); break; case SASTokenProvider.GET_PROPERTIES_OPERATION: case SASTokenProvider.READ_OPERATION: @@ -196,4 +198,4 @@ private String computeSignatureForSAS(String sp, String st, String se, String sv LOG.debug("Delegation SAS stringToSign: " + stringToSign.replace("\n", ".")); return computeHmac256(stringToSign); } -} \ No newline at end of file +} From 6651d384539ce9f2df975c9c5d7fac834a93696f Mon Sep 17 00:00:00 2001 From: Manika Joshi Date: Thu, 17 Apr 2025 06:25:54 -0700 Subject: [PATCH 6/7] comment suggestions --- hadoop-tools/hadoop-azure/src/site/markdown/index.md | 5 +++-- .../azurebfs/ITestAzureBlobFileSystemDelegationSAS.java | 8 +++++++- .../hadoop/fs/azurebfs/utils/DelegationSASGenerator.java | 5 +++-- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/hadoop-tools/hadoop-azure/src/site/markdown/index.md b/hadoop-tools/hadoop-azure/src/site/markdown/index.md index 296e41047b72f..62eb9f1613b97 100644 --- a/hadoop-tools/hadoop-azure/src/site/markdown/index.md +++ b/hadoop-tools/hadoop-azure/src/site/markdown/index.md @@ -652,8 +652,9 @@ To know more about how SAS Authentication works refer to There are three types of SAS supported by Azure Storage: - [User Delegation SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas): - Recommended for use with ABFS Driver with HNS Enabled ADLS Gen2 accounts. It - is Identity based SAS that works at blob/directory level) + SAS-based authentication works with HNS-enabled ADLS Gen2 accounts + (recommended for use with ABFS) and is also supported with non-HNS (FNS) Blob + accounts. However, it is **NOT SUPPORTED** with FNS-DFS accounts. - [Service SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-service-sas): Global and works at container level. - [Account SAS](https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas): Global and works at account level. 
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java index 9c6721f644945..eaf5498b9e9b0 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java @@ -97,7 +97,7 @@ public ITestAzureBlobFileSystemDelegationSAS() throws Exception { public void setup() throws Exception { isHNSEnabled = this.getConfiguration().getBoolean( TestConfigurationKeys.FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT, false); - if(!isHNSEnabled){ + if (!isHNSEnabled) { assumeBlobServiceType(); } createFilesystemForSASTests(); @@ -308,6 +308,7 @@ public void testList() throws Exception { final AzureBlobFileSystem fs = getFileSystem(); Path dirPath = new Path(UUID.randomUUID().toString()); Path filePath = new Path(dirPath, UUID.randomUUID().toString()); + Path filePath2 = new Path(dirPath, UUID.randomUUID().toString()); fs.mkdirs(dirPath); @@ -316,6 +317,11 @@ public void testList() throws Exception { stream.writeBytes("hello"); } + // create file with content "bye" + try (FSDataOutputStream stream = fs.create(filePath2)) { + stream.writeBytes("bye"); + } + fs.listStatus(filePath); fs.listStatus(dirPath); fs.listStatus(new Path("/")); diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java index 4af6025be9d9b..eec0d86f0b6bb 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/utils/DelegationSASGenerator.java @@ -24,6 +24,7 @@ import org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider; import org.apache.hadoop.fs.azurebfs.services.AbfsUriQueryBuilder; +import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.ROOT_PATH; /** * Test Delegation SAS generator. @@ -70,7 +71,7 @@ public String getDelegationSAS(String accountName, String containerName, String case SASTokenProvider.DELETE_RECURSIVE_OPERATION: sp = "d"; sr = "d"; - sdd = path.equals("/")? "0": Integer.toString(StringUtils.countMatches(path, "/")); + sdd = path.equals(ROOT_PATH)? "0": Integer.toString(StringUtils.countMatches(path, "/")); break; case SASTokenProvider.CHECK_ACCESS_OPERATION: case SASTokenProvider.GET_ACL_OPERATION: @@ -84,7 +85,7 @@ public String getDelegationSAS(String accountName, String containerName, String case SASTokenProvider.LIST_OPERATION: sp = "l"; sr = "d"; - sdd = path.equals("/")? "0": Integer.toString(StringUtils.countMatches(path, "/")); + sdd = path.equals(ROOT_PATH)? 
"0": Integer.toString(StringUtils.countMatches(path, "/")); break; case SASTokenProvider.GET_PROPERTIES_OPERATION: case SASTokenProvider.READ_OPERATION: From 82ef314cc4421166188219605041c39881932ea5 Mon Sep 17 00:00:00 2001 From: Manika Joshi Date: Wed, 23 Apr 2025 22:04:12 -0700 Subject: [PATCH 7/7] adding observations --- hadoop-tools/hadoop-azure/src/site/markdown/index.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/hadoop-tools/hadoop-azure/src/site/markdown/index.md b/hadoop-tools/hadoop-azure/src/site/markdown/index.md index dba572abd15e8..6695d814c9335 100644 --- a/hadoop-tools/hadoop-azure/src/site/markdown/index.md +++ b/hadoop-tools/hadoop-azure/src/site/markdown/index.md @@ -1469,7 +1469,12 @@ Once the above properties are configured, `hdfs dfs -ls abfs://container1@abfswa Following failures are known and expected to fail as of now. 1. AzureBlobFileSystem.setXAttr() and AzureBlobFileSystem.getXAttr() will fail when attempted on root ("/") path with `Operation failed: "The request URI is invalid.", HTTP 400 Bad Request` - +2. If you're using user-delegation SAS authentication: + - Listing operation for HNS accounts (on DFS endpoint) works with SAS token supporting either blob or directory + scopes (Signed Resource Type as Blob or Directory), + though it is intended to work only at the directory scope. It is a known bug. + - AzureBlobFileSystem.getFileStatus() is expected to fail at root ("/") path with + `Operation failed: "Server failed to authenticate the request.", HTTP 401 Unauthorized Error` ## Testing ABFS See the relevant section in [Testing Azure](testing_azure.html).