Skip to content

Commit

Permalink
Storage STG93 (#38969)
Browse files Browse the repository at this point in the history
* Adding new service version 2024-05-04 (2nd try) (#38255)

* file changes

* asset files

* fixing audience error test without instrument

* accidentally did not commit the change lol

* re-enabling ClientName in HandleItem (#38273)

* STG93 Adding rename support In List Range (Diff Range) (#38324)

* swagger

* hooking up

* test and recording

* style

* async test

* style

* changing get to is

* adding javadoc description and changing names

* STG93 Added ACL response headers and x-ms-upn request header (#38450)

* implementation

* reformatting context logic, ACL test, non-functional UPN test

* edit to parseList in PathAccessControlEntry

* style

* made upn header appear in request and finished upn test

* fixing issue with pipeline

* making upn context adjustment its own method and editing its usage

* fixing pipeline again

* fixing upn test

* resolving comments

* Stg93 Snapshot management support via REST for NFS shares (Cleaned) (#38845)

* swagger and hookup

* everything

* style

* removing preprod from test environment suffix

* adding better descriptions

* renaming

* re-recording

* Revert "Stg93 Snapshot management support via REST for NFS shares (Cleaned) (…" (#38859)

This reverts commit 3b390e6.

* Api Review Changes (#38587)

* all api review changes

* syntax

* revisions

* removing imports

* removing overwrite variable

* CPKN async edits

* docs changes

* making getPropertiesWithResponse in sync and async private, fixing breaking change in sync

* style

* recording fixes

* re-recording a test and suppressing spotbugs errors
  • Loading branch information
ibrandes authored Apr 4, 2024
1 parent 33cc083 commit f10cd0f
Show file tree
Hide file tree
Showing 51 changed files with 1,103 additions and 69 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,15 @@ public void addPolicyForBlobServiceClientBuilder() {

HttpPipeline pipeline = blobServiceClient.getHttpPipeline();
assertTrue(pipeline.getPolicyCount() >= 10);
assertEquals(SleuthHttpPolicy.class, pipeline.getPolicy(6).getClass());
boolean sleuthPolicyFound = false;
int policyCount = pipeline.getPolicyCount();
for (int i = 0; i < policyCount; i++) {
if (SleuthHttpPolicy.class.equals(pipeline.getPolicy(i).getClass())) {
sleuthPolicyFound = true;
break;
}
}
assertTrue(sleuthPolicyFound);
}

@Test
Expand Down
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-blob/assets.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "java",
"TagPrefix": "java/storage/azure-storage-blob",
"Tag": "java/storage/azure-storage-blob_622f991d7c"
"Tag": "java/storage/azure-storage-blob_7e4a0926e9"
}
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,12 @@ public enum BlobServiceVersion implements ServiceVersion {
/**
* Service version {@code 2024-02-04}.
*/
V2024_02_04("2024-02-04");
V2024_02_04("2024-02-04"),

/**
* Service version {@code 2024-05-04}.
*/
V2024_05_04("2024-05-04");

private final String version;

Expand All @@ -134,6 +139,6 @@ public String getVersion() {
* @return the latest {@link BlobServiceVersion}
*/
public static BlobServiceVersion getLatest() {
return V2024_02_04;
return V2024_05_04;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.policy.AddDatePolicy;
import com.azure.core.http.policy.AddHeadersFromContextPolicy;
import com.azure.core.http.policy.AddHeadersPolicy;
import com.azure.core.http.policy.AzureSasCredentialPolicy;
import com.azure.core.http.policy.BearerTokenAuthenticationPolicy;
Expand Down Expand Up @@ -104,6 +105,8 @@ public static HttpPipeline buildPipeline(

policies.add(new AddDatePolicy());

policies.add(new AddHeadersFromContextPolicy());

// We need to place this policy right before the credential policy since headers may affect the string to sign
// of the request.
HttpHeaders headers = CoreUtils.createHttpHeadersFromClientOptions(clientOptions);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ protected BlobAsyncClientBase(HttpPipeline pipeline, String url, BlobServiceVers
*
* @return the encryption scope used for encryption.
*/
public String getEncryptionScope() {
protected String getEncryptionScope() {
if (encryptionScope == null) {
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import com.azure.storage.blob.sas.BlobServiceSasSignatureValues;
import com.azure.storage.blob.specialized.AppendBlobAsyncClient;
import com.azure.storage.blob.specialized.BlobAsyncClientBase;
import com.azure.storage.blob.specialized.BlobBaseTestHelper;
import com.azure.storage.blob.specialized.BlockBlobAsyncClient;
import com.azure.storage.blob.specialized.PageBlobAsyncClient;
import com.azure.storage.blob.specialized.PageBlobClient;
Expand Down Expand Up @@ -368,36 +369,40 @@ public void pageBlobClientBuilderCheck() {
.buildPageBlobAsyncClient());
}

// TODO (isbr): getEncryptionScope is now protected on BlobAsyncClientBase; tests access it via BlobBaseTestHelper
@Test
public void getEncryptionScopeClient() {
String newEncryptionScope = "newtestscope";

// when: "AppendBlob"
AppendBlobAsyncClient newCpknAppendBlob = cpknAppendBlob.getEncryptionScopeAsyncClient(newEncryptionScope);
assertInstanceOf(AppendBlobAsyncClient.class, newCpknAppendBlob);
assertNotEquals(cpknAppendBlob.getEncryptionScope(), newCpknAppendBlob.getEncryptionScope());
assertNotEquals(BlobBaseTestHelper.getEncryptionScope(cpknAppendBlob),
BlobBaseTestHelper.getEncryptionScope(newCpknAppendBlob));

// when: "BlockBlob"
BlockBlobAsyncClient newCpknBlockBlob = cpknBlockBlob.getEncryptionScopeAsyncClient(newEncryptionScope);
assertInstanceOf(BlockBlobAsyncClient.class, newCpknBlockBlob);
assertNotEquals(cpknBlockBlob.getEncryptionScope(), newCpknBlockBlob.getEncryptionScope());
assertNotEquals(BlobBaseTestHelper.getEncryptionScope(cpknBlockBlob),
BlobBaseTestHelper.getEncryptionScope(newCpknBlockBlob));

// when: "PageBlob"
PageBlobAsyncClient newCpknPageBlob = cpknPageBlob.getEncryptionScopeAsyncClient(newEncryptionScope);
assertInstanceOf(PageBlobAsyncClient.class, newCpknPageBlob);
assertNotEquals(cpknPageBlob.getEncryptionScope(), newCpknPageBlob.getEncryptionScope());
assertNotEquals(BlobBaseTestHelper.getEncryptionScope(cpknPageBlob),
BlobBaseTestHelper.getEncryptionScope(newCpknPageBlob));

// when: "BlobClient"
BlobAsyncClient cpkBlobClient = cpknContainer.getBlobAsyncClient(generateBlobName()); // Inherits container's CPK
BlobAsyncClient newCpknBlobClient = cpkBlobClient.getEncryptionScopeAsyncClient(newEncryptionScope);
assertInstanceOf(BlobAsyncClient.class, newCpknBlobClient);
assertNotEquals(cpkBlobClient.getEncryptionScope(), newCpknBlobClient.getEncryptionScope());
assertNotEquals(BlobBaseTestHelper.getEncryptionScope(cpkBlobClient),
BlobBaseTestHelper.getEncryptionScope(newCpknBlobClient));

// when: "BlobClientBase"
BlobAsyncClientBase newCpknBlobClientBase = ((BlobAsyncClientBase) cpkBlobClient)
.getEncryptionScopeAsyncClient(newEncryptionScope);
assertInstanceOf(BlobAsyncClientBase.class, newCpknBlobClientBase);
assertNotEquals(cpkBlobClient.getEncryptionScope(), newCpknBlobClientBase.getEncryptionScope());
assertNotEquals(BlobBaseTestHelper.getEncryptionScope(cpkBlobClient),
BlobBaseTestHelper.getEncryptionScope(newCpknBlobClientBase));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -892,10 +892,10 @@ public void storageAccountAudience() {

@Test
public void audienceError() {
AppendBlobAsyncClient aadBlob = new SpecializedBlobClientBuilder()
AppendBlobAsyncClient aadBlob = instrument(new SpecializedBlobClientBuilder()
.endpoint(bc.getBlobUrl())
.credential(new MockTokenCredential())
.audience(BlobAudience.createBlobServiceAccountAudience("badAudience"))
.audience(BlobAudience.createBlobServiceAccountAudience("badAudience")))
.buildAppendBlobAsyncClient();

StepVerifier.create(aadBlob.exists())
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.storage.blob.specialized;

/**
 * Test-only helper that exposes the protected {@code getEncryptionScope()} of {@link BlobAsyncClientBase}
 * to tests in other packages. It must live in the {@code com.azure.storage.blob.specialized} package so it
 * can reach the protected member.
 */
public final class BlobBaseTestHelper {
    // Static utility holder — prevent instantiation.
    private BlobBaseTestHelper() {
    }

    /**
     * Returns the encryption scope configured on the given client.
     *
     * @param baseClient the client to inspect; must not be {@code null}
     * @return the encryption scope, or {@code null} if none is set
     */
    public static String getEncryptionScope(BlobAsyncClientBase baseClient) {
        return baseClient.getEncryptionScope();
    }
}
4 changes: 2 additions & 2 deletions sdk/storage/azure-storage-common/ci.system.properties
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
AZURE_LIVE_TEST_SERVICE_VERSION=V2024_02_04
AZURE_STORAGE_SAS_SERVICE_VERSION=2024-02-04
AZURE_LIVE_TEST_SERVICE_VERSION=V2024_05_04
AZURE_STORAGE_SAS_SERVICE_VERSION=2024-05-04
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ public final class Constants {
public static final String PROPERTY_AZURE_STORAGE_SAS_SERVICE_VERSION = "AZURE_STORAGE_SAS_SERVICE_VERSION";

public static final String SAS_SERVICE_VERSION = Configuration.getGlobalConfiguration()
.get(PROPERTY_AZURE_STORAGE_SAS_SERVICE_VERSION, "2024-02-04");
.get(PROPERTY_AZURE_STORAGE_SAS_SERVICE_VERSION, "2024-05-04");

private Constants() {
}
Expand Down Expand Up @@ -216,7 +216,7 @@ public static final class HeaderConstants {
* @deprecated For SAS Service Version use {@link Constants#SAS_SERVICE_VERSION}.
*/
@Deprecated
public static final String TARGET_STORAGE_VERSION = "2024-02-04";
public static final String TARGET_STORAGE_VERSION = "2024-05-04";

/**
* Error code returned from the service.
Expand Down
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-file-datalake/assets.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "java",
"TagPrefix": "java/storage/azure-storage-file-datalake",
"Tag": "java/storage/azure-storage-file-datalake_b5a1b62d5c"
"Tag": "java/storage/azure-storage-file-datalake_bc901016df"
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
<suppress files="com.azure.storage.file.datalake.implementation.ServicesImpl.java" checks="MissingJavadocMethodCheck" />
<suppress files="com.azure.storage.file.datalake.implementation.models.InternalDataLakeFileOpenInputStreamResult.java" checks="MissingJavadocMethodCheck" />
<suppress files="com.azure.storage.file.datalake.implementation.util.AccessorUtility.java" checks="MissingJavadocMethodCheck" />
<suppress files="com.azure.storage.file.datalake.implementation.util.BuilderHelper.java" checks="MissingJavadocMethodCheck" />
<suppress files="com.azure.storage.file.datalake.implementation.util.DataLakeImplUtils.java" checks="MissingJavadocMethodCheck" />
<suppress files="com.azure.storage.file.datalake.implementation.util.DataLakeSasImplUtil.java" checks="MissingJavadocMethodCheck" />
<suppress files="com.azure.storage.file.datalake.implementation.util.ModelHelper.java" checks="MissingJavadocMethodCheck" />
Expand Down
4 changes: 4 additions & 0 deletions sdk/storage/azure-storage-file-datalake/spotbugs-exclude.xml
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,11 @@
<Class name="com.azure.storage.file.datalake.options.DataLakePathDeleteOptions" />
<Class name="com.azure.storage.file.datalake.options.FileParallelUploadOptions" />
<Class name="com.azure.storage.file.datalake.options.FileQueryOptions" />
<Class name="com.azure.storage.file.datalake.options.PathGetPropertiesOptions" />
<Class name="com.azure.storage.file.datalake.options.PathRemoveAccessControlRecursiveOptions" />
<Class name="com.azure.storage.file.datalake.options.PathSetAccessControlRecursiveOptions" />
<Class name="com.azure.storage.file.datalake.options.PathUpdateAccessControlRecursiveOptions" />
<Class name="com.azure.storage.file.datalake.options.ReadToFileOptions" />
</Or>
</Match>
<Match>
Expand Down Expand Up @@ -64,9 +66,11 @@
<Class name="com.azure.storage.file.datalake.options.DataLakePathDeleteOptions" />
<Class name="com.azure.storage.file.datalake.options.FileParallelUploadOptions" />
<Class name="com.azure.storage.file.datalake.options.FileQueryOptions" />
<Class name="com.azure.storage.file.datalake.options.PathGetPropertiesOptions" />
<Class name="com.azure.storage.file.datalake.options.PathRemoveAccessControlRecursiveOptions" />
<Class name="com.azure.storage.file.datalake.options.PathSetAccessControlRecursiveOptions" />
<Class name="com.azure.storage.file.datalake.options.PathUpdateAccessControlRecursiveOptions" />
<Class name="com.azure.storage.file.datalake.options.ReadToFileOptions" />
</Or>
</Match>
<Match>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import com.azure.storage.file.datalake.implementation.models.ModifiedAccessConditions;
import com.azure.storage.file.datalake.implementation.models.PathExpiryOptions;
import com.azure.storage.file.datalake.implementation.models.PathResourceType;
import com.azure.storage.file.datalake.implementation.util.BuilderHelper;
import com.azure.storage.file.datalake.implementation.util.DataLakeImplUtils;
import com.azure.storage.file.datalake.implementation.util.ModelHelper;
import com.azure.storage.file.datalake.models.CustomerProvidedKey;
Expand All @@ -55,6 +56,7 @@
import com.azure.storage.file.datalake.options.FileParallelUploadOptions;
import com.azure.storage.file.datalake.options.FileQueryOptions;
import com.azure.storage.file.datalake.options.FileScheduleDeletionOptions;
import com.azure.storage.file.datalake.options.ReadToFileOptions;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuples;
Expand Down Expand Up @@ -1465,6 +1467,32 @@ public Mono<PathProperties> readToFile(String filePath) {
return readToFile(filePath, false);
}

/**
 * Downloads the whole file to a local file at the given path, emitting only the resulting properties.
 *
 * <p>The destination file is created by this call and must not already exist; when it does, a
 * {@link FileAlreadyExistsException} is raised.</p>
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileAsyncClient.readToFile#ReadToFileOptions -->
 * <pre>
 * client.readToFile&#40;new ReadToFileOptions&#40;&#41;.setFilePath&#40;file&#41;&#41;
 * .subscribe&#40;response -&gt; System.out.println&#40;&quot;Completed download to file&quot;&#41;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.DataLakeFileAsyncClient.readToFile#ReadToFileOptions -->
 *
 * <p>For more information, see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/get-blob">Azure Docs</a></p>
 *
 * @param options {@link ReadToFileOptions}
 * @return A reactive response containing the file properties and metadata.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<PathProperties> readToFile(ReadToFileOptions options) {
    // Delegate to the WithResponse variant and strip the HTTP response envelope.
    Mono<Response<PathProperties>> responseMono = readToFileWithResponse(options);
    return responseMono.flatMap(FluxUtil::toMono);
}

/**
* Reads the entire file into a file specified by the path.
*
Expand Down Expand Up @@ -1551,6 +1579,54 @@ public Mono<Response<PathProperties>> readToFileWithResponse(String filePath, Fi
.map(response -> new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue(), response)));
}

/**
 * Reads the entire file into a file specified by the path.
 *
 * <p>By default the file will be created and must not exist, if the file already exists a
 * {@link FileAlreadyExistsException} will be thrown. To override this behavior, provide appropriate
 * {@link OpenOption OpenOptions} </p>
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.file.datalake.DataLakeFileAsyncClient.readToFileWithResponse#ReadToFileOptions -->
 * <pre>
 * ReadToFileOptions options = new ReadToFileOptions&#40;&#41;;
 * options.setFilePath&#40;file&#41;;
 * options.setRange&#40;new FileRange&#40;1024, 2048L&#41;&#41;;
 * options.setDownloadRetryOptions&#40;new DownloadRetryOptions&#40;&#41;.setMaxRetryRequests&#40;5&#41;&#41;;
 * options.setOpenOptions&#40;new HashSet&lt;&gt;&#40;Arrays.asList&#40;StandardOpenOption.CREATE_NEW,
 * StandardOpenOption.WRITE, StandardOpenOption.READ&#41;&#41;&#41;; &#47;&#47;Default options
 * options.setParallelTransferOptions&#40;new ParallelTransferOptions&#40;&#41;.setBlockSizeLong&#40;4L * Constants.MB&#41;&#41;;
 * options.setDataLakeRequestConditions&#40;null&#41;;
 * options.setRangeGetContentMd5&#40;false&#41;;
 *
 * client.readToFileWithResponse&#40;options&#41;
 * .subscribe&#40;response -&gt; System.out.println&#40;&quot;Completed download to file&quot;&#41;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.DataLakeFileAsyncClient.readToFileWithResponse#ReadToFileOptions -->
 *
 * <p>For more information, see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/get-blob">Azure Docs</a></p>
 *
 * @param options {@link ReadToFileOptions}
 * @return A reactive response containing the file properties and metadata.
 * @throws NullPointerException If {@code options} is null.
 * @throws IllegalArgumentException If {@code blockSize} is less than 0 or greater than 100MB.
 * @throws UncheckedIOException If an I/O error occurs.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<PathProperties>> readToFileWithResponse(ReadToFileOptions options) {
    if (options == null) {
        // Fail fast with a descriptive error: previously only the UPN supplier guarded against null,
        // while options.getFilePath() below dereferenced options unconditionally, producing a
        // message-less NPE. Surfaced through the Mono to follow reactive error-handling conventions.
        return Mono.error(new NullPointerException("'options' cannot be null."));
    }

    // Propagate the x-ms-upn request header via the context when the caller requested
    // user-principal-name resolution.
    Context context = BuilderHelper.addUpnHeader(options::isUserPrincipalName, null);

    // Delegate to the underlying block blob client, translating Data Lake option/result types to their
    // blob equivalents and mapping blob storage exceptions back to Data Lake ones.
    return blockBlobAsyncClient.downloadToFileWithResponse(new BlobDownloadToFileOptions(options.getFilePath())
        .setRange(Transforms.toBlobRange(options.getRange()))
        .setParallelTransferOptions(options.getParallelTransferOptions())
        .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(options.getDownloadRetryOptions()))
        .setRequestConditions(Transforms.toBlobRequestConditions(options.getDataLakeRequestConditions()))
        .setRetrieveContentRangeMd5(options.isRangeGetContentMd5())
        .setOpenOptions(options.getOpenOptions()))
        .contextWrite(FluxUtil.toReactorContext(context))
        .onErrorMap(DataLakeImplUtils::transformBlobStorageException)
        .map(response -> new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue(), response)));
}

/**
* Moves the file to another location within the file system.
* For more information see the
Expand Down
Loading

0 comments on commit f10cd0f

Please sign in to comment.