
Commit

Merge pull request Azure#405 from rickle-msft/dev
Dev
rickle-msft authored Nov 16, 2018
2 parents 9df20cf + d0b4b88 commit 348cb4d
Showing 5 changed files with 93 additions and 28 deletions.
3 changes: 3 additions & 0 deletions ChangeLog.txt
@@ -1,3 +1,6 @@
XXXX.XX.XX Version XX.X.X
* Fixed a bug in hierarchical listings that would sometimes return incomplete results.

2018.10.29 Version 10.2.0
* Added overloads which only accept the required parameters.
* Added CopyFromURL, which will do a synchronous server-side copy, meaning the service will not return an HTTP response until it has completed the copy.
BlobHierarchyListSegment.java
@@ -11,7 +11,7 @@
package com.microsoft.azure.storage.blob.models;

import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import java.util.ArrayList;
import java.util.List;
@@ -20,6 +20,7 @@
* The BlobHierarchyListSegment model.
*/
@JacksonXmlRootElement(localName = "Blobs")
+@JsonDeserialize(using = CustomHierarchicalListingDeserializer.class)
public final class BlobHierarchyListSegment {
/**
* The blobPrefixes property.
CustomHierarchicalListingDeserializer.java (new file)
@@ -0,0 +1,57 @@
/*
* Copyright Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.microsoft.azure.storage.blob.models;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.deser.ResolvableDeserializer;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.type.TypeFactory;

import java.io.IOException;
import java.util.ArrayList;

// implement ContextualDeserializer or ResolvableDeserializer?
final class CustomHierarchicalListingDeserializer extends JsonDeserializer<BlobHierarchyListSegment> {

@Override
public BlobHierarchyListSegment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
ArrayList<BlobItem> blobItems = new ArrayList<>();
ArrayList<BlobPrefix> blobPrefixes = new ArrayList<>();

JsonDeserializer<Object> blobItemDeserializer =
ctxt.findRootValueDeserializer(ctxt.constructType(BlobItem.class));
JsonDeserializer<Object> blobPrefixDeserializer =
ctxt.findRootValueDeserializer(ctxt.constructType(BlobPrefix.class));

for (JsonToken currentToken = p.nextToken(); !currentToken.name().equals("END_OBJECT");
currentToken = p.nextToken()) {
// Get to the root element of the next item.
p.nextToken();

if (p.getCurrentName().equals("Blob")) {
blobItems.add((BlobItem)blobItemDeserializer.deserialize(p, ctxt));
}
else if (p.getCurrentName().equals("BlobPrefix")) {
blobPrefixes.add((BlobPrefix)blobPrefixDeserializer.deserialize(p, ctxt));
}
}

return new BlobHierarchyListSegment().withBlobItems(blobItems).withBlobPrefixes(blobPrefixes);
}
}
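
Note on the technique: the new deserializer walks the children of the <Blobs> element token by token and dispatches on the element name, so interleaved <Blob> and <BlobPrefix> entries are all collected — the kind of incomplete hierarchical listing the ChangeLog entry describes. Below is a minimal, self-contained sketch of the same token-walking approach against jackson-dataformat-xml; InterleavedXmlDemo, Segment, Blob, and Prefix (and the sample XML) are hypothetical stand-ins for illustration, not the SDK's generated models.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class InterleavedXmlDemo {

    // Hypothetical stand-ins for the generated BlobItem/BlobPrefix models.
    static class Blob { @JsonProperty("Name") public String name; }
    static class Prefix { @JsonProperty("Name") public String name; }

    @JsonDeserialize(using = SegmentDeserializer.class)
    static class Segment {
        final List<Blob> blobs = new ArrayList<>();
        final List<Prefix> prefixes = new ArrayList<>();
    }

    // Walks the children of the root element one at a time, dispatching on the
    // element name, so interleaved <Blob> and <BlobPrefix> entries are all kept.
    static class SegmentDeserializer extends JsonDeserializer<Segment> {
        @Override
        public Segment deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
            Segment segment = new Segment();
            // The parser starts on START_OBJECT of the root element; iterate its child elements.
            for (JsonToken t = p.nextToken(); t != JsonToken.END_OBJECT; t = p.nextToken()) {
                String elementName = p.getCurrentName();
                p.nextToken(); // advance onto the child element's own START_OBJECT
                if ("Blob".equals(elementName)) {
                    segment.blobs.add(ctxt.readValue(p, Blob.class));
                } else if ("BlobPrefix".equals(elementName)) {
                    segment.prefixes.add(ctxt.readValue(p, Prefix.class));
                }
            }
            return segment;
        }
    }

    public static void main(String[] args) throws IOException {
        String xml = "<Blobs>"
                + "<Blob><Name>a</Name></Blob>"
                + "<BlobPrefix><Name>b/</Name></BlobPrefix>"
                + "<Blob><Name>c</Name></Blob>"
                + "</Blobs>";
        Segment segment = new XmlMapper().readValue(xml, Segment.class);
        System.out.println(segment.blobs.size() + " blobs, " + segment.prefixes.size() + " prefixes");
    }
}

Running the sketch should report two blobs and one prefix for the interleaved sample; the updated "List blobs hier delim" test below asserts the analogous behavior against the real listing response.
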
31 changes: 14 additions & 17 deletions src/test/java/com/microsoft/azure/storage/BlockBlobAPITest.groovy
@@ -455,30 +455,27 @@ class BlockBlobAPITest extends APISpec {

def "Get block list"() {
setup:
-String blockID = getBlockID()
-String blockID2 = getBlockID()
-bu.stageBlock(blockID, defaultFlowable, defaultDataSize, null, null).blockingGet()
-bu.stageBlock(blockID2, defaultFlowable, defaultDataSize, null, null).blockingGet()
-bu.commitBlockList(Arrays.asList(blockID, blockID2), null, null, null, null).blockingGet()
+List<String> committedBlocks = Arrays.asList(getBlockID(), getBlockID())
+bu.stageBlock(committedBlocks.get(0), defaultFlowable, defaultDataSize, null, null).blockingGet()
+bu.stageBlock(committedBlocks.get(1), defaultFlowable, defaultDataSize, null, null).blockingGet()
+bu.commitBlockList(committedBlocks, null, null, null, null).blockingGet()

-String blockID3 = getBlockID()
-String blockID4 = getBlockID()
-bu.stageBlock(blockID3, defaultFlowable, defaultDataSize, null, null).blockingGet()
-bu.stageBlock(blockID4, defaultFlowable, defaultDataSize, null, null).blockingGet()
+List<String> uncommittedBlocks = Arrays.asList(getBlockID(), getBlockID())
+bu.stageBlock(uncommittedBlocks.get(0), defaultFlowable, defaultDataSize, null, null).blockingGet()
+bu.stageBlock(uncommittedBlocks.get(1), defaultFlowable, defaultDataSize, null, null).blockingGet()
+uncommittedBlocks.sort(true)

when:
BlockBlobGetBlockListResponse response = bu.getBlockList(BlockListType.ALL, null, null)
.blockingGet()

then:
-response.body().committedBlocks().get(0).name() == blockID
-response.body().committedBlocks().get(0).size() == defaultDataSize
-response.body().committedBlocks().get(1).name() == blockID2
-response.body().committedBlocks().get(1).size() == defaultDataSize
-response.body().uncommittedBlocks().get(0).name() == blockID3
-response.body().uncommittedBlocks().get(0).size() == defaultDataSize
-response.body().uncommittedBlocks().get(1).name() == blockID4
-response.body().uncommittedBlocks().get(1).size() == defaultDataSize
+for (int i = 0; i < committedBlocks.size(); i++) {
+    assert response.body().committedBlocks().get(i).name() == committedBlocks.get(i)
+    assert response.body().committedBlocks().get(i).size() == defaultDataSize
+    assert response.body().uncommittedBlocks().get(i).name() == uncommittedBlocks.get(i)
+    assert response.body().uncommittedBlocks().get(i).size() == defaultDataSize
+}
validateBasicHeaders(response.headers())
response.headers().contentType() != null
response.headers().blobContentLength() == defaultDataSize * 2L
27 changes: 17 additions & 10 deletions src/test/java/com/microsoft/azure/storage/ContainerAPITest.groovy
@@ -1070,22 +1070,29 @@ class ContainerAPITest extends APISpec {

def "List blobs hier delim"() {
setup:
AppendBlobURL blob = cu.createAppendBlobURL("a")
blob.create(null, null, null, null).blockingGet()
AppendBlobURL dir = cu.createAppendBlobURL("b/")
dir.create(null, null, null, null).blockingGet()
AppendBlobURL subBlob = cu.createAppendBlobURL("b/c")
subBlob.create(null, null, null, null).blockingGet()
def blobNames = Arrays.asList("a", "b/a", "c", "d/a", "e", "f", "g/a")
for (String blobName : blobNames) {
def bu = cu.createAppendBlobURL(blobName)
bu.create().blockingGet()
}

when:
ContainerListBlobHierarchySegmentResponse response =
cu.listBlobsHierarchySegment(null, "/", null, null).blockingGet()

+and:
+def expectedBlobs = Arrays.asList("a", "c", "e", "f")
+def expectedPrefixes = Arrays.asList("b/", "d/", "g/")

then:
-response.body().segment().blobPrefixes().size() == 1
-response.body().segment().blobPrefixes().get(0).name() == "b/"
-response.body().segment().blobItems().size() == 1
-response.body().segment().blobItems().get(0).name() == "a"
+response.body().segment().blobItems().size() == 4
+for (int i=0; i<expectedBlobs.size(); i++) {
+    assert expectedBlobs.get(i) == response.body().segment().blobItems().get(i).name()
+}
+for (int i=0; i<expectedPrefixes.size(); i++) {
+    assert expectedPrefixes.get(i) == response.body().segment().blobPrefixes().get(i).name()
+}
+response.body().segment().blobPrefixes().size() == 3
}

def "List blobs hier marker"() {
