Remove final complex methods
thecoop committed Oct 8, 2024
1 parent 8ef5f3b commit eb11f5d
Showing 6 changed files with 123 additions and 167 deletions.
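The common thread across these files: chunked XContent output that was hand-assembled from Iterators.* and ChunkedToXContentHelper calls now goes through the fluent ChunkedToXContent.builder API. Restating the first hunk below as a before/after sketch (customs and outerParams are the variables from that file):

// before: per-custom chunks composed by hand, each wrapped in an object named by its key
Iterators.flatMap(
    customs.entrySet().iterator(),
    cursor -> ChunkedToXContentHelper.wrapWithObject(cursor.getKey(), cursor.getValue().toXContentChunked(outerParams))
)

// after: the builder emits the same XContent structure
ChunkedToXContent.builder(outerParams)
    .forEach(customs.entrySet().iterator(), (b, e) -> b.xContentObject(e.getKey(), e.getValue()))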
@@ -759,10 +759,8 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerP

// customs
metrics.contains(Metric.CUSTOMS)
? Iterators.flatMap(
customs.entrySet().iterator(),
cursor -> ChunkedToXContentHelper.wrapWithObject(cursor.getKey(), cursor.getValue().toXContentChunked(outerParams))
)
? ChunkedToXContent.builder(outerParams)
.forEach(customs.entrySet().iterator(), (b, e) -> b.xContentObject(e.getKey(), e.getValue()))
: Collections.emptyIterator()
);
}
@@ -263,6 +263,16 @@ public <T> ChunkedToXContentBuilder array(Iterator<T> items, BiConsumer<ChunkedT
return this;
}

/**
* Creates an array with the contents set by appending together the contents of {@code items}
*/
public ChunkedToXContentBuilder array(Iterator<? extends ToXContent> items) {
startArray();
items.forEachRemaining(this::append);
endArray();
return this;
}

/**
* Creates an array, with the contents set by appending together
* the return values of {@code create} called on each item returned by {@code items}
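A usage sketch for the new array(Iterator<? extends ToXContent>) overload added in this hunk, modeled on the array test added later in this commit (the string values and the java.util.stream.Stream plumbing are illustrative, not from the original code):

// A chunked object whose content is the array ["a", "b"]; each element becomes its own ToXContent chunk.
ChunkedToXContentObject values = params -> new ChunkedToXContentBuilder(params)
    .array(Stream.of("a", "b").<ToXContent>map(s -> (b, p) -> b.value(s)).iterator());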
@@ -53,26 +53,10 @@ public static Iterator<ToXContent> field(String name, String value) {
return Iterators.single(((builder, params) -> builder.field(name, value)));
}

/**
* Creates an Iterator to serialize a named field where the value is represented by a {@link ChunkedToXContentObject}.
* Chunked equivalent for {@code XContentBuilder field(String name, ToXContent value)}
* @param name name of the field
* @param value value for this field
* @param params params to propagate for XContent serialization
* @return Iterator composing field name and value serialization
*/
public static Iterator<ToXContent> field(String name, ChunkedToXContentObject value, ToXContent.Params params) {
return Iterators.concat(Iterators.single((builder, innerParam) -> builder.field(name)), value.toXContentChunked(params));
}

public static Iterator<ToXContent> array(String name, Iterator<? extends ToXContent> contents) {
return Iterators.concat(ChunkedToXContentHelper.startArray(name), contents, ChunkedToXContentHelper.endArray());
}

public static <T extends ToXContent> Iterator<ToXContent> wrapWithObject(String name, Iterator<T> iterator) {
return Iterators.concat(startObject(name), iterator, endObject());
}

/**
* Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link
* Iterators#single}, but still useful because it avoids any type ambiguity.
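For callers of the deleted helpers, the call sites updated elsewhere in this commit suggest the builder-based replacements. A rough mapping, not an exhaustive one; the builder method names are taken from the other files in this diff:

// ChunkedToXContentHelper.field(name, chunkedValue, params)  ->  b.field(name, chunkedValue) on a ChunkedToXContent.builder(params)
// ChunkedToXContentHelper.wrapWithObject(name, chunks)       ->  b.xContentObject(name, chunkedValue)
// ChunkedToXContentHelper.array(name, contents)              ->  one of the builder's array(...) overloads
//
// Concretely, in SingleNodeShutdownStatus further down:
//   ChunkedToXContentHelper.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus, params)
// becomes
//   b.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus)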
@@ -0,0 +1,88 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ToXContent;

import java.util.function.IntFunction;
import java.util.stream.IntStream;

import static org.hamcrest.Matchers.equalTo;

public class ChunkedToXContentBuilderTests extends ESTestCase {

public void testFieldWithInnerChunkedObject() {

ToXContent innerXContent = (b, p) -> {
b.startObject();
b.field("field1", 10);
b.field("field2", "aaa");
b.endObject();
return b;
};

ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent);

String expectedContent = Strings.toString(outerXContent);

ChunkedToXContentObject innerChunkedContent = params -> new ChunkedToXContentBuilder(params).object(
o -> o.field("field1", 10).field("field2", "aaa")
);

ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10)
.field("field4", innerChunkedContent);

assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent));
}

public void testFieldWithInnerChunkedArray() {

ToXContent innerXContent = (b, p) -> {
b.startArray();
b.value(10);
b.value(20);
b.endArray();
return b;
};

ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent);

String expectedContent = Strings.toString(outerXContent);

IntFunction<ToXContent> value = v -> (b, p) -> b.value(v);

ChunkedToXContentObject innerChunkedContent = params -> new ChunkedToXContentBuilder(params).array(
IntStream.of(10, 20).mapToObj(value).iterator()
);

ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10)
.field("field4", innerChunkedContent);

assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent));
}

public void testFieldWithInnerChunkedField() {

ToXContent innerXContent = (b, p) -> b.value(10);
ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent);

String expectedContent = Strings.toString(outerXContent);

ChunkedToXContentObject innerChunkedContent = params -> Iterators.single((b, p) -> b.value(10));

ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10)
.field("field4", innerChunkedContent);

assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent));
}
}

This file was deleted.

@@ -12,11 +12,10 @@
import org.elasticsearch.cluster.metadata.ShutdownShardMigrationStatus;
import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContent;
@@ -25,10 +24,6 @@
import java.util.Iterator;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endObject;
import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk;
import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject;

public class SingleNodeShutdownStatus implements Writeable, ChunkedToXContentObject {

private final SingleNodeShutdownMetadata metadata;
@@ -116,26 +111,27 @@ public String toString() {

@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return Iterators.concat(startObject(), singleChunk((builder, p) -> {
builder.field(SingleNodeShutdownMetadata.NODE_ID_FIELD.getPreferredName(), metadata.getNodeId());
builder.field(SingleNodeShutdownMetadata.TYPE_FIELD.getPreferredName(), metadata.getType());
builder.field(SingleNodeShutdownMetadata.REASON_FIELD.getPreferredName(), metadata.getReason());
if (metadata.getAllocationDelay() != null) {
builder.field(
SingleNodeShutdownMetadata.ALLOCATION_DELAY_FIELD.getPreferredName(),
metadata.getAllocationDelay().getStringRep()
return ChunkedToXContent.builder(params).object(b -> {
b.append((builder, p) -> {
builder.field(SingleNodeShutdownMetadata.NODE_ID_FIELD.getPreferredName(), metadata.getNodeId());
builder.field(SingleNodeShutdownMetadata.TYPE_FIELD.getPreferredName(), metadata.getType());
builder.field(SingleNodeShutdownMetadata.REASON_FIELD.getPreferredName(), metadata.getReason());
if (metadata.getAllocationDelay() != null) {
builder.field(
SingleNodeShutdownMetadata.ALLOCATION_DELAY_FIELD.getPreferredName(),
metadata.getAllocationDelay().getStringRep()
);
}
builder.timeField(
SingleNodeShutdownMetadata.STARTED_AT_MILLIS_FIELD.getPreferredName(),
SingleNodeShutdownMetadata.STARTED_AT_READABLE_FIELD,
metadata.getStartedAtMillis()
);
}
builder.timeField(
SingleNodeShutdownMetadata.STARTED_AT_MILLIS_FIELD.getPreferredName(),
SingleNodeShutdownMetadata.STARTED_AT_READABLE_FIELD,
metadata.getStartedAtMillis()
);
builder.field(STATUS.getPreferredName(), overallStatus());
return builder;
}),
ChunkedToXContentHelper.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus, params),
singleChunk((builder, p) -> {
builder.field(STATUS.getPreferredName(), overallStatus());
return builder;
});
b.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus);
b.append((builder, p) -> {
builder.field(PERSISTENT_TASKS_FIELD.getPreferredName(), persistentTasksStatus);
builder.field(PLUGINS_STATUS.getPreferredName(), pluginsStatus);
if (metadata.getTargetNodeName() != null) {
@@ -148,8 +144,7 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params
);
}
return builder;
}),
endObject()
);
});
});
}
}
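The rewrite above swaps the Iterators.concat(startObject(), singleChunk(...), ..., endObject()) composition for a single ChunkedToXContent.builder(params).object(...) call. Note the two ways fields are added: b.append(...) contributes the given ToXContent as one chunk (so several plain fields can share a chunk, as singleChunk did), while b.field(name, chunkedValue) defers to the value's own chunking. A minimal sketch of that shape, rendered the way the new ChunkedToXContentBuilderTests check output (field names and values are placeholders, not the shutdown fields):

ChunkedToXContentObject inner = params -> new ChunkedToXContentBuilder(params)
    .object(o -> o.field("field1", 10).field("field2", "aaa"));

ChunkedToXContent outer = params -> ChunkedToXContent.builder(params).object(b -> {
    b.append((builder, p) -> builder.field("field3", 10)); // simple fields grouped into one chunk
    b.field("field4", inner);                              // nested chunked value contributes its own chunks
});

String json = Strings.toString(outer); // drains the chunk iterator into a single rendered document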
