Skip to content

Commit

Permalink
Move remaining test-only x-content parsers out of search production c…
Browse files Browse the repository at this point in the history
…ode (#113650)

This moves the remaining few parsers out of prod code now that we only
need this logic for testing with the HLRC gone.
  • Loading branch information
original-brownbear authored Sep 27, 2024
1 parent b4334f1 commit c498dae
Show file tree
Hide file tree
Showing 14 changed files with 206 additions and 223 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -12,32 +12,25 @@
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchException;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;

/**
* Represents a failure to search on a specific shard.
*/
public class ShardSearchFailure extends ShardOperationFailedException {

private static final String REASON_FIELD = "reason";
private static final String NODE_FIELD = "node";
private static final String INDEX_FIELD = "index";
private static final String SHARD_FIELD = "shard";
public static final String REASON_FIELD = "reason";
public static final String NODE_FIELD = "node";
public static final String INDEX_FIELD = "index";
public static final String SHARD_FIELD = "shard";

public static final ShardSearchFailure[] EMPTY_ARRAY = new ShardSearchFailure[0];

Expand Down Expand Up @@ -125,48 +118,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}

/**
 * Parses a {@code ShardSearchFailure} from x-content. The parser must be positioned on the
 * START_OBJECT token of the failure entry.
 *
 * @param parser x-content parser positioned on the failure object
 * @return the parsed failure; its shard target is {@code null} when no "node" field was present
 * @throws IOException if the underlying parser fails
 */
public static ShardSearchFailure fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token;
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
String currentFieldName = null;
// defaults used when the corresponding field is absent from the object
int shardId = -1;
String indexName = null;
String clusterAlias = null;
String nodeId = null;
ElasticsearchException exception = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (SHARD_FIELD.equals(currentFieldName)) {
shardId = parser.intValue();
} else if (INDEX_FIELD.equals(currentFieldName)) {
// the index may be qualified with a remote cluster alias ("cluster:index");
// splitIndexName returns [alias, index] with a null alias for local indices
String[] split = RemoteClusterAware.splitIndexName(parser.text());
clusterAlias = split[0];
indexName = split[1];
} else if (NODE_FIELD.equals(currentFieldName)) {
nodeId = parser.text();
} else {
// unknown scalar field: ignore for forward compatibility
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (REASON_FIELD.equals(currentFieldName)) {
// the nested "reason" object carries the serialized exception
exception = ElasticsearchException.fromXContent(parser);
} else {
parser.skipChildren();
}
} else {
parser.skipChildren();
}
}
// only build a shard target when a node id was parsed; INDEX_UUID_NA_VALUE is used
// because the real index UUID is not part of the x-content representation
SearchShardTarget searchShardTarget = null;
if (nodeId != null) {
searchShardTarget = new SearchShardTarget(
nodeId,
new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId),
clusterAlias
);
}
return new ShardSearchFailure(exception, searchShardTarget);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,9 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.SearchHit.Fields;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;
import java.util.Arrays;
Expand Down Expand Up @@ -84,11 +82,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}

/**
 * Reads a {@code SearchSortValues} instance from x-content. The parser must be positioned
 * on the START_ARRAY token of the sort values array.
 *
 * @param parser x-content parser positioned on the sort values array
 * @return the parsed sort values
 * @throws IOException if the underlying parser fails
 */
public static SearchSortValues fromXContent(XContentParser parser) throws IOException {
    XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
    final Object[] rawValues = parser.list().toArray();
    return new SearchSortValues(rawValues);
}

/**
* Returns the formatted version of the values that sorting was performed against
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@
*/
package org.elasticsearch.search.aggregations;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.DelayableWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
Expand All @@ -22,21 +20,18 @@
import org.elasticsearch.search.sort.SortValue;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject;

/**
* Represents a set of {@link InternalAggregation}s
Expand All @@ -52,7 +47,7 @@ public final class InternalAggregations implements Iterable<InternalAggregation>
/**
* Constructs a new aggregation.
*/
private InternalAggregations(List<InternalAggregation> aggregations) {
public InternalAggregations(List<InternalAggregation> aggregations) {
this.aggregations = aggregations;
if (aggregations.isEmpty()) {
aggregationsAsMap = Map.of();
Expand Down Expand Up @@ -126,27 +121,6 @@ public XContentBuilder toXContentInternal(XContentBuilder builder, Params params
return builder;
}

/**
 * Parses an {@code InternalAggregations} instance from x-content. Each aggregation is
 * expected to be keyed with the typed-keys format ("type#name"); entries whose type
 * cannot be resolved cause a {@link ParsingException}.
 *
 * @param parser x-content parser positioned inside the aggregations object
 * @return the parsed aggregations
 * @throws IOException if the underlying parser fails
 */
public static InternalAggregations fromXContent(XContentParser parser) throws IOException {
final List<InternalAggregation> aggregations = new ArrayList<>();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.START_OBJECT) {
// SetOnce receives the parsed aggregation via the callback below; it stays
// unset when the typed key could not be mapped to a registered parser
SetOnce<InternalAggregation> typedAgg = new SetOnce<>();
String currentField = parser.currentName();
parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set);
if (typedAgg.get() != null) {
aggregations.add(typedAgg.get());
} else {
throw new ParsingException(
parser.getTokenLocation(),
String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField)
);
}
}
}
return new InternalAggregations(aggregations);
}

public static InternalAggregations from(List<InternalAggregation> aggregations) {
if (aggregations.isEmpty()) {
return EMPTY;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,23 +9,17 @@

package org.elasticsearch.search.fetch.subphase.highlight;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;

/**
* A field highlighted with its highlighted fragments.
*/
Expand Down Expand Up @@ -74,25 +68,6 @@ public void writeTo(StreamOutput out) throws IOException {
}
}

/**
 * Parses a {@code HighlightField} from x-content. The parser must be positioned on the
 * FIELD_NAME token carrying the highlighted field's name; the following token is either
 * an array of fragment strings or an explicit null.
 *
 * @param parser x-content parser positioned on the field name
 * @return the parsed highlight field; its fragments are {@code null} when the value was null
 * @throws IOException if the underlying parser fails
 */
public static HighlightField fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser);
    final String name = parser.currentName();
    final XContentParser.Token valueToken = parser.nextToken();
    final Text[] fragments;
    if (valueToken == XContentParser.Token.VALUE_NULL) {
        // an explicitly null value means "no fragments"
        fragments = null;
    } else if (valueToken == XContentParser.Token.START_ARRAY) {
        final List<Text> collected = new ArrayList<>();
        for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_ARRAY; t = parser.nextToken()) {
            collected.add(new Text(parser.text()));
        }
        fragments = collected.toArray(Text.EMPTY_ARRAY);
    } else {
        throw new ParsingException(parser.getTokenLocation(), "unexpected token type [" + valueToken + "]");
    }
    return new HighlightField(name, fragments);
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(name);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,9 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.InstantiatingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;
import java.util.Collections;
Expand All @@ -27,22 +25,18 @@
import java.util.Objects;
import java.util.concurrent.TimeUnit;

import static java.util.stream.Collectors.toMap;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;

/**
* The result of a profiled *thing*, like a query or an aggregation. See
* {@link AbstractProfiler} for the statistic collection framework.
*/
public final class ProfileResult implements Writeable, ToXContentObject {
static final ParseField TYPE = new ParseField("type");
static final ParseField DESCRIPTION = new ParseField("description");
static final ParseField BREAKDOWN = new ParseField("breakdown");
static final ParseField DEBUG = new ParseField("debug");
public static final ParseField TYPE = new ParseField("type");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField BREAKDOWN = new ParseField("breakdown");
public static final ParseField DEBUG = new ParseField("debug");
static final ParseField NODE_TIME = new ParseField("time");
static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
static final ParseField CHILDREN = new ParseField("children");
public static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
public static final ParseField CHILDREN = new ParseField("children");

private final String type;
private final String description;
Expand Down Expand Up @@ -181,28 +175,4 @@ public int hashCode() {
public String toString() {
return Strings.toString(this);
}

// Declarative parser for the ProfileResult x-content representation; built once and
// reused for every fromXContent call. Lenient (second argument true) so unknown fields
// are ignored for forward compatibility.
private static final InstantiatingObjectParser<ProfileResult, Void> PARSER;
static {
InstantiatingObjectParser.Builder<ProfileResult, Void> parser = InstantiatingObjectParser.builder(
"profile_result",
true,
ProfileResult.class
);
parser.declareString(constructorArg(), TYPE);
parser.declareString(constructorArg(), DESCRIPTION);
// breakdown is a flat map of timing counters; values are coerced to long
parser.declareObject(
constructorArg(),
(p, c) -> p.map().entrySet().stream().collect(toMap(Map.Entry::getKey, e -> ((Number) e.getValue()).longValue())),
BREAKDOWN
);
parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), DEBUG);
parser.declareLong(constructorArg(), NODE_TIME_RAW);
// children are nested ProfileResult objects, parsed recursively
parser.declareObjectArray(optionalConstructorArg(), (p, c) -> fromXContent(p), CHILDREN);
PARSER = parser.build();
}

/**
 * Parses a {@code ProfileResult} from x-content using the shared {@link #PARSER}.
 *
 * @param p x-content parser positioned on the profile result object
 * @return the parsed profile result
 * @throws IOException if the underlying parser fails
 */
public static ProfileResult fromXContent(XContentParser p) throws IOException {
return PARSER.parse(p, null);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,12 @@
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;

/**
* A container class to hold the profile results for a single shard in the request.
* Contains a list of query profiles, a collector tree and a total rewrite tree.
Expand Down Expand Up @@ -87,13 +84,4 @@ public String toString() {
return Strings.toString(this);
}

/**
 * Parses an {@code AggregationProfileShardResult} from x-content. The parser must be
 * positioned on the START_ARRAY token of the per-aggregation profile results.
 *
 * @param parser x-content parser positioned on the profile results array
 * @return the parsed shard-level aggregation profile result
 * @throws IOException if the underlying parser fails
 */
public static AggregationProfileShardResult fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
    final List<ProfileResult> results = new ArrayList<>();
    // each array element is a nested ProfileResult object
    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
        results.add(ProfileResult.fromXContent(parser));
    }
    return new AggregationProfileShardResult(results);
}
}
Loading

0 comments on commit c498dae

Please sign in to comment.