From c99fd396b1ed52742a77e62fce0b8e2792c3c108 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Thu, 30 May 2024 12:58:02 -0400 Subject: [PATCH 001/417] Bumping index version for new lucene snapshot version --- server/src/main/java/org/elasticsearch/index/IndexVersions.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 1a933a396108e..6c6a6c147e0ef 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -108,6 +108,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_00_0, Version.LUCENE_9_10_0); public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_00_0, Version.LUCENE_9_11_0); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_510_00_0, Version.LUCENE_9_12_0); /* * STOP! READ THIS FIRST! 
No, really, From fdeb0d772156debd1f4221609cd6f62f7b9c5ade Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 31 May 2024 06:11:38 +0000 Subject: [PATCH 002/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-c5ea94fbc6a --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 74bd1612cc697..fe90adb9c9c19 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2701,122 +2701,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From dd1863541b0eb22034198a69c84c9cd2afb406c8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 1 Jun 2024 06:10:40 +0000 Subject: [PATCH 003/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-1e660eee72d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f44d351b16b81..62bcd0d90053b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-c5ea94fbc6a +lucene = 9.12.0-snapshot-1e660eee72d bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fe90adb9c9c19..f0a3f0879458f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + 
- - - + + + - - - + + + From aeaaff8a515221b424ec36a4ff0294b5d6a90c4f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 2 Jun 2024 06:11:10 +0000 Subject: [PATCH 004/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-84ff7364b9a --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 62bcd0d90053b..7dc236f3352ca 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-1e660eee72d +lucene = 9.12.0-snapshot-84ff7364b9a bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f0a3f0879458f..1129db34407d1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 2108dc34ea254daddb723750f12b940b9e8a4f1a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 3 Jun 2024 06:11:38 +0000 Subject: [PATCH 005/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-d9659814e59 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7dc236f3352ca..c75eb5417ee9c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ 
elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-84ff7364b9a +lucene = 9.12.0-snapshot-d9659814e59 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1129db34407d1..912364a136ef1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 4b2ebf52495f89bc335b10d8bd7336f8b7faa43c Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Mon, 3 Jun 2024 18:25:44 +0100 Subject: [PATCH 006/417] Add a couple of missing supported checks to org.elasticsearch.vec.VectorScorerFactoryTests (#109303) This commit adds a couple of missing supported checks to org.elasticsearch.vec.VectorScorerFactoryTests. Without these the test try, and fail, to run on Windows. 
--- .../java/org/elasticsearch/vec/VectorScorerFactoryTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java index 742585722c8a2..dfd26e887b2ba 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java @@ -223,7 +223,6 @@ public void testRandomScorerMax() throws IOException { } public void testRandomScorerChunkSizeSmall() throws IOException { - assumeTrue(notSupportedMsg(), supported()); long maxChunkSize = randomLongBetween(32, 128); logger.info("maxChunkSize=" + maxChunkSize); testRandomScorerImpl(maxChunkSize, FLOAT_ARRAY_RANDOM_FUNC); @@ -231,6 +230,7 @@ public void testRandomScorerChunkSizeSmall() throws IOException { void testRandomScorerImpl(long maxChunkSize, Function floatArraySupplier) throws IOException { assumeTrue("scorer only supported on JDK 22+", Runtime.version().feature() >= 22); + assumeTrue(notSupportedMsg(), supported()); var factory = AbstractVectorTestCase.factory.get(); var scalarQuantizer = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); @@ -321,6 +321,7 @@ void testRandomSliceImpl(int dims, long maxChunkSize, int initialPadding, Functi // Tests with a large amount of data (> 2GB), which ensures that data offsets do not overflow @Nightly public void testLarge() throws IOException { + assumeTrue(notSupportedMsg(), supported()); var factory = AbstractVectorTestCase.factory.get(); try (Directory dir = new MMapDirectory(createTempDir("testLarge"))) { From 54ea6874e5c7faeb6be273427443e5f985344fda Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 4 Jun 2024 06:12:11 +0000 Subject: [PATCH 007/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-e8801bf91d5 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 
++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index c75eb5417ee9c..e3e334cbc4f10 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-d9659814e59 +lucene = 9.12.0-snapshot-e8801bf91d5 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 912364a136ef1..6000be5793dbb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From cb241947af72ede4dbd3513a2a8f1b538779b5d5 Mon Sep 17 00:00:00 2001 From: john-wagster Date: Tue, 4 Jun 2024 11:37:02 -0500 Subject: [PATCH 008/417] Multivalue Sparse Vector Support (#109007) Updated LuceneDocument to take advantage of looking up feature values on existing features and selecting the max when parsing multi-value sparse vectors --- docs/changelog/109007.yaml | 5 + .../mapping/types/sparse-vector.asciidoc | 64 +++++- .../test/search.vectors/90_sparse_vector.yml | 203 ++++++++++++++++-- .../vectors/SparseVectorFieldMapper.java | 16 +- .../vectors/SparseVectorFieldMapperTests.java | 29 ++- 5 files changed, 285 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/109007.yaml diff --git a/docs/changelog/109007.yaml b/docs/changelog/109007.yaml new file mode 100644 index 0000000000000..c828db64220fb --- /dev/null +++ b/docs/changelog/109007.yaml @@ -0,0 +1,5 @@ +pr: 109007 +summary: Multivalue Sparse Vector Support +area: Search +type: 
enhancement +issues: [] diff --git a/docs/reference/mapping/types/sparse-vector.asciidoc b/docs/reference/mapping/types/sparse-vector.asciidoc index 6057b722a0c6b..a382753cb6ed3 100644 --- a/docs/reference/mapping/types/sparse-vector.asciidoc +++ b/docs/reference/mapping/types/sparse-vector.asciidoc @@ -26,10 +26,70 @@ PUT my-index See <> for a complete example on adding documents to a `sparse_vector` mapped field using ELSER. +[[index-multi-value-sparse-vectors]] +==== Multi-value sparse vectors + +When passing in arrays of values for sparse vectors the max value for similarly named features is selected. + +The paper Adapting Learned Sparse Retrieval for Long Documents (https://arxiv.org/pdf/2305.18494.pdf) discusses this in more detail. +In summary, research findings support representation aggregation typically outperforming score aggregation. + +For instances where you want to have overlapping feature names use should store them separately or use nested fields. + +Below is an example of passing in a document with overlapping feature names. +Consider that in this example two categories exist for positive sentiment and negative sentiment. +However, for the purposes of retrieval we also want the overall impact rather than specific sentiment. +In the example `impact` is stored as a multi-value sparse vector and only the max values of overlapping names are stored. 
+More specifically the final `GET` query here returns a `_score` of ~1.2 (which is the `max(impact.delicious[0], impact.delicious[1])` and is approximate because we have a relative error of 0.4% as explained below) + +[source,console] +-------------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "text": { + "type": "text", + "analyzer": "standard" + }, + "impact": { + "type": "sparse_vector" + }, + "positive": { + "type": "sparse_vector" + }, + "negative": { + "type": "sparse_vector" + } + } + } +} + +POST my-index-000001/_doc +{ + "text": "I had some terribly delicious carrots.", + "impact": [{"I": 0.55, "had": 0.4, "some": 0.28, "terribly": 0.01, "delicious": 1.2, "carrots": 0.8}, + {"I": 0.54, "had": 0.4, "some": 0.28, "terribly": 2.01, "delicious": 0.02, "carrots": 0.4}], + "positive": {"I": 0.55, "had": 0.4, "some": 0.28, "terribly": 0.01, "delicious": 1.2, "carrots": 0.8}, + "negative": {"I": 0.54, "had": 0.4, "some": 0.28, "terribly": 2.01, "delicious": 0.02, "carrots": 0.4} +} + +GET my-index-000001/_search +{ + "query": { + "term": { + "impact": { + "value": "delicious" + } + } + } +} +-------------------------------- + NOTE: `sparse_vector` fields can not be included in indices that were *created* on {es} versions between 8.0 and 8.10 -NOTE: `sparse_vector` fields only support single-valued fields and strictly positive values. -Multi-valued fields and negative values will be rejected. +NOTE: `sparse_vector` fields only support strictly positive values. +Negative values will be rejected. NOTE: `sparse_vector` fields do not support querying, sorting or aggregating. They may only be used within specialized queries. 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml index fa89a43561764..5d6c3800ad70d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml @@ -1,22 +1,25 @@ --- "Indexing and searching sparse vectors in >=8.11": + - skip: + cluster_features: [ "gte_v8.15.0" ] + reason: "sparse_vector field type was updated to support multi-value sparse vectors in 8.15.0" - requires: - cluster_features: ["gte_v8.11.0"] + cluster_features: [ "gte_v8.11.0" ] reason: "sparse_vector field type reintroduced in 8.11" - do: indices.create: - index: test - body: - mappings: - properties: - text: - type: text - ml.tokens: - type: sparse_vector - embeddings: - type: sparse_vector + index: test + body: + mappings: + properties: + text: + type: text + ml.tokens: + type: sparse_vector + embeddings: + type: sparse_vector - match: { acknowledged: true } @@ -149,12 +152,182 @@ field: embeddings - match: { hits.total: 1 } +--- +"Indexing and searching multi-value sparse vectors in >=8.15": + + - requires: + cluster_features: [ "gte_v8.15.0" ] + reason: "sparse_vector field type added multi-value support in 8.15" + test_runner_features: "close_to" + + - do: + indices.create: + index: test + body: + mappings: + properties: + text: + type: text + ml.tokens: + type: sparse_vector + embeddings: + type: sparse_vector + + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + body: + text: "running is good for you" + ml: + tokens: + - running: 2.4097164 + good: 2.170997 + run: 2.052153 + race: 0.1 + for: 1.1908325 + runner: 1.1803857 + exercise: 0.1 + you: 0.9654308 + training: 0.94999343 + sports: 0.93650943 + fitness: 0.83129317 + best: 0.820365 + bad: 0.1 + health: 0.1 + 
marathon: 0.61555296 + gym: 0.5652374 + - running: 0.1 + good: 0.1 + run: 0.1 + race: 1.4575411 + for: 0.1 + runner: 0.1 + exercise: 1.1652642 + you: 0.1 + training: 0.1 + sports: 0.1 + fitness: 0.1 + best: 0.1 + bad: 0.7385934 + health: 0.7098149 + marathon: 0.1 + gym: 0.1 + + - match: { result: "created" } + + - do: + index: + index: test + id: "2" + body: + text: "walking is a healthy exercise" + ml: + tokens: + walking: 2.4797723 + exercise: 2.074234 + healthy: 1.971596 + walk: 1.6458614 + health: 1.5291847 + walker: 1.4736869 + activity: 1.0793462 + good: 1.0597849 + fitness: 0.91855437 + training: 0.86342937 + movement: 0.7657065 + normal: 0.6694081 + foot: 0.5892523 + physical: 0.4926789 + + - match: { result: "created" } + + - do: + index: + index: test + id: "3" + body: + text: "empty array with no values - should not be retrieved in exists queries" + ml: + tokens: [ ] + - do: + index: + index: test + id: "4" + body: + text: "should still respond to exists queries if when empty" + ml: + tokens: { } + + - match: { result: "created" } + + - do: + index: + index: test + id: "5" + body: + text: "other embeddings available only" + embeddings: + aardvark: 0.5 + + - match: { result: "created" } + + - do: + indices.refresh: { } + + - do: + search: + index: test + body: + query: + bool: + should: + - term: + ml.tokens: + value: "walk" + boost: 1.9790847 + - term: + ml.tokens: + value: "walking" + boost: 1.7092685 + - term: + ml.tokens: + value: "exercise" + boost: 0.84076905 + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "1" } + + - close_to: { hits.hits.1._score: { value: 0.9797, error: 0.01 } } + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + exists: + field: ml.tokens + - match: { hits.total: 3 } + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + exists: + field: embeddings + - match: { hits.total: 1 } + --- "Sparse vector in 7.x": - 
requires: - test_runner_features: ["allowed_warnings"] + test_runner_features: [ "allowed_warnings" ] - skip: - cluster_features: ["gte_v8.0.0"] + cluster_features: [ "gte_v8.0.0" ] reason: "sparse_vector field type supported in 7.x" - do: allowed_warnings: @@ -184,10 +357,10 @@ --- "Sparse vector in 8.0.0 <= x < 8.11.0": - skip: - cluster_features: ["gte_v8.11.0"] + cluster_features: [ "gte_v8.11.0" ] reason: "sparse_vector field type not supported in 8.x until 8.11.0" - requires: - cluster_features: ["gte_v8.0.0"] + cluster_features: [ "gte_v8.0.0" ] reason: "sparse_vector field type not supported in 8.x until 8.11.0" - do: catch: /The \[sparse_vector\] field type .* supported/ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index 7b1e20a6cdda3..e07c9247072b9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.vectors; import org.apache.lucene.document.FeatureField; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; @@ -187,14 +188,15 @@ public void parse(DocumentParserContext context) throws IOException { } else if (token == Token.VALUE_NUMBER || token == Token.VALUE_STRING) { final String key = name() + "." 
+ feature; float value = context.parser().floatValue(true); - if (context.doc().getByKey(key) != null) { - throw new IllegalArgumentException( - "[sparse_vector] fields do not support indexing multiple values for the same feature [" - + key - + "] in the same document" - ); + + // if we have an existing feature of the same name we'll select for the one with the max value + // based on recommendations from this paper: https://arxiv.org/pdf/2305.18494.pdf + IndexableField currentField = context.doc().getByKey(key); + if (currentField == null) { + context.doc().addWithKey(key, new FeatureField(name(), feature, value)); + } else if (currentField instanceof FeatureField && ((FeatureField) currentField).getFeatureValue() < value) { + ((FeatureField) currentField).setFeatureValue(value); } - context.doc().addWithKey(key, new FeatureField(name(), feature, value)); } else { throw new IllegalArgumentException( "[sparse_vector] fields take hashes that map a feature to a strictly positive " diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java index 79f6768512b85..271f0c12be611 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -119,7 +119,8 @@ public void testDotInFieldName() throws Exception { assertThat(ex.getCause().getMessage(), containsString("politi.cs")); } - public void testRejectMultiValuedFields() throws MapperParsingException, IOException { + public void testHandlesMultiValuedFields() throws MapperParsingException, IOException { + // setup a mapping that includes a sparse vector property DocumentMapper mapper = createDocumentMapper(mapping(b -> { b.startObject("field").field("type", "sparse_vector").endObject(); b.startObject("foo").startObject("properties"); @@ 
-129,27 +130,39 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep b.endObject().endObject(); })); + // when providing a malformed list of values for a single field DocumentParsingException e = expectThrows( DocumentParsingException.class, () -> mapper.parse(source(b -> b.startObject("field").field("foo", Arrays.asList(10, 20)).endObject())) ); + + // then fail appropriately assertEquals( "[sparse_vector] fields take hashes that map a feature to a strictly positive float, but got unexpected token " + "START_ARRAY", e.getCause().getMessage() ); - e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source(b -> { + // when providing a two fields with the same key name + ParsedDocument doc1 = mapper.parse(source(b -> { b.startArray("foo"); { - b.startObject().startObject("field").field("bar", 10).endObject().endObject(); + b.startObject().startObject("field").field("coup", 1).endObject().endObject(); + b.startObject().startObject("field").field("bar", 5).endObject().endObject(); b.startObject().startObject("field").field("bar", 20).endObject().endObject(); + b.startObject().startObject("field").field("bar", 10).endObject().endObject(); + b.startObject().startObject("field").field("soup", 2).endObject().endObject(); } b.endArray(); - }))); - assertEquals( - "[sparse_vector] fields do not support indexing multiple values for the same feature [foo.field.bar] in " + "the same document", - e.getCause().getMessage() - ); + })); + + // then validate that the generate document stored both values appropriately and we have only the max value stored + FeatureField barField = ((FeatureField) doc1.rootDoc().getByKey("foo.field.bar")); + assertEquals(20, barField.getFeatureValue(), 1); + + FeatureField storedBarField = ((FeatureField) doc1.rootDoc().getFields("foo.field").get(1)); + assertEquals(20, storedBarField.getFeatureValue(), 1); + + assertEquals(3, doc1.rootDoc().getFields().stream().filter((f) -> f instanceof 
FeatureField).count()); } public void testCannotBeUsedInMultiFields() { From 2724b060bbaeed9584b898d9e5dfa4c8e7cec2fd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 5 Jun 2024 06:11:29 +0000 Subject: [PATCH 009/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-2ed1f2fe99f --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e3e334cbc4f10..720bb9ff0164c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-e8801bf91d5 +lucene = 9.12.0-snapshot-2ed1f2fe99f bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 6000be5793dbb..fddafe411abf3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0e4dab9e10b7a48af8470418c1a6fa745aa3eaea Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 29 Jun 2024 06:15:37 +0000 Subject: [PATCH 010/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-e8bf83f6bf6 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 720bb9ff0164c..147a81b6d6727 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-2ed1f2fe99f +lucene = 9.12.0-snapshot-e8bf83f6bf6 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3c3e5efc3f322..3e0c2222184a2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e9462b932dff53a62166319b663f1afcf62afe0c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 30 Jun 2024 06:15:10 +0000 Subject: [PATCH 011/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-6304105c104 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 147a81b6d6727..9df71cf898307 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-e8bf83f6bf6 +lucene = 9.12.0-snapshot-6304105c104 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3e0c2222184a2..20b7e9d013b09 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From ff0ec8681baf2a9a375ebc19cd0edcede58d59c1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 1 Jul 2024 06:15:44 +0000 Subject: [PATCH 012/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-6304105c104 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 20b7e9d013b09..e6c99808ec60f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2701,122 +2701,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 731fb0b6df8bd3d2b09ed7dc1ea646bb588fe56a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 2 Jul 2024 06:15:31 +0000 Subject: [PATCH 013/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-48816b4de42 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9df71cf898307..e31153bf041e6 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-6304105c104 +lucene = 9.12.0-snapshot-48816b4de42 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e6c99808ec60f..9cf4385eb09f0 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - 
- + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 9bbef83e8808a3709ff8e9020ce63b9ce2c3b07b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 3 Jul 2024 06:14:59 +0000 Subject: [PATCH 014/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-48816b4de42 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9cf4385eb09f0..2c0af83d6d74d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2701,122 +2701,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 2d67ecd8421b87db74f550103ba21c80b6ed2505 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 4 Jul 2024 06:16:08 +0000 Subject: [PATCH 015/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-48816b4de42 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2c0af83d6d74d..e2653aed581d8 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2701,122 +2701,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From ae3628975218a21f95afabb27bf04ccf74587f23 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 5 Jul 2024 06:15:59 +0000 Subject: [PATCH 016/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-e78f23738f4 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e31153bf041e6..876fdfd06b0c1 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.12.0-snapshot-48816b4de42 +lucene = 9.12.0-snapshot-e78f23738f4 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e2653aed581d8..4c62ea5298590 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2699,124 +2699,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 5b5055978bc8fd035db7e4a0c4d80e95cf8be2e6 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Jul 2024 12:41:09 +0200 Subject: [PATCH 017/417] Fix Lucene Snapshot branch to compile and pass tests Just doing the minimal fixes to making this pass tests again and compile. One trivial constructor adjustment and a few adjustments to task count expectations in some tests from having Lucene fork less. 
--- .../ES814ScalarQuantizedVectorsFormat.java | 1 + .../elasticsearch/search/SearchServiceTests.java | 15 ++++++++------- .../internal/ContextIndexSearcherTests.java | 8 +++++--- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 0d1c5efeb3e28..65d42fbd74dfc 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -60,6 +60,7 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { final FlatVectorsScorer flatVectorScorer; public ES814ScalarQuantizedVectorsFormat(Float confidenceInterval) { + super(NAME); if (confidenceInterval != null && (confidenceInterval < MINIMUM_CONFIDENCE_INTERVAL || confidenceInterval > MAXIMUM_CONFIDENCE_INTERVAL)) { throw new IllegalArgumentException( diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 5c175f792d399..bd3f1a94377ff 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -2741,7 +2741,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertBusy( () -> assertEquals( "DFS supports parallel collection, so the number of slices should be > 1.", - expectedSlices, + expectedSlices - 1, // one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2771,7 +2771,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should 
be > 1.", - expectedSlices, + expectedSlices - 1, // one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2785,8 +2785,9 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( () -> assertEquals( - "The number of slices should be 1 as FETCH does not support parallel collection.", - 1, + "The number of slices should be 1 as FETCH does not support parallel collection and thus runs on the calling" + + " thread.", + 0, executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2801,7 +2802,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertBusy( () -> assertEquals( "The number of slices should be 1 as NONE does not support parallel collection.", - 1, + 0, // zero since one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2824,7 +2825,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertBusy( () -> assertEquals( "The number of slices should be 1 when QUERY parallel collection is disabled.", - 1, + 0, // zero since one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2861,7 +2862,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", - expectedSlices, + expectedSlices - 1, // one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index d1a7e93efb075..0037642076566 100644 --- 
a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -223,7 +223,8 @@ public void testConcurrentRewrite() throws Exception { int numSegments = directoryReader.getContext().leaves().size(); KnnFloatVectorQuery vectorQuery = new KnnFloatVectorQuery("float_vector", new float[] { 0, 0, 0 }, 10, null); vectorQuery.rewrite(searcher); - assertBusy(() -> assertEquals(numSegments, executor.getCompletedTaskCount())); + // 1 task gets executed on the caller thread + assertBusy(() -> assertEquals(numSegments - 1, executor.getCompletedTaskCount())); } } finally { terminate(executor); @@ -252,8 +253,9 @@ public void testConcurrentCollection() throws Exception { Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); assertEquals(numDocs, totalHits.intValue()); int numExpectedTasks = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), Integer.MAX_VALUE, 1).length; - // check that each slice goes to the executor, no matter the queue size or the number of slices - assertBusy(() -> assertEquals(numExpectedTasks, executor.getCompletedTaskCount())); + // check that each slice except for one that executes on the calling thread goes to the executor, no matter the queue size + // or the number of slices + assertBusy(() -> assertEquals(numExpectedTasks - 1, executor.getCompletedTaskCount())); } } finally { terminate(executor); From bc2449c71e1954932bc22abd7d4811db9e00f3db Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Jul 2024 15:24:47 +0200 Subject: [PATCH 018/417] fix --- docs/Versions.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index fb99ef498df17..b65b974cd6b69 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ 
include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.11.1 -:lucene_version_path: 9_11_1 +:lucene_version: 9.12.0 +:lucene_version_path: 9_12_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar From e0ee3d5b1a9b3b8b5072533dd01e88367dad9f3e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 6 Jul 2024 06:16:03 +0000 Subject: [PATCH 019/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-e78f23738f4 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4c62ea5298590..21b0f1cce9296 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2701,122 +2701,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 6d0627524cf6dff0a15708b429ecbc322ce8a80a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 8 Jul 2024 06:15:44 +0000 Subject: [PATCH 020/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-e78f23738f4 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 21b0f1cce9296..3ee111111fbf9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2701,122 +2701,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 87b382f73ede7dd8728b634389e162f2d9047f7f Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 8 Jul 2024 17:04:50 +0200 Subject: [PATCH 021/417] fix verification metadata --- gradle/verification-metadata.xml | 96 ++++++++++++++++++++++++-------- 1 file changed, 72 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8092cc4975928..dac9251eb16f3 100644 
--- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2756,122 +2756,170 @@ - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + From 691f84d84e892d04399f2c2c74da17800e0d82d5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 11 Jul 2024 06:15:14 +0000 Subject: [PATCH 022/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-3eadfe6031c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 192 +++++++++--------------- 2 files changed, 73 insertions(+), 121 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 19b65b8348ece..aa5399c160e45 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-e78f23738f4 +lucene = 9.12.0-snapshot-3eadfe6031c bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 40b500de67e79..2e65800269508 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2754,172 +2754,124 @@ - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + From 8ef7b5cc03ca5a9c1fdf193c254c6e99477f096e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 12 Jul 2024 11:11:33 -0400 Subject: [PATCH 023/417] Fix compilation after Lucene update for kNN (#110823) * Fix compilation after Lucene update for kNN * generate 
verification --- gradle/verification-metadata.xml | 96 ++++++++++++++----- .../codec/vectors/ES813FlatVectorFormat.java | 2 +- .../vectors/ES813Int8FlatVectorFormat.java | 2 +- .../ES814ScalarQuantizedVectorsFormat.java | 4 +- 4 files changed, 76 insertions(+), 28 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2e65800269508..8ff97596a0bf9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2756,122 +2756,170 @@ - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + - + + + diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index 861f5ecd56f5a..0655cdc8376e8 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -65,7 +65,7 @@ static class ES813FlatVectorWriter extends KnnVectorsWriter { @Override public KnnFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { - return writer.addField(fieldInfo, null); + return writer.addField(fieldInfo); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index 701bf5dc98552..e019f50bb4712 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -75,7 +75,7 @@ public ES813FlatVectorWriter(FlatVectorsWriter writer) { @Override public KnnFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { - return 
writer.addField(fieldInfo, null); + return writer.addField(fieldInfo); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index fbde7e51ea6c0..2ccee974a7fa5 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -135,8 +135,8 @@ static final class ES814ScalarQuantizedVectorsWriter extends FlatVectorsWriter { } @Override - public FlatFieldVectorsWriter addField(FieldInfo fieldInfo, KnnFieldVectorsWriter knnFieldVectorsWriter) throws IOException { - return delegate.addField(fieldInfo, knnFieldVectorsWriter); + public FlatFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { + return delegate.addField(fieldInfo); } @Override From 23e4053ed73529521af8af9ccf9a5a56631694fb Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 17 Jul 2024 03:34:03 -0400 Subject: [PATCH 024/417] Fixing lucene snapshot failing tests (#110892) Fixing lucene snapshot failing tests. A recent change in Lucene [1] has doubled down on the sequential *single-threaded* semantic of IOContext.READONCE - usages must now be all within the same thread. Otherwise, call-sites that are sequential multi-threaded can use plain READ. 
[1] apache/lucene#13535 --------- Co-authored-by: ChrisHegarty --- .../index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java | 1 - .../elasticsearch/indices/recovery/RecoverySourceHandler.java | 2 +- .../repositories/blobstore/BlobStoreRepository.java | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 2ccee974a7fa5..e6f0f2efb449c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.codec.vectors; -import org.apache.lucene.codecs.KnnFieldVectorsWriter; import org.apache.lucene.codecs.hnsw.DefaultFlatVectorScorer; import org.apache.lucene.codecs.hnsw.FlatFieldVectorsWriter; import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index df2a9d16ebd6a..7d91ee5992277 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -1376,7 +1376,7 @@ protected void onNewResource(StoreFileMetadata md) throws IOException { // we already have the file contents on heap no need to open the file again currentInput = null; } else { - currentInput = store.directory().openInput(md.name(), IOContext.READONCE); + currentInput = store.directory().openInput(md.name(), IOContext.READ); } } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java 
b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 5b7a11969973d..d651d08f5d36b 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -3872,7 +3872,7 @@ protected void snapshotFile(SnapshotShardContext context, FileInfo fileInfo) thr final String file = fileInfo.physicalName(); try ( Releasable ignored = context.withCommitRef(); - IndexInput indexInput = store.openVerifyingInput(file, IOContext.READONCE, fileInfo.metadata()) + IndexInput indexInput = store.openVerifyingInput(file, IOContext.READ, fileInfo.metadata()) ) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); From 9d34d82b07794dc8a7aa5f9766b9481e66409383 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 21 Jul 2024 06:12:00 +0000 Subject: [PATCH 025/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-b83bbdd0bfe --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 192 +++++++++--------------- 2 files changed, 73 insertions(+), 121 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index aa5399c160e45..60a77fe8a99f8 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-3eadfe6031c +lucene = 9.12.0-snapshot-b83bbdd0bfe bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8ff97596a0bf9..db92f874a2ce7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2754,172 +2754,124 @@ - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - 
+ + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + - - - - - + + + From 4a58d76c0d80594b5e32929d4b53259794db799c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 22 Jul 2024 06:13:10 +0000 Subject: [PATCH 026/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-b83bbdd0bfe --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index db92f874a2ce7..e8fb8ce88cbeb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2756,122 +2756,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From b4a87e36e92c160cf3b9211eb3c8ecdea8309292 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 23 Jul 2024 06:12:33 +0000 Subject: [PATCH 027/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-b83bbdd0bfe --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e8fb8ce88cbeb..00577600b3a84 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2756,122 +2756,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From b1e8c7d2e01ac11b08d08fd2b114c23738f9a4aa Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 24 Jul 2024 06:12:41 +0000 Subject: [PATCH 028/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-c23f6c09f7d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git 
a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 60a77fe8a99f8..e42070882c701 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-b83bbdd0bfe +lucene = 9.12.0-snapshot-c23f6c09f7d bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 00577600b3a84..5211ed9f173a1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2754,124 +2754,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 50778c52d988fc4d62499bf207438d9b2773ce32 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 25 Jul 2024 06:12:40 +0000 Subject: [PATCH 029/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-7b47c8a73cb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e42070882c701..1eec3c8672908 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-c23f6c09f7d +lucene = 9.12.0-snapshot-7b47c8a73cb bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5211ed9f173a1..9bd37d4dd4df4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2754,124 +2754,124 @@ - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e98e87ed206634ba5f3b752b38db8fc179742a7e Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 25 Jul 2024 16:19:15 +0100 Subject: [PATCH 030/417] Use READONCE for reading the segment file and computing checksums (#111272) This commit uses READONCE for reading the segment file and computing checksums. --- .../test/java/org/elasticsearch/index/store/StoreTests.java | 2 +- .../xpack/ccr/repository/CcrRestoreSourceService.java | 6 ++++-- .../org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 5c1a45dba604d..a4c6023062a88 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -273,7 +273,7 @@ public IndexInput openInput(String name, IOContext context) throws IOException { metadata = store.getMetadata(randomBoolean() ? 
indexCommit : null); assertThat(metadata.fileMetadataMap().isEmpty(), is(false)); for (StoreFileMetadata meta : metadata) { - try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) { + try (IndexInput input = store.directory().openInput(meta.name(), IOContext.READONCE)) { String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input)); assertThat("File: " + meta.name() + " has a different checksum", meta.checksum(), equalTo(checksum)); assertThat(meta.writtenBy(), equalTo(Version.LATEST.toString())); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index fa9438353779f..6b390ab5747a8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -244,9 +245,10 @@ private Store.MetadataSnapshot getMetadata() throws IOException { private long readFileBytes(String fileName, ByteArray reference) throws IOException { try (Releasable ignored = keyedLock.acquire(fileName)) { + var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : IOContext.READ; final IndexInput indexInput = cachedInputs.computeIfAbsent(fileName, f -> { try { - return commitRef.getIndexCommit().getDirectory().openInput(fileName, IOContext.READONCE); + return commitRef.getIndexCommit().getDirectory().openInput(fileName, context); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -256,7 +258,7 @@ private long readFileBytes(String fileName, ByteArray reference) throws IOExcept long offsetAfterRead = indexInput.getFilePointer(); - if (offsetAfterRead == indexInput.length()) { + if (offsetAfterRead == indexInput.length() || context == IOContext.READONCE) { cachedInputs.remove(fileName); IOUtils.close(indexInput); } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index e5de349203b3d..d1455eaa2f1c4 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -196,7 +196,7 @@ static final OldSegmentInfos readCommit(Directory directory, String segmentFileN long generation = generationFromSegmentsFileName(segmentFileName); // System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); - try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READ)) { + try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READONCE)) { try { return readCommit(directory, input, generation, minSupportedMajorVersion); } catch (EOFException | NoSuchFileException | FileNotFoundException e) { From b3f858274caad2b32503691c3cdb28bb11981c39 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 26 Jul 2024 06:12:09 +0000 Subject: [PATCH 031/417] [Automated] Update Lucene snapshot to 
9.12.0-snapshot-7b47c8a73cb --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2936df75ae8b6..48b59e8cf72b4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2746,122 +2746,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From ae12321e7cd0d155112b0d3748592f9633066084 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Fri, 26 Jul 2024 11:16:14 +0100 Subject: [PATCH 032/417] Use READONCE for the segment file in tests (#111330) This commit uses READONCE for the segment file in tests. --- .../store/SearchableSnapshotDirectoryTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index 1452847c65b4c..e65c4a60f89d5 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -692,7 +692,7 @@ private void testDirectories( private void testIndexInputs(final CheckedBiConsumer consumer) throws Exception { testDirectories((directory, snapshotDirectory) -> { for (String fileName : randomSubsetOf(Arrays.asList(snapshotDirectory.listAll()))) { - final IOContext context = randomIOContext(); + final IOContext context = fileName.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : randomIOContext(); try (IndexInput indexInput = directory.openInput(fileName, context)) { final List closeables = new ArrayList<>(); try { From b5343d599cb251f3409c8d5673d7eec423d2f5cf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 27 Jul 2024 06:11:30 +0000 Subject: [PATCH 033/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-7b47c8a73cb --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 48b59e8cf72b4..73ec458c0c073 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2746,122 +2746,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 59886487803c1518a50128be2d11e0d50fc460d2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 28 Jul 2024 06:11:50 +0000 Subject: [PATCH 034/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-7edd64614c5 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1eec3c8672908..c70b4e3323a80 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-7b47c8a73cb +lucene = 9.12.0-snapshot-7edd64614c5 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 73ec458c0c073..1edff6149d90c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2744,124 +2744,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + 
+ + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 1320843e8899ff5d340f1bc7db454f9f056ff397 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 29 Jul 2024 06:12:47 +0000 Subject: [PATCH 035/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-7edd64614c5 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1edff6149d90c..98587a05f844e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2746,122 +2746,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From be4e14cc34bdb8e8fa9b664f2b85e5165b382a90 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 30 Jul 2024 06:11:49 +0000 Subject: [PATCH 036/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-7edd64614c5 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 98587a05f844e..e7cd9c129cfdb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2746,122 +2746,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From e7426d72999ba2e540f2db8abe291d603de1786a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 31 Jul 2024 06:12:17 +0000 Subject: [PATCH 037/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-539274f4530 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties 
b/build-tools-internal/version.properties index c70b4e3323a80..aafe96013a2af 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-7edd64614c5 +lucene = 9.12.0-snapshot-539274f4530 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c36bb3a1071de..395be45d3284f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 05d1cd84393394ea2fbd2be9b453b5c9eb3b8783 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 1 Aug 2024 06:12:53 +0000 Subject: [PATCH 038/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-d1c32c1c0fa --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index aafe96013a2af..b2fc13d784a9f 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-539274f4530 +lucene = 9.12.0-snapshot-d1c32c1c0fa bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 395be45d3284f..e85ed2f3a1329 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + 
- - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 1ae40eeda047362de1b2e0aca47d8b837e6d8c2e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 2 Aug 2024 06:12:06 +0000 Subject: [PATCH 039/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-e83c1d4234c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b2fc13d784a9f..b3152dbcb4b51 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-d1c32c1c0fa +lucene = 9.12.0-snapshot-e83c1d4234c bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e85ed2f3a1329..c920316c460f3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From ac32bd9f8c39823011ffc0b0f153d93d80595068 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 3 Aug 2024 06:12:21 +0000 Subject: [PATCH 040/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-65be22a6221 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git 
a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b3152dbcb4b51..06ff73a3e75ce 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-e83c1d4234c +lucene = 9.12.0-snapshot-65be22a6221 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c920316c460f3..30080f85f2424 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 8bca8277507c1cb37934e0ba1a1f0fc0a4f783b0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 4 Aug 2024 06:11:34 +0000 Subject: [PATCH 041/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-65be22a6221 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 30080f85f2424..4389c7751575e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2811,122 +2811,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 1698430bbb56875d2aab9e54a3b59e982086434c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 5 Aug 2024 06:12:12 +0000 Subject: [PATCH 042/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-65be22a6221 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git 
a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4389c7751575e..c3dcdbe4ea584 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2811,122 +2811,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 53eaa10e9f68b8b2087b9b1f1680255c385eec2b Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 5 Aug 2024 21:31:03 +0200 Subject: [PATCH 043/417] Bump codec to Lucene912. (#111533) * Bump codec to Lucene912. Lucene recently upgraded its default codec from `Lucene99` to `Lucene912`. * Add missing class * Apply suggestions from code review --------- Co-authored-by: Benjamin Trent --- server/src/main/java/module-info.java | 6 +- .../diskusage/IndexDiskUsageAnalyzer.java | 6 +- .../elasticsearch/common/lucene/Lucene.java | 2 +- .../index/codec/CodecService.java | 6 +- .../index/codec/Elasticsearch814Codec.java | 4 +- .../index/codec/Elasticsearch816Codec.java | 130 ++++++++++++++++++ .../codec/LegacyPerFieldMapperCodec.java | 6 +- .../index/codec/PerFieldMapperCodec.java | 2 +- .../index/mapper/CompletionFieldMapper.java | 2 +- .../index/store/LuceneFilesExtensions.java | 1 + .../lucene/util/CombinedBitSet.java | 13 ++ .../lucene/util/MatchAllBitSet.java | 6 + .../search/internal/ContextIndexSearcher.java | 9 +- .../services/org.apache.lucene.codecs.Codec | 1 + .../IndexDiskUsageAnalyzerTests.java | 22 +-- .../elasticsearch/index/codec/CodecTests.java | 2 +- .../vectors/ES813FlatVectorFormatTests.java | 4 +- .../ES813Int8FlatVectorFormatTests.java | 4 +- ...HnswScalarQuantizedVectorsFormatTests.java | 4 +- .../ES815BitFlatVectorFormatTests.java | 4 +- .../ES815HnswBitVectorsFormatTests.java | 4 +- ...estCompressionStoredFieldsFormatTests.java | 4 +- ...td814BestSpeedStoredFieldsFormatTests.java | 4 +- .../engine/CompletionStatsCacheTests.java | 8 +- .../mapper/CompletionFieldMapperTests.java | 6 +- 25 files changed, 206 insertions(+), 54 deletions(-) create 
mode 100644 server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 1c07b5b4564ec..b74fa2e7dd035 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -6,7 +6,6 @@ * Side Public License, v 1. */ -import org.elasticsearch.index.codec.Elasticsearch814Codec; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.plugins.internal.RestExtension; @@ -454,7 +453,10 @@ org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat, org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat; - provides org.apache.lucene.codecs.Codec with Elasticsearch814Codec; + provides org.apache.lucene.codecs.Codec + with + org.elasticsearch.index.codec.Elasticsearch814Codec, + org.elasticsearch.index.codec.Elasticsearch816Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 2f57b59c165e2..84e78a60bd641 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -12,6 +12,7 @@ import org.apache.lucene.backward_codecs.lucene50.Lucene50PostingsFormat; import org.apache.lucene.backward_codecs.lucene84.Lucene84PostingsFormat; import org.apache.lucene.backward_codecs.lucene90.Lucene90PostingsFormat; +import org.apache.lucene.backward_codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.KnnVectorsReader; @@ -19,7 +20,7 @@ 
import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; -import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; @@ -303,6 +304,9 @@ private static void readProximity(Terms terms, PostingsEnum postings) throws IOE private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef term) throws IOException { if (term != null && termsEnum.seekExact(term)) { final TermState termState = termsEnum.termState(); + if (termState instanceof final Lucene912PostingsFormat.IntBlockTermState blockTermState) { + return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); + } if (termState instanceof final ES812PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 36b3076c29a31..fb1a15a851252 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -87,7 +87,7 @@ import java.util.Objects; public class Lucene { - public static final String LATEST_CODEC = "Lucene99"; + public static final String LATEST_CODEC = "Lucene912"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index ef8d130f86f1e..e51a5a2a0c6c4 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -45,7 +45,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -53,7 +53,7 @@ public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) } codecs.put(LEGACY_DEFAULT_CODEC, legacyBestSpeedCodec); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put( BEST_COMPRESSION_CODEC, diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java index 301d3129f7c2a..2eef2f480b83b 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java @@ -8,14 +8,14 @@ package 
org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene99.Lucene99Codec; +import org.apache.lucene.backward_codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java new file mode 100644 index 0000000000000..1b08b87e46762 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 8.16. This extends the Lucene 9.12 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch816Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch816Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch816Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return 
Elasticsearch816Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch816Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch816", new Lucene912Codec()); + this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.defaultPostingsFormat = new Lucene912PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field + * . + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index a682d26b094e6..960da514eb68f 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -21,11 +21,11 @@ * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene99Codec { +public final class LegacyPerFieldMapperCodec extends Lucene912Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene99Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene912Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. 
You must create a copy of the current Elasticsearch diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index 6f88578260db3..4468096215757 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -25,7 +25,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public final class PerFieldMapperCodec extends Elasticsearch814Codec { +public final class PerFieldMapperCodec extends Elasticsearch816Codec { private final PerFieldFormatSupplier formatSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 23272fbd354f3..b612d144cdeb3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -370,7 +370,7 @@ public CompletionFieldType fieldType() { } static PostingsFormat postingsFormat() { - return PostingsFormat.forName("Completion99"); + return PostingsFormat.forName("Completion912"); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java index 463ff90b47870..f3942b4ac22d0 100644 --- a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java +++ b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java @@ -56,6 +56,7 @@ public enum LuceneFilesExtensions { NVM("nvm", "Norms Metadata", true, false), PAY("pay", "Payloads", false, false), POS("pos", "Positions", false, false), + PSM("psm", "Postings 
Metadata", true, false), SI("si", "Segment Info", true, false), // Term dictionaries are typically performance-sensitive and hot in the page // cache, so we use mmap, which provides better performance. diff --git a/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java b/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java index 9db5cbe6b476a..ada9c8350d723 100644 --- a/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java +++ b/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java @@ -76,6 +76,19 @@ public int nextSetBit(int index) { return next; } + @Override + public int nextSetBit(int index, int upperBound) { + assert index >= 0 && index < length : "index=" + index + " numBits=" + length(); + int next = first.nextSetBit(index, upperBound); + while (next != DocIdSetIterator.NO_MORE_DOCS && second.get(next) == false) { + if (next == length() - 1) { + return DocIdSetIterator.NO_MORE_DOCS; + } + next = first.nextSetBit(next + 1, upperBound); + } + return next; + } + @Override public long ramBytesUsed() { return first.ramBytesUsed(); diff --git a/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java b/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java index 7cce67199a218..f56563637ccd2 100644 --- a/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java +++ b/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java @@ -68,6 +68,12 @@ public int nextSetBit(int index) { return index; } + @Override + public int nextSetBit(int index, int upperBound) { + assert index < upperBound; + return index; + } + @Override public long ramBytesUsed() { return RAM_BYTES_USED; diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index cba2cf761e6f3..c840772805b0e 100644 --- 
a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -407,13 +407,8 @@ private static class TimeExceededException extends RuntimeException { // This exception should never be re-thrown, but we fill in the stacktrace to be able to trace where it does not get properly caught } - /** - * Lower-level search API. - * - * {@link LeafCollector#collect(int)} is called for every matching document in - * the provided ctx. - */ - private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException { + @Override + protected void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException { cancellable.checkCancelled(); final LeafCollector leafCollector; try { diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index b99a15507f742..4e85ba2cf479f 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1 +1,2 @@ org.elasticsearch.index.codec.Elasticsearch814Codec +org.elasticsearch.index.codec.Elasticsearch816Codec diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index d6cf90034f5b5..94d56c905afec 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import 
org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -53,7 +53,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; @@ -326,11 +326,11 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene99Codec(Lucene99Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene912Codec(Lucene912Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { - return new Completion99PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); + return new Completion912PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); } else { return super.postingsFormat(); } @@ -413,25 +413,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene99Codec.Mode mode() { - return Lucene99Codec.Mode.BEST_SPEED; + Lucene912Codec.Mode mode() { + return Lucene912Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene99Codec.Mode mode() { - return Lucene99Codec.Mode.BEST_COMPRESSION; + Lucene912Codec.Mode mode() { + return Lucene912Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene99Codec.Mode 
mode(); + abstract Lucene912Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene99Codec(codecMode.mode())); + .setCodec(new Lucene912Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -639,7 +639,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene99Codec(mode.mode()) { + .setCodec(new Lucene912Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 002e42b3198e6..e79df1f5256f6 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -51,7 +51,7 @@ public void testResolveDefaultCodecs() throws Exception { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch814Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch816Codec.class)); } public void testDefault() throws Exception { diff --git 
a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index b4f82e91c39c1..5c291fa520519 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -23,7 +23,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 7bb2e9e0284f1..cd7b96343dc1e 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -23,7 +23,7 @@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected 
Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index ca446a607f633..aead9485a4a7e 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -40,7 +40,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index c9a5a8e76a041..d00247fb688e2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; 
+import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -18,7 +18,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index 3525d5b619565..6e5d4a9abb0a8 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -18,7 +18,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 1679813ed1340..22e09ab4525ab 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -10,11 +10,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch814Codec; +import org.elasticsearch.index.codec.Elasticsearch816Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 5acdd4f5730e9..555e5becc74be 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -10,11 +10,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch814Codec; +import org.elasticsearch.index.codec.Elasticsearch816Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 7c2c40e078cb4..d6281ed655494 100644 --- 
a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -8,12 +8,12 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.elasticsearch.ElasticsearchException; @@ -43,8 +43,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion99PostingsFormat(); - indexWriterConfig.setCodec(new Lucene99Codec() { + final PostingsFormat postingsFormat = new Completion912PostingsFormat(); + indexWriterConfig.setCodec(new Lucene912Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index d90517e4be274..c1c9be7b9f19c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.index.IndexOptions; import 
org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -150,7 +150,7 @@ public void testPostingsFormat() throws IOException { Codec codec = codecService.codec("default"); if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { assertThat(codec, instanceOf(PerFieldMapperCodec.class)); - assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(Completion99PostingsFormat.class)); + assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(Completion912PostingsFormat.class)); } else { if (codec instanceof CodecService.DeduplicateFieldInfosCodec deduplicateFieldInfosCodec) { codec = deduplicateFieldInfosCodec.delegate(); @@ -158,7 +158,7 @@ public void testPostingsFormat() throws IOException { assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); assertThat( ((LegacyPerFieldMapperCodec) codec).getPostingsFormatForField("field"), - instanceOf(Completion99PostingsFormat.class) + instanceOf(Completion912PostingsFormat.class) ); } } From 14e97af09d4644c3bc2541a7cadbd0f05f37c930 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 6 Aug 2024 06:18:07 +0000 Subject: [PATCH 044/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-69e08e4cdbd --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 06ff73a3e75ce..1e636ff07c948 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-65be22a6221 +lucene = 9.12.0-snapshot-69e08e4cdbd bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c3dcdbe4ea584..fd27f7fb371b8 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 714cd0194c98c1cbeec6225328469537fc40f1c3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 7 Aug 2024 06:12:48 +0000 Subject: [PATCH 045/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-0fa473aa343 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1e636ff07c948..a74d23e9b707f 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-69e08e4cdbd +lucene = 9.12.0-snapshot-0fa473aa343 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fd27f7fb371b8..0cb2e27512639 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 45ef8414578e2ae94b3109a243ff828b3d2b0737 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 8 Aug 2024 06:12:58 +0000 Subject: [PATCH 046/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a546ed51885 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a74d23e9b707f..e966d6d27fc04 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-0fa473aa343 +lucene = 9.12.0-snapshot-a546ed51885 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0cb2e27512639..3e6b26bacb9e1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 9a9ddf6b953dc10d6bcff11db2fa1d088edbddaa Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 9 Aug 2024 06:12:30 +0000 Subject: [PATCH 047/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-98fbbe2d673 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e966d6d27fc04..2f8bce9c96f2f 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-a546ed51885 +lucene = 9.12.0-snapshot-98fbbe2d673 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3e6b26bacb9e1..e79d377a9a7b1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From b68a00d063791cff4ab753900d1ade386e83d9fc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 10 Aug 2024 06:11:27 +0000 Subject: [PATCH 048/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-ef28015c0af --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 2f8bce9c96f2f..decde7a9bdfdd 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-98fbbe2d673 +lucene = 9.12.0-snapshot-ef28015c0af bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e79d377a9a7b1..75a501463553d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - 
- + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d241599a26742606884c426c970336ee33ee8b71 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 11 Aug 2024 06:12:15 +0000 Subject: [PATCH 049/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a8c0ba2b447 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index decde7a9bdfdd..cb6c55c377fbd 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-ef28015c0af +lucene = 9.12.0-snapshot-a8c0ba2b447 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 75a501463553d..78a861d7b2586 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a9e1342bd44b086143fdcff2b4b737301621189c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 12 Aug 2024 06:12:29 +0000 Subject: [PATCH 050/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a8c0ba2b447 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 78a861d7b2586..a2af48d0403c5 100644 --- a/gradle/verification-metadata.xml +++ 
b/gradle/verification-metadata.xml @@ -2811,122 +2811,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From de8dad5691ae25443530eb891e6c98c6ca112a05 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 13 Aug 2024 06:13:20 +0000 Subject: [PATCH 051/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-9bfa91eb905 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index cb6c55c377fbd..bd432f8d981fa 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-a8c0ba2b447 +lucene = 9.12.0-snapshot-9bfa91eb905 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a2af48d0403c5..0dac15c6bffe1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From badcfd8132c1d23cd02c1637a1b69c47e999a6f8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 14 Aug 2024 06:11:51 +0000 Subject: [PATCH 052/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-35fc50115d6 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 
bd432f8d981fa..d54b8b7e8627a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-9bfa91eb905 +lucene = 9.12.0-snapshot-35fc50115d6 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0dac15c6bffe1..c4d9028385ff8 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,124 +2809,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 461a0fe8daa997583e80b3f80fb2f03a1f1b57b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 15 Aug 2024 06:12:34 +0000 Subject: [PATCH 053/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-ec2492fe609 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 154 ++++++++++++------------ 2 files changed, 78 insertions(+), 78 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d54b8b7e8627a..fca5eccb0b707 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-35fc50115d6 +lucene = 9.12.0-snapshot-ec2492fe609 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 47ccb938b9994..17c0819277301 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,129 +2809,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + 
- - - + + + - - - + + + - - - + + + - - - - - - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + + + + + + From 1f66b29d59b47c560bf7c6c9938019bffe9c9c34 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 15 Aug 2024 13:53:54 +0100 Subject: [PATCH 054/417] Normalize float vector in native scorer test (#111925) --- .../org/elasticsearch/simdvec/VectorScorerFactoryTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java index 93c6da73f4179..008beabfa6415 100644 --- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java @@ -236,6 +236,8 @@ void testRandomScorerImpl(long maxChunkSize, Function floatArr try (Directory dir = new MMapDirectory(createTempDir("testRandom"), maxChunkSize)) { for (var sim : List.of(COSINE, DOT_PRODUCT, EUCLIDEAN, MAXIMUM_INNER_PRODUCT)) { + // Use the random supplier for COSINE, which returns values in the normalized range + floatArraySupplier = sim == COSINE ? 
FLOAT_ARRAY_RANDOM_FUNC : floatArraySupplier; final int dims = randomIntBetween(1, 4096); final int size = randomIntBetween(2, 100); final float[][] vectors = new float[size][]; From debf44d40d8fb2f377231b94095ccfc8aa974a23 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 16 Aug 2024 06:12:30 +0000 Subject: [PATCH 055/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-4a0a09a8bbb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index fca5eccb0b707..e22fb9b788fcb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-ec2492fe609 +lucene = 9.12.0-snapshot-4a0a09a8bbb bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 17c0819277301..8c61564ebfa42 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2809,129 +2809,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From bc10e2f0020fc3cb70a8e75772e1ffc56f57cac5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 17 Aug 2024 06:11:18 +0000 Subject: [PATCH 056/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a9a70fa97cc --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 165 ++++++++++++------------ 2 files changed, 81 insertions(+), 86 deletions(-) diff --git a/build-tools-internal/version.properties 
b/build-tools-internal/version.properties index e22fb9b788fcb..99a135480b97b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-4a0a09a8bbb +lucene = 9.12.0-snapshot-a9a70fa97cc bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 73c42c2a2066d..7e201e6b821e4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -953,11 +953,6 @@ - - - - - @@ -1746,16 +1741,16 @@ - - - - - + + + + + @@ -2821,129 +2816,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 88a35a7b548b79023a2191ca50a48445c74eb8c6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 18 Aug 2024 06:11:53 +0000 Subject: [PATCH 057/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a9a70fa97cc --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7e201e6b821e4..ea8a97e20feba 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2818,127 +2818,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 2cfa455a427f8832f0bc5e5f4b6aac45eeca6489 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 19 Aug 2024 06:12:05 +0000 Subject: [PATCH 058/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a9a70fa97cc --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 
deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index ea8a97e20feba..2d030e02c265e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2818,127 +2818,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 628d3348aa2053f195c440084495b33325569b82 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 19 Aug 2024 16:28:53 +0200 Subject: [PATCH 059/417] Remove explicit loop over leaves in favour of calling super.search (#111971) The searchLeaf protected method was recently introduced in IndexSearcher upstream, we do still need to override a bunch of search methods, but we can slightly reduce the surface of the code we override by removing the loop over the leaves, and rely on super instead. --- .../search/internal/ContextIndexSearcher.java | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index c840772805b0e..00c451fa42b2c 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -319,12 +319,9 @@ public T search(Query query, CollectorManager col } /** - * Similar to the lucene implementation, with the following changes made: - * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads - * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene - * does not allow consuming them from a different thread. 
- * 2) handles the ES TimeExceededException - * */ + * Same implementation as the default one in Lucene, with an additional call to postCollection in cased there are no segments. + * The rest is a plain copy from Lucene. + */ private T search(Weight weight, CollectorManager collectorManager, C firstCollector) throws IOException { LeafSlice[] leafSlices = getSlices(); if (leafSlices.length == 0) { @@ -356,14 +353,18 @@ private T search(Weight weight, CollectorManager } } + /** + * Similar to the lucene implementation, with the following changes made: + * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads + * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene + * does not allow consuming them from a different thread. + * 2) handles the ES TimeExceededException + */ @Override public void search(List leaves, Weight weight, Collector collector) throws IOException { - collector.setWeight(weight); boolean success = false; try { - for (LeafReaderContext ctx : leaves) { // search each subreader - searchLeaf(ctx, weight, collector); - } + super.search(leaves, weight, collector); success = true; } catch (@SuppressWarnings("unused") TimeExceededException e) { timeExceeded = true; From d03c197df257f867d58f72c9b8f872f31ab133be Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 20 Aug 2024 06:11:26 +0000 Subject: [PATCH 060/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-25253a1a016 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 99a135480b97b..f108b9f7fc4d7 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 
8.16.0 -lucene = 9.12.0-snapshot-a9a70fa97cc +lucene = 9.12.0-snapshot-25253a1a016 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2d030e02c265e..de1db00b952b7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,129 +2816,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 29e922c902c85a214993d01f35120a7f5ba9d8f4 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 20 Aug 2024 23:57:38 +0200 Subject: [PATCH 061/417] Remove search worker pool (#111099) No more need for this pool, now that Lucene can safely execute on the current pool. --- docs/reference/modules/threadpool.asciidoc | 8 +----- .../elasticsearch/node/NodeConstruction.java | 13 +++++++++ .../search/DefaultSearchContext.java | 3 +- .../elasticsearch/search/SearchService.java | 20 ++++++++----- .../support/TimeSeriesIndexSearcher.java | 28 ++----------------- .../search/internal/ContextIndexSearcher.java | 2 +- .../elasticsearch/threadpool/ThreadPool.java | 6 ---- .../search/SearchServiceTests.java | 8 +++--- .../search/dfs/DfsPhaseTests.java | 2 +- .../snapshots/SnapshotResiliencyTests.java | 2 -- .../threadpool/ThreadPoolTests.java | 21 -------------- .../aggregations/AggregatorTestCase.java | 2 +- .../ConcurrentSearchSingleNodeTests.java | 27 ++++++------------ .../ConcurrentSearchTestPluginTests.java | 27 ++++++------------ .../input/MetadataCachingIndexInput.java | 1 - 15 files changed, 54 insertions(+), 116 deletions(-) diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index ed4becbfbb6d0..9e6e5fb80f999 100644 --- 
a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -13,16 +13,10 @@ There are several thread pools, but the important ones include: [[search-threadpool]] `search`:: - For coordination of count/search operations at the shard level whose computation - is offloaded to the search_worker thread pool. Used also by fetch and other search + For count/search operations at the shard level. Used also by fetch and other search related operations Thread pool type is `fixed` with a size of `int((`<>`pass:[ * ]3) / 2) + 1`, and queue_size of `1000`. -`search_worker`:: - For the heavy workload of count/search operations that may be executed concurrently - across segments within the same shard when possible. Thread pool type is `fixed` - with a size of `int((`<>`pass:[ * ]3) / 2) + 1`, and unbounded queue_size . - [[search-throttled]]`search_throttled`:: For count/search/suggest/get operations on `search_throttled indices`. Thread pool type is `fixed` with a size of `1`, and queue_size of `100`. 
diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index a4db9a0a0e149..2599972665033 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -81,6 +81,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -497,6 +498,7 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, for (final ExecutorBuilder builder : threadPool.builders()) { additionalSettings.addAll(builder.getRegisteredSettings()); } + addBwcSearchWorkerSettings(additionalSettings); SettingsExtension.load().forEach(e -> additionalSettings.addAll(e.getSettings())); // this is as early as we can validate settings at this point. 
we already pass them to ThreadPool @@ -527,6 +529,17 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, return settingsModule; } + @UpdateForV9 + private static void addBwcSearchWorkerSettings(List> additionalSettings) { + // TODO remove the below settings, they are unused and only here to enable BwC for deployments that still use them + additionalSettings.add( + Setting.intSetting("thread_pool.search_worker.queue_size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning) + ); + additionalSettings.add( + Setting.intSetting("thread_pool.search_worker.size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning) + ); + } + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool, TelemetryProvider telemetryProvider) { IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList(), telemetryProvider); diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index dc92cfd11fce3..203834648eb67 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -186,7 +186,7 @@ final class DefaultSearchContext extends SearchContext { enableQueryPhaseParallelCollection, field -> getFieldCardinality(field, readerContext.indexService(), engineSearcher.getDirectoryReader()) ); - if (executor == null) { + if (maximumNumberOfSlices <= 1) { this.searcher = new ContextIndexSearcher( engineSearcher.getIndexReader(), engineSearcher.getSimilarity(), @@ -290,6 +290,7 @@ static int determineMaximumNumberOfSlices( ToLongFunction fieldCardinality ) { return executor instanceof ThreadPoolExecutor tpe + && tpe.getQueue().isEmpty() && isParallelCollectionSupportedForResults(resultsType, 
request.source(), fieldCardinality, enableQueryPhaseParallelCollection) ? tpe.getMaximumPoolSize() : 1; diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 67d5d6337d77c..26c3bf6ceeffe 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -143,7 +143,6 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -229,7 +228,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv "search.worker_threads_enabled", true, Property.NodeScope, - Property.Dynamic + Property.Dynamic, + Property.DeprecatedWarning ); public static final Setting QUERY_PHASE_PARALLEL_COLLECTION_ENABLED = Setting.boolSetting( @@ -282,7 +282,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final FetchPhase fetchPhase; private final RankFeatureShardPhase rankFeatureShardPhase; - private volatile boolean enableSearchWorkerThreads; + private volatile Executor searchExecutor; private volatile boolean enableQueryPhaseParallelCollection; private volatile long defaultKeepAlive; @@ -376,7 +376,10 @@ public SearchService( clusterService.getClusterSettings() .addSettingsUpdateConsumer(ENABLE_REWRITE_AGGS_TO_FILTER_BY_FILTER, this::setEnableRewriteAggsToFilterByFilter); - enableSearchWorkerThreads = SEARCH_WORKER_THREADS_ENABLED.get(settings); + if (SEARCH_WORKER_THREADS_ENABLED.get(settings)) { + searchExecutor = threadPool.executor(Names.SEARCH); + } + clusterService.getClusterSettings().addSettingsUpdateConsumer(SEARCH_WORKER_THREADS_ENABLED, this::setEnableSearchWorkerThreads); 
enableQueryPhaseParallelCollection = QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.get(settings); @@ -385,7 +388,11 @@ public SearchService( } private void setEnableSearchWorkerThreads(boolean enableSearchWorkerThreads) { - this.enableSearchWorkerThreads = enableSearchWorkerThreads; + if (enableSearchWorkerThreads) { + searchExecutor = threadPool.executor(Names.SEARCH); + } else { + searchExecutor = null; + } } private void setEnableQueryPhaseParallelCollection(boolean enableQueryPhaseParallelCollection) { @@ -1126,7 +1133,6 @@ private DefaultSearchContext createSearchContext( reader.indexShard().shardId(), request.getClusterAlias() ); - ExecutorService executor = this.enableSearchWorkerThreads ? threadPool.executor(Names.SEARCH_WORKER) : null; searchContext = new DefaultSearchContext( reader, request, @@ -1135,7 +1141,7 @@ private DefaultSearchContext createSearchContext( timeout, fetchPhase, lowLevelCancellation, - executor, + searchExecutor, resultsType, enableQueryPhaseParallelCollection, minimumDocsPerSlice diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 21138f46e974e..c3faf0e1900dc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -21,7 +21,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; @@ -37,9 +36,6 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import 
java.util.concurrent.ExecutionException; -import java.util.concurrent.FutureTask; -import java.util.concurrent.RunnableFuture; import java.util.function.IntSupplier; import static org.elasticsearch.index.IndexSortConfig.TIME_SERIES_SORT; @@ -93,28 +89,8 @@ public void setMinimumScore(Float minimumScore) { public void search(Query query, BucketCollector bucketCollector) throws IOException { query = searcher.rewrite(query); Weight weight = searcher.createWeight(query, bucketCollector.scoreMode(), 1); - if (searcher.getExecutor() == null) { - search(bucketCollector, weight); - bucketCollector.postCollection(); - return; - } - // offload to the search worker thread pool whenever possible. It will be null only when search.worker_threads_enabled is false - RunnableFuture task = new FutureTask<>(() -> { - search(bucketCollector, weight); - bucketCollector.postCollection(); - return null; - }); - searcher.getExecutor().execute(task); - try { - task.get(); - } catch (InterruptedException e) { - throw new ThreadInterruptedException(e); - } catch (ExecutionException e) { - if (e.getCause() instanceof RuntimeException runtimeException) { - throw runtimeException; - } - throw new RuntimeException(e.getCause()); - } + search(bucketCollector, weight); + bucketCollector.postCollection(); } private void search(BucketCollector bucketCollector, Weight weight) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 00c451fa42b2c..ee6b4fc7bad5b 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -355,7 +355,7 @@ private T search(Weight weight, CollectorManager /** * Similar to the lucene implementation, with the following changes made: - * 1) postCollection is performed after each segment is collected. 
This is needed for aggregations, performed by search worker threads + * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search threads * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene * does not allow consuming them from a different thread. * 2) handles the ES TimeExceededException diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 29ab3ec7e0848..2877e0b46a390 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -88,7 +88,6 @@ public static class Names { public static final String ANALYZE = "analyze"; public static final String WRITE = "write"; public static final String SEARCH = "search"; - public static final String SEARCH_WORKER = "search_worker"; public static final String SEARCH_COORDINATION = "search_coordination"; public static final String AUTO_COMPLETE = "auto_complete"; public static final String SEARCH_THROTTLED = "search_throttled"; @@ -158,7 +157,6 @@ public static ThreadPoolType fromType(String type) { entry(Names.ANALYZE, ThreadPoolType.FIXED), entry(Names.WRITE, ThreadPoolType.FIXED), entry(Names.SEARCH, ThreadPoolType.FIXED), - entry(Names.SEARCH_WORKER, ThreadPoolType.FIXED), entry(Names.SEARCH_COORDINATION, ThreadPoolType.FIXED), entry(Names.AUTO_COMPLETE, ThreadPoolType.FIXED), entry(Names.MANAGEMENT, ThreadPoolType.SCALING), @@ -267,10 +265,6 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex new TaskTrackingConfig(true, searchAutoscalingEWMA) ) ); - builders.put( - Names.SEARCH_WORKER, - new FixedExecutorBuilder(settings, Names.SEARCH_WORKER, searchOrGetThreadPoolSize, -1, TaskTrackingConfig.DEFAULT) - ); builders.put( Names.SEARCH_COORDINATION, new FixedExecutorBuilder( 
diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 701f093746b9f..3b8ff74e92653 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -2747,7 +2747,7 @@ public void testEnableSearchWorkerThreads() throws IOException { */ public void testSlicingBehaviourForParallelCollection() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY); - ThreadPoolExecutor executor = (ThreadPoolExecutor) indexService.getThreadPool().executor(ThreadPool.Names.SEARCH_WORKER); + ThreadPoolExecutor executor = (ThreadPoolExecutor) indexService.getThreadPool().executor(ThreadPool.Names.SEARCH); final int configuredMaxPoolSize = 10; executor.setMaximumPoolSize(configuredMaxPoolSize); // We set this explicitly to be independent of CPU cores. int numDocs = randomIntBetween(50, 100); @@ -2837,7 +2837,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertNull(searcher.getExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( @@ -2853,7 +2853,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertNull(searcher.getExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); 
assertBusy( @@ -2876,7 +2876,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertNull(searcher.getExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( diff --git a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java index a0f37bcbb7fb1..a2b5671944405 100644 --- a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java @@ -39,7 +39,7 @@ public class DfsPhaseTests extends ESTestCase { @Before public final void init() { threadPool = new TestThreadPool(DfsPhaseTests.class.getName()); - threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH_WORKER); + threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH); } @After diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index b54a786e05c9d..04a7f2f538fdc 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1844,8 +1844,6 @@ private Environment createEnvironment(String nodeName) { Settings.builder() .put(NODE_NAME_SETTING.getKey(), nodeName) .put(PATH_HOME_SETTING.getKey(), tempDir.resolve(nodeName).toAbsolutePath()) - // test uses the same executor service for all thread pools, search worker would need to be a different one - .put(SearchService.SEARCH_WORKER_THREADS_ENABLED.getKey(), false) 
.put(Environment.PATH_REPO_SETTING.getKey(), tempDir.resolve("repo").toAbsolutePath()) .putList( ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(), diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index b19f058d2c6c6..395ae07765016 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -24,8 +24,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedTransferQueue; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig.DEFAULT; @@ -370,25 +368,6 @@ public void testWriteThreadPoolUsesTaskExecutionTimeTrackingEsThreadPoolExecutor } } - public void testSearchWorkedThreadPool() { - final int allocatedProcessors = randomIntBetween(1, EsExecutors.allocatedProcessors(Settings.EMPTY)); - final ThreadPool threadPool = new TestThreadPool( - "test", - Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), allocatedProcessors).build() - ); - try { - ExecutorService executor = threadPool.executor(ThreadPool.Names.SEARCH_WORKER); - assertThat(executor, instanceOf(ThreadPoolExecutor.class)); - ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) executor; - int expectedPoolSize = allocatedProcessors * 3 / 2 + 1; - assertEquals(expectedPoolSize, threadPoolExecutor.getCorePoolSize()); - assertEquals(expectedPoolSize, threadPoolExecutor.getMaximumPoolSize()); - assertThat(threadPoolExecutor.getQueue(), instanceOf(LinkedTransferQueue.class)); - } finally { - assertTrue(terminate(threadPool)); - } - } - public void testScheduledOneShotRejection() { final var name = "fixed-bounded"; final var threadPool = new TestThreadPool( diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index f3fc4479a21a4..6ca513516d90e 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -211,7 +211,7 @@ public abstract class AggregatorTestCase extends ESTestCase { @Before public final void initPlugins() { threadPool = new TestThreadPool(AggregatorTestCase.class.getName()); - threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH_WORKER); + threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH); List plugins = new ArrayList<>(getSearchPlugins()); plugins.add(new AggCardinalityUpperBoundPlugin()); SearchModule searchModule = new SearchModule(Settings.EMPTY, plugins); diff --git a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java index 5bb393ff70e83..00f08b1fa8eca 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java @@ -8,33 +8,22 @@ package org.elasticsearch.search.internal; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.io.IOException; - public class ConcurrentSearchSingleNodeTests extends ESSingleNodeTestCase { private final boolean concurrentSearch = 
randomBoolean(); - public void testConcurrentSearch() throws IOException { + public void testConcurrentSearch() { client().admin().indices().prepareCreate("index").get(); - IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.iterator().next(); - IndexShard shard = indexService.getShard(0); - SearchService searchService = getInstanceFromNode(SearchService.class); - ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); - try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { - ContextIndexSearcher searcher = searchContext.searcher(); - if (concurrentSearch) { - assertEquals(1, searcher.getMinimumDocsPerSlice()); - } else { - assertEquals(50_000, searcher.getMinimumDocsPerSlice()); - } + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + int minDocsPerSlice = SearchService.MINIMUM_DOCS_PER_SLICE.get(clusterService.getSettings()); + if (concurrentSearch) { + assertEquals(1, minDocsPerSlice); + } else { + assertEquals(50_000, minDocsPerSlice); } } diff --git a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java index 29da297ce292e..75d23b3baeabf 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java @@ -8,34 +8,23 @@ package org.elasticsearch.search.internal; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.search.SearchService; import 
org.elasticsearch.test.ESIntegTestCase; -import java.io.IOException; - @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 1) public class ConcurrentSearchTestPluginTests extends ESIntegTestCase { private final boolean concurrentSearch = randomBoolean(); - public void testConcurrentSearch() throws IOException { + public void testConcurrentSearch() { client().admin().indices().prepareCreate("index").get(); - IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class); - IndexService indexService = indicesService.iterator().next(); - IndexShard shard = indexService.getShard(0); - SearchService searchService = internalCluster().getDataNodeInstance(SearchService.class); - ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); - try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { - ContextIndexSearcher searcher = searchContext.searcher(); - if (concurrentSearch) { - assertEquals(1, searcher.getMinimumDocsPerSlice()); - } else { - assertEquals(50_000, searcher.getMinimumDocsPerSlice()); - } + ClusterService clusterService = internalCluster().getDataNodeInstance(ClusterService.class); + int minDocsPerSlice = SearchService.MINIMUM_DOCS_PER_SLICE.get(clusterService.getSettings()); + if (concurrentSearch) { + assertEquals(1, minDocsPerSlice); + } else { + assertEquals(50_000, minDocsPerSlice); } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index 8c978c3445526..9875ab03088aa 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ 
b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -221,7 +221,6 @@ public static boolean assertCurrentThreadMayAccessBlobStore() { ThreadPool.Names.SNAPSHOT, ThreadPool.Names.GENERIC, ThreadPool.Names.SEARCH, - ThreadPool.Names.SEARCH_WORKER, ThreadPool.Names.SEARCH_THROTTLED, // Cache asynchronous fetching runs on a dedicated thread pool. From 5b7d98568f04cefee91d5e5e7bf377ea971aa8a6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 21 Aug 2024 06:11:22 +0000 Subject: [PATCH 062/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-25253a1a016 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index de1db00b952b7..2cf3a529a50c7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2818,127 +2818,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 4b3034256c123270b3ef6280eac2fc211058e93f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 22 Aug 2024 06:12:02 +0000 Subject: [PATCH 063/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-fc011d36e7d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f108b9f7fc4d7..5e720bdea021c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-25253a1a016 +lucene = 9.12.0-snapshot-fc011d36e7d bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index f5528105ac557..2bb3c194a9197 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From f5c250b9a3867b311ab1dc243b8768aa49be2616 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 22 Aug 2024 12:25:11 +0100 Subject: [PATCH 064/417] Fix test compilation for new Lucene 912 Codec (#112093) This commit fixes a compilation issue by updating the test to use the new Lucene 912 codec. --- .../index/codec/zstd/StoredFieldCodecDuelTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index 93e9911746d18..92abdb460ef55 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -34,13 +34,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, null, 
BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } From a3ffb121f77d7046eae5880079b3a2a2e2741eed Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 23 Aug 2024 06:12:18 +0000 Subject: [PATCH 065/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-b5c410da870 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 5e720bdea021c..292fd0e4f0a90 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-fc011d36e7d +lucene = 9.12.0-snapshot-b5c410da870 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2bb3c194a9197..fcc10a158da5a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0fa89d1c25058c67417683c1f4efee2d993e4ed0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 24 Aug 2024 06:12:39 +0000 Subject: [PATCH 066/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-83df2ff10e2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 292fd0e4f0a90..a98fd7a1a7606 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-b5c410da870 +lucene = 9.12.0-snapshot-83df2ff10e2 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fcc10a158da5a..f690764c2eaa9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7fc218f61da9f725228722c6050f47946deb4fd9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 25 Aug 2024 06:11:26 +0000 Subject: [PATCH 067/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-83df2ff10e2 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f690764c2eaa9..9bc1e19447d51 100644 --- 
a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2838,127 +2838,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 8637199b2373c9cc403d067813fd1a5389d941f9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 26 Aug 2024 06:11:42 +0000 Subject: [PATCH 068/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-83df2ff10e2 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9bc1e19447d51..13f18d77f3aa6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2838,127 +2838,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 92c5521c9056aad4cc201ce33e34a1b37bbb597e Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Mon, 26 Aug 2024 15:08:49 -0400 Subject: [PATCH 069/417] Add range and regexp Intervals (#111465) Lucene/pull/13562 introduced IntervalsSource for range and regexp queries. This exposes these features in ES. This is done to achieve parity with Span queries that support regexp and range. 
Relates to #110491 --- docs/changelog/111465.yaml | 5 + .../query-dsl/intervals-query.asciidoc | 66 ++++ .../extras/MatchOnlyTextFieldMapper.java | 24 ++ .../test/search/230_interval_query.yml | 42 +++ .../index/mapper/MappedFieldType.java | 24 ++ .../index/mapper/PlaceHolderFieldMapper.java | 16 + .../index/mapper/TextFieldMapper.java | 22 ++ .../index/query/IntervalsSourceProvider.java | 305 +++++++++++++++++- .../elasticsearch/search/SearchModule.java | 10 + .../query/IntervalQueryBuilderTests.java | 132 +++++++- .../RangeIntervalsSourceProviderTests.java | 70 ++++ .../RegexpIntervalsSourceProviderTests.java | 61 ++++ 12 files changed, 774 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/111465.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java diff --git a/docs/changelog/111465.yaml b/docs/changelog/111465.yaml new file mode 100644 index 0000000000000..2a8df287427a9 --- /dev/null +++ b/docs/changelog/111465.yaml @@ -0,0 +1,5 @@ +pr: 111465 +summary: Add range and regexp Intervals +area: Search +type: enhancement +issues: [] diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc index 1e3380389d861..84869838fe1e6 100644 --- a/docs/reference/query-dsl/intervals-query.asciidoc +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -73,7 +73,9 @@ Valid rules include: * <> * <> * <> +* <> * <> +* <> * <> * <> -- @@ -178,6 +180,36 @@ The `pattern` is normalized using the search analyzer from this field, unless `analyzer` is specified separately. -- +[[intervals-regexp]] +==== `regexp` rule parameters + +The `regexp` rule matches terms using a regular expression pattern. +This pattern can expand to match at most 128 terms. +If the pattern matches more than 128 terms,{es} returns an error. 
+ +`pattern`:: +(Required, string) Regexp pattern used to find matching terms. +For a list of operators supported by the +`regexp` pattern, see <>. + +WARNING: Avoid using wildcard patterns, such as `.*` or `.*?+``. This can +increase the iterations needed to find matching terms and slow search +performance. +-- +`analyzer`:: +(Optional, string) <> used to normalize the `pattern`. +Defaults to the top-level ``'s analyzer. + +`use_field`:: ++ +-- +(Optional, string) If specified, match intervals from this field rather than the +top-level ``. + +The `pattern` is normalized using the search analyzer from this field, unless +`analyzer` is specified separately. +-- + [[intervals-fuzzy]] ==== `fuzzy` rule parameters @@ -214,6 +246,40 @@ The `term` is normalized using the search analyzer from this field, unless `analyzer` is specified separately. -- +[[intervals-range]] +==== `range` rule parameters + +The `range` rule matches terms contained within a provided range. +This range can expand to match at most 128 terms. +If the range matches more than 128 terms,{es} returns an error. + +`gt`:: +(Optional, string) Greater than: match terms greater than the provided term. + +`gte`:: +(Optional, string) Greater than or equal to: match terms greater than or +equal to the provided term. + +`lt`:: +(Optional, string) Less than: match terms less than the provided term. + +`lte`:: +(Optional, string) Less than or equal to: match terms less than or +equal to the provided term. + +NOTE: It is required to provide one of `gt` or `gte` params. +It is required to provide one of `lt` or `lte` params. + + +`analyzer`:: +(Optional, string) <> used to normalize the `pattern`. +Defaults to the top-level ``'s analyzer. + +`use_field`:: +(Optional, string) If specified, match intervals from this field rather than the +top-level ``. 
+ + [[intervals-all_of]] ==== `all_of` rule parameters diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 899cc42fea1e0..62edb2256cfc9 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -304,6 +304,30 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex ); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + return toIntervalsSource( + Intervals.regexp(pattern), + new MatchAllDocsQuery(), // regexp queries can be expensive, what should the approximation be? + context + ); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + return toIntervalsSource( + Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper), + new MatchAllDocsQuery(), // range queries can be expensive, what should the approximation be? 
+ context + ); + } + @Override public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, SearchExecutionContext queryShardContext) throws IOException { diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml index 99bd001bd95e2..e828c9ce8d8a8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml @@ -476,3 +476,45 @@ setup: - match: { hits.hits.0._id: "6" } - match: { hits.hits.1._id: "5" } +--- +"Test regexp": + - requires: + cluster_features: "gte_v8.16.0" + reason: "Implemented in 8.16" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cold + - regexp: + pattern: ou.*ide + - match: { hits.total.value: 3 } + + +--- +"Test range": + - requires: + cluster_features: "gte_v8.16.0" + reason: "Implemented in 8.16" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cold + - range: + gte: out + lte: ouu + - match: { hits.total.value: 3 } + diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index aec0c580f1c51..05e0d85f1d8d1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -443,6 +443,30 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex ); } + /** + * Create a regexp {@link IntervalsSource} for the given pattern. 
+ */ + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + throw new IllegalArgumentException( + "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" + ); + } + + /** + * Create a range {@link IntervalsSource} for the given ranges + */ + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + throw new IllegalArgumentException( + "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" + ); + } + /** * An enum used to describe the relation between the range of terms in a * shard when compared with a query range diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java index 4a12ed77b4f26..85a8f45b9efa4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java @@ -247,6 +247,22 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex throw new QueryShardException(context, fail("wildcard intervals query")); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + throw new QueryShardException(context, fail("regexp intervals query")); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + throw new QueryShardException(context, fail("range intervals query")); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { throw new IllegalArgumentException(fail("aggregation or sorts")); diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 1f0c920c39c8f..c0905c2a763a4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -848,6 +848,28 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex return Intervals.wildcard(pattern); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + if (getTextSearchInfo().hasPositions() == false) { + throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + } + return Intervals.regexp(pattern); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + if (getTextSearchInfo().hasPositions() == false) { + throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + } + return Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper); + } + private void checkForPositions() { if (getTextSearchInfo().hasPositions() == false) { throw new IllegalStateException("field:[" + name() + "] was indexed without position data; cannot run PhraseQuery"); diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java index 9a326cf927cf6..359b546ff6f3f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -76,10 +76,16 @@ public static IntervalsSourceProvider fromXContent(XContentParser parser) throws return Wildcard.fromXContent(parser); case "fuzzy": 
return Fuzzy.fromXContent(parser); + case "regexp": + return Regexp.fromXContent(parser); + case "range": + return Range.fromXContent(parser); } throw new ParsingException( parser.getTokenLocation(), - "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of, prefix, wildcard]" + "Unknown interval type [" + + parser.currentName() + + "], expecting one of [match, any_of, all_of, prefix, wildcard, regexp, range]" ); } @@ -746,6 +752,124 @@ String getUseField() { } } + public static class Regexp extends IntervalsSourceProvider { + + public static final String NAME = "regexp"; + + private final String pattern; + private final String analyzer; + private final String useField; + + public Regexp(String pattern, String analyzer, String useField) { + this.pattern = pattern; + this.analyzer = analyzer; + this.useField = useField; + } + + public Regexp(StreamInput in) throws IOException { + this.pattern = in.readString(); + this.analyzer = in.readOptionalString(); + this.useField = in.readOptionalString(); + } + + @Override + public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getIndexAnalyzers().get(this.analyzer); + } + if (useField != null) { + fieldType = context.getFieldType(useField); + assert fieldType != null; + } + if (analyzer == null) { + analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + } + BytesRef normalizedPattern = analyzer.normalize(fieldType.name(), pattern); + IntervalsSource source = fieldType.regexpIntervals(normalizedPattern, context); + if (useField != null) { + source = Intervals.fixField(useField, source); + } + return source; + } + + @Override + public void extractFields(Set fields) { + if (useField != null) { + fields.add(useField); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + Regexp regexp = (Regexp) o; + return Objects.equals(pattern, regexp.pattern) + && Objects.equals(analyzer, regexp.analyzer) + && Objects.equals(useField, regexp.useField); + } + + @Override + public int hashCode() { + return Objects.hash(pattern, analyzer, useField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(pattern); + out.writeOptionalString(analyzer); + out.writeOptionalString(useField); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field("pattern", pattern); + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (useField != null) { + builder.field("use_field", useField); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String term = (String) args[0]; + String analyzer = (String) args[1]; + String useField = (String) args[2]; + return new Regexp(term, analyzer, useField); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("pattern")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); + } + + public static Regexp fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + String getPattern() { + return pattern; + } + + String getAnalyzer() { + return analyzer; + } + + String getUseField() { + return useField; + } + } + public static class Fuzzy extends IntervalsSourceProvider { public static final String NAME = "fuzzy"; @@ -907,6 +1031,185 @@ String getUseField() { } } + public static class Range extends IntervalsSourceProvider { + + public static final String NAME = "range"; + + private final String lowerTerm; + private final 
String upperTerm; + private final boolean includeLower; + private final boolean includeUpper; + private final String analyzer; + private final String useField; + + public Range(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, String analyzer, String useField) { + this.lowerTerm = lowerTerm; + this.upperTerm = upperTerm; + this.includeLower = includeLower; + this.includeUpper = includeUpper; + this.analyzer = analyzer; + this.useField = useField; + } + + public Range(StreamInput in) throws IOException { + this.lowerTerm = in.readString(); + this.upperTerm = in.readString(); + this.includeLower = in.readBoolean(); + this.includeUpper = in.readBoolean(); + this.analyzer = in.readOptionalString(); + this.useField = in.readOptionalString(); + } + + @Override + public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getIndexAnalyzers().get(this.analyzer); + } + if (useField != null) { + fieldType = context.getFieldType(useField); + assert fieldType != null; + } + if (analyzer == null) { + analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + } + BytesRef normalizedLowerTerm = analyzer.normalize(fieldType.name(), lowerTerm); + BytesRef normalizedUpperTerm = analyzer.normalize(fieldType.name(), upperTerm); + + IntervalsSource source = fieldType.rangeIntervals( + normalizedLowerTerm, + normalizedUpperTerm, + includeLower, + includeUpper, + context + ); + if (useField != null) { + source = Intervals.fixField(useField, source); + } + return source; + } + + @Override + public void extractFields(Set fields) { + if (useField != null) { + fields.add(useField); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Range range = (Range) o; + return includeLower == range.includeLower + && includeUpper == range.includeUpper + && 
Objects.equals(lowerTerm, range.lowerTerm) + && Objects.equals(upperTerm, range.upperTerm) + && Objects.equals(analyzer, range.analyzer) + && Objects.equals(useField, range.useField); + } + + @Override + public int hashCode() { + return Objects.hash(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(lowerTerm); + out.writeString(upperTerm); + out.writeBoolean(includeLower); + out.writeBoolean(includeUpper); + out.writeOptionalString(analyzer); + out.writeOptionalString(useField); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + if (includeLower) { + builder.field("gte", lowerTerm); + } else { + builder.field("gt", lowerTerm); + } + if (includeUpper) { + builder.field("lte", upperTerm); + } else { + builder.field("lt", upperTerm); + } + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (useField != null) { + builder.field("use_field", useField); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String gte = (String) args[0]; + String gt = (String) args[1]; + String lte = (String) args[2]; + String lt = (String) args[3]; + if ((gte == null && gt == null) || (gte != null && gt != null)) { + throw new IllegalArgumentException("Either [gte] or [gt], one of them must be provided"); + } + if ((lte == null && lt == null) || (lte != null && lt != null)) { + throw new IllegalArgumentException("Either [lte] or [lt], one of them must be provided"); + } + boolean includeLower = gte != null ? true : false; + String lowerTerm = gte != null ? gte : gt; + boolean includeUpper = lte != null ? true : false; + String upperTerm = lte != null ? 
lte : lt; + String analyzer = (String) args[4]; + String useField = (String) args[5]; + return new Range(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + }); + + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("gte")); + PARSER.declareString(optionalConstructorArg(), new ParseField("gt")); + PARSER.declareString(optionalConstructorArg(), new ParseField("lte")); + PARSER.declareString(optionalConstructorArg(), new ParseField("lt")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); + } + + public static Range fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + String getLowerTerm() { + return lowerTerm; + } + + String getUpperTerm() { + return upperTerm; + } + + boolean getIncludeLower() { + return includeLower; + } + + boolean getIncludeUpper() { + return includeUpper; + } + + String getAnalyzer() { + return analyzer; + } + + String getUseField() { + return useField; + } + } + static class ScriptFilterSource extends FilteredIntervalsSource { final IntervalFilterScript script; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index bac5fe8c1d1ac..fff8ddf38b3e4 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -1255,6 +1255,16 @@ public static List getIntervalsSourceProviderNamed IntervalsSourceProvider.class, IntervalsSourceProvider.Fuzzy.NAME, IntervalsSourceProvider.Fuzzy::new + ), + new NamedWriteableRegistry.Entry( + IntervalsSourceProvider.class, + IntervalsSourceProvider.Regexp.NAME, + IntervalsSourceProvider.Regexp::new + ), + new NamedWriteableRegistry.Entry( + IntervalsSourceProvider.class, + IntervalsSourceProvider.Range.NAME, + IntervalsSourceProvider.Range::new ) 
); } diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 79cc850c4b8cc..37c7172623b54 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -725,8 +725,72 @@ public void testPrefixes() throws IOException { assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } - public void testWildcard() throws IOException { + public void testRegexp() throws IOException { + String json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m" + } + } + } + }""", TEXT_FIELD_NAME); + + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.regexp(new BytesRef("te.*m"))); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String no_positions_json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m" + } + } + } + } + """, NO_POSITIONS_FIELD); + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(no_positions_json); + builder1.toQuery(createSearchExecutionContext()); + }); + + String fixed_field_json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m", + "use_field": "masked_field" + } + } + } + }""", TEXT_FIELD_NAME); + + builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); + expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.regexp(new BytesRef("te.*m")))); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String fixed_field_json_no_positions = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m", + "use_field": "%s" + } + } + 
} + }""", TEXT_FIELD_NAME, NO_POSITIONS_FIELD); + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(fixed_field_json_no_positions); + builder1.toQuery(createSearchExecutionContext()); + }); + } + public void testWildcard() throws IOException { String json = Strings.format(""" { "intervals": { @@ -931,7 +995,71 @@ public void testFuzzy() throws IOException { Intervals.fixField(MASKED_FIELD, buildFuzzySource("term", "term", 2, true, Fuzziness.ONE.asDistance("term"))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); - } + public void testRange() throws IOException { + String json = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gte": "aaa", + "lte": "aab" + } + } + } + }""", TEXT_FIELD_NAME); + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), true, true)); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + json = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gt": "aaa", + "lt": "aab" + } + } + } + }""", TEXT_FIELD_NAME); + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), false, false)); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String incomplete_range = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gt": "aaa" + } + } + } + } + """, TEXT_FIELD_NAME); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(incomplete_range); + builder1.toQuery(createSearchExecutionContext()); + }); + assertEquals("Either [lte] or [lt], one of them must be provided", exc.getCause().getMessage()); + + String incomplete_range2 = 
Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "lt": "aaa" + } + } + } + } + """, TEXT_FIELD_NAME); + exc = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(incomplete_range2); + builder1.toQuery(createSearchExecutionContext()); + }); + assertEquals("Either [gte] or [gt], one of them must be provided", exc.getCause().getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java new file mode 100644 index 0000000000000..73b4be4ec6154 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.index.query.IntervalsSourceProvider.Range; + +public class RangeIntervalsSourceProviderTests extends AbstractXContentSerializingTestCase { + + @Override + protected Range createTestInstance() { + return createRandomRange(); + } + + static Range createRandomRange() { + return new Range( + "a" + randomAlphaOfLengthBetween(1, 10), + "z" + randomAlphaOfLengthBetween(1, 10), + randomBoolean(), + randomBoolean(), + randomBoolean() ? randomAlphaOfLength(10) : null, + randomBoolean() ? 
randomAlphaOfLength(10) : null + ); + } + + @Override + protected Range mutateInstance(Range instance) { + String lowerTerm = instance.getLowerTerm(); + String upperTerm = instance.getUpperTerm(); + boolean includeLower = instance.getIncludeLower(); + boolean includeUpper = instance.getIncludeUpper(); + String analyzer = instance.getAnalyzer(); + String useField = instance.getUseField(); + switch (between(0, 5)) { + case 0 -> lowerTerm = "a" + lowerTerm; + case 1 -> upperTerm = "z" + upperTerm; + case 2 -> includeLower = includeLower == false; + case 3 -> includeUpper = includeUpper == false; + case 4 -> analyzer = randomAlphaOfLength(5); + case 5 -> useField = useField == null ? randomAlphaOfLength(5) : null; + } + return new Range(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + } + + @Override + protected Writeable.Reader instanceReader() { + return Range::new; + } + + @Override + protected Range doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + Range range = (Range) IntervalsSourceProvider.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return range; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java new file mode 100644 index 0000000000000..b226a1394c75e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.index.query.IntervalsSourceProvider.Regexp; + +public class RegexpIntervalsSourceProviderTests extends AbstractXContentSerializingTestCase { + + @Override + protected Regexp createTestInstance() { + return createRandomRegexp(); + } + + static Regexp createRandomRegexp() { + return new Regexp( + randomAlphaOfLengthBetween(1, 10), + randomBoolean() ? randomAlphaOfLength(10) : null, + randomBoolean() ? randomAlphaOfLength(10) : null + ); + } + + @Override + protected Regexp mutateInstance(Regexp instance) { + String regexp = instance.getPattern(); + String analyzer = instance.getAnalyzer(); + String useField = instance.getUseField(); + switch (between(0, 2)) { + case 0 -> regexp += "a"; + case 1 -> analyzer = randomAlphaOfLength(5); + case 2 -> useField = useField == null ? 
randomAlphaOfLength(5) : null; + } + return new Regexp(regexp, analyzer, useField); + } + + @Override + protected Writeable.Reader instanceReader() { + return Regexp::new; + } + + @Override + protected Regexp doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + Regexp regexp = (Regexp) IntervalsSourceProvider.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return regexp; + } +} From afd0f934920980333c6244d91255076df107226a Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 26 Aug 2024 15:59:16 -0400 Subject: [PATCH 070/417] Build main for lucene 10 --- .buildkite/pipelines/lucene-snapshot/build-snapshot.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml index 1f69b8faa7ab4..f1813ff6003cb 100644 --- a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml +++ b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml @@ -1,8 +1,10 @@ steps: - trigger: apache-lucene-build-snapshot - label: Trigger pipeline to build lucene snapshot + label: Trigger pipeline to build lucene 10 snapshot key: lucene-build - if: build.env("LUCENE_BUILD_ID") == null || build.env("LUCENE_BUILD_ID") == "" + if: (build.env("LUCENE_BUILD_ID") == null || build.env("LUCENE_BUILD_ID") == "") + build: + branch: main - wait - label: Upload and update lucene snapshot command: .buildkite/scripts/lucene-snapshot/upload-snapshot.sh From ca4f4a874132665f20c57f5864696a3df031615f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 26 Aug 2024 20:18:08 +0000 Subject: [PATCH 071/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-054295fd380 --- build-tools-internal/version.properties | 2 +- docs/Versions.asciidoc | 4 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 3 files changed, 78 
insertions(+), 78 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 99a135480b97b..f087c9ed71884 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-a9a70fa97cc +lucene = 10.0.0-snapshot-054295fd380 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index b65b974cd6b69..bdb0704fcd880 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.12.0 -:lucene_version_path: 9_12_0 +:lucene_version: 10.0.0 +:lucene_version_path: 10_0_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c5f82fdd06c09..9d17ac99fc328 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From b49ffb4c68c5fdbaa8d4510b487c91f5506521b1 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 26 Aug 2024 17:08:02 -0400 Subject: [PATCH 072/417] Move ES_BUILD_JAVA to openjdk21 --- .ci/java-versions.properties | 2 +- .ci/matrix-runtime-javas-fips.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties index 21884973742ba..5ed97ab248c2f 100644 --- a/.ci/java-versions.properties +++ b/.ci/java-versions.properties @@ -4,4 +4,4 @@ # build and test Elasticsearch for this branch. 
Valid Java versions # are 'java' or 'openjdk' followed by the major release number. -ES_BUILD_JAVA=openjdk17 +ES_BUILD_JAVA=openjdk21 diff --git a/.ci/matrix-runtime-javas-fips.yml b/.ci/matrix-runtime-javas-fips.yml index e9ace78b35823..624eb6abf222e 100644 --- a/.ci/matrix-runtime-javas-fips.yml +++ b/.ci/matrix-runtime-javas-fips.yml @@ -2,4 +2,4 @@ # values below are included as an axis of the matrix. ES_RUNTIME_JAVA: - - openjdk17 + - openjdk21 From 03894d1364297f9c953ae59426208696ee9f88fd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 27 Aug 2024 06:11:48 +0000 Subject: [PATCH 073/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-4d8ac2b0a02 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a98fd7a1a7606..648f9fd4059c9 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-83df2ff10e2 +lucene = 9.12.0-snapshot-4d8ac2b0a02 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 13f18d77f3aa6..559d6ad2044d3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d40af8814ed6d4d4b2e3f6296cf75b4bf66fb597 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 27 Aug 2024 06:12:32 +0000 Subject: [PATCH 074/417] [Automated] Update Lucene snapshot 
to 10.0.0-snapshot-054295fd380 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9d17ac99fc328..64ff7e52e3684 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2838,127 +2838,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 8a6f94d61bb9bbf1313e57cd6064c36502c6d98c Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 27 Aug 2024 16:24:25 +0200 Subject: [PATCH 075/417] Fix compiler/runtime versions. --- build-tools-internal/src/main/resources/minimumCompilerVersion | 2 +- build-tools-internal/src/main/resources/minimumRuntimeVersion | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/resources/minimumCompilerVersion b/build-tools-internal/src/main/resources/minimumCompilerVersion index 98d9bcb75a685..aabe6ec3909c9 100644 --- a/build-tools-internal/src/main/resources/minimumCompilerVersion +++ b/build-tools-internal/src/main/resources/minimumCompilerVersion @@ -1 +1 @@ -17 +21 diff --git a/build-tools-internal/src/main/resources/minimumRuntimeVersion b/build-tools-internal/src/main/resources/minimumRuntimeVersion index 98d9bcb75a685..aabe6ec3909c9 100644 --- a/build-tools-internal/src/main/resources/minimumRuntimeVersion +++ b/build-tools-internal/src/main/resources/minimumRuntimeVersion @@ -1 +1 @@ -17 +21 From 7ce232ddf255496091dc3e54f830c98246686546 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 27 Aug 2024 19:37:50 +0200 Subject: [PATCH 076/417] replace mentions of SortedSetDocValues#NO_MORE_ORDS See https://github.com/apache/lucene/pull/1021 --- .../aggregations/ParentJoinAggregator.java | 4 +-- .../diskusage/IndexDiskUsageAnalyzer.java | 2 +- .../ordinals/GlobalOrdinalMapping.java | 7 +---- .../fielddata/ordinals/MultiOrdinals.java | 7 ++--- 
.../grouping/GroupingDocValuesSelector.java | 2 +- .../SortedSetDocValuesStringFieldScript.java | 5 ++-- .../script/field/IpDocValuesField.java | 3 ++- .../elasticsearch/search/MultiValueMode.java | 4 +-- .../composite/GlobalOrdinalValuesSource.java | 11 +++----- .../countedterms/CountedTermsAggregator.java | 4 +-- .../bucket/range/BinaryRangeAggregator.java | 3 ++- .../DiversifiedOrdinalsSamplerAggregator.java | 2 +- .../GlobalOrdinalsStringTermsAggregator.java | 16 +++++++----- .../metrics/CardinalityAggregator.java | 3 ++- .../GlobalOrdCardinalityAggregator.java | 7 ++--- .../aggregations/support/MissingValues.java | 26 +++++++++---------- .../tsdb/ES87TSDBDocValuesFormatTests.java | 2 -- .../AbstractStringFieldDataTestCase.java | 6 ----- .../ordinals/MultiOrdinalsTests.java | 2 -- .../KeyedFlattenedLeafFieldDataTests.java | 9 +++---- .../search/MultiValueModeTests.java | 13 +++++----- .../support/IncludeExcludeTests.java | 9 +++---- .../support/MissingValuesTests.java | 9 ++----- .../accesscontrol/FieldSubsetReaderTests.java | 1 - .../VersionStringDocValuesField.java | 3 ++- .../index/LegacyDocValuesIterables.java | 5 +--- 26 files changed, 67 insertions(+), 98 deletions(-) diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index 9c6a788ea2f77..d93a17d58d6f3 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -101,7 +101,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag public void collect(int docId, long owningBucketOrd) throws IOException { if (parentDocs.get(docId) && globalOrdinals.advanceExact(docId)) { int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != -1 && 
globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; collectionStrategy.add(owningBucketOrd, globalOrdinal); } } @@ -149,7 +149,7 @@ public int docID() { continue; } int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; /* * Check if we contain every ordinal. It's almost certainly be * faster to replay all the matching ordinals and filter them down diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 84e78a60bd641..829be9d54dcbd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -272,7 +272,7 @@ void analyzeDocValues(SegmentReader reader, IndexDiskUsageStats stats) throws IO } case SORTED_SET -> { SortedSetDocValues sortedSet = iterateDocValues(maxDocs, () -> docValuesReader.getSortedSet(field), dv -> { - while (dv.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + for (int i = 0; i < dv.docValueCount(); i++) { cancellationChecker.logEvent(); } }); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java index 12e487b27647c..9a1f65b73eaf7 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java @@ -51,12 +51,7 @@ public boolean advanceExact(int target) throws IOException { @Override public long nextOrd() throws IOException { - long 
segmentOrd = values.nextOrd(); - if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return getGlobalOrd(segmentOrd); - } + return getGlobalOrd(values.nextOrd()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index 4d36ce5f35070..dfc17defd6612 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -174,11 +174,8 @@ public boolean advanceExact(int docId) { @Override public long nextOrd() { - if (currentOffset == currentEndOffset) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return ords.get(currentOffset++); - } + assert currentOffset != currentEndOffset; + return ords.get(currentOffset++); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java index 9175aa3b7f2a5..639cf235cd2cb 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java @@ -199,7 +199,7 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { public boolean advanceExact(int target) throws IOException { if (sorted.advanceExact(target)) { ord = (int) sorted.nextOrd(); - if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (sorted.docValueCount() > 1) { throw new IllegalStateException( "failed to extract doc:" + target + ", the grouping field must be single valued" ); diff --git a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java 
index b2b523f12fd30..be5f86585aa41 100644 --- a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java +++ b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java @@ -45,9 +45,8 @@ public void setDocument(int docID) { public void execute() { try { if (hasValue) { - long ord; - while ((ord = sortedSetDocValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - BytesRef bytesRef = sortedSetDocValues.lookupOrd(ord); + for (int i = 0; i < sortedSetDocValues.docValueCount(); i++) { + BytesRef bytesRef = sortedSetDocValues.lookupOrd(sortedSetDocValues.nextOrd()); emit(bytesRef.utf8ToString()); } } diff --git a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java index 4074951a0c104..93cc54aef6d21 100644 --- a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java @@ -156,7 +156,8 @@ public SortedSetIpSupplier(SortedSetDocValues in) { public void setNextDocId(int docId) throws IOException { count = 0; if (in.advanceExact(docId)) { - for (long ord = in.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = in.nextOrd()) { + for (int i = 0; i < in.docValueCount(); i++) { + long ord = in.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index ad314a97a3a67..079900e1aefc0 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -477,8 +477,8 @@ protected BytesRef pick( @Override protected int pick(SortedSetDocValues values) throws IOException { long maxOrd = -1; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = 
values.nextOrd()) { - maxOrd = ord; + for (int i = 0; i < values.docValueCount(); i++) { + maxOrd = values.nextOrd(); } return Math.toIntExact(maxOrd); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index e6f90bfbdf975..c6f393e6ea6f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -40,8 +40,6 @@ import java.util.List; import java.util.function.BiConsumer; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - /** * A {@link SingleDimensionValuesSource} for global ordinals. */ @@ -246,9 +244,8 @@ public DocIdSetIterator competitiveIterator() { @Override public void collect(int doc, long bucket) throws IOException { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { - currentValue = ord; + for (int i = 0; i < dvs.docValueCount(); i++) { + currentValue = dvs.nextOrd(); next.collect(doc, bucket); } } else if (missingBucket) { @@ -305,8 +302,8 @@ public void collect(int doc, long bucket) throws IOException { public void collect(int doc, long bucket) throws IOException { if (currentValueIsSet == false) { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + for (int i = 0; i < dvs.docValueCount(); i++) { + long ord = dvs.nextOrd(); if (term.equals(lookup.lookupOrd(ord))) { currentValueIsSet = true; currentValue = ord; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index 736b1c0c0c249..476a48c0d564b 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -38,7 +38,6 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; class CountedTermsAggregator extends TermsAggregator { @@ -76,7 +75,8 @@ private LeafBucketCollector getLeafCollector(SortedSetDocValues ords, LeafBucket @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (ords.advanceExact(doc)) { - for (long ord = ords.nextOrd(); ord != NO_MORE_ORDS; ord = ords.nextOrd()) { + for (int i = 0; i < ords.docValueCount(); i++) { + long ord = ords.nextOrd(); collectOrdinal(bucketOrds.add(owningBucketOrd, ords.lookupOrd(ord)), doc, sub); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 1a793ecd80b11..922ce20642def 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -156,7 +156,8 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas this.collector = (doc, bucket) -> { if (values.advanceExact(doc)) { int lo = 0; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); lo = collect(doc, ord, bucket, lo); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index e74cc91a03556..d4d608db04d8f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -120,7 +120,7 @@ public boolean advanceExact(int target) throws IOException { value = globalOrds.nextOrd(); // Check there isn't a second value for this // document - if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (globalOrds.docValueCount() > 1) { throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 26204e1a2530f..769f7d6fd08b9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -52,7 +52,6 @@ import java.util.function.LongPredicate; import java.util.function.LongUnaryOperator; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; /** @@ -166,7 +165,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } @@ 
-178,7 +178,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (false == acceptedGlobalOrdinals.test(globalOrd)) { continue; } @@ -349,7 +350,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == segmentOrds.advanceExact(doc)) { return; } - for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_ORDS; segmentOrd = segmentOrds.nextOrd()) { + for (int i = 0; i < segmentOrds.docValueCount(); i++) { + long segmentOrd = segmentOrds.nextOrd(); int docCount = docCountProvider.getDocCount(doc); segmentDocCounts.increment(segmentOrd + 1, docCount); } @@ -523,7 +525,8 @@ private void forEachExcludeDeletedDocs(BucketInfoConsumer consumer) throws IOExc if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } @@ -633,7 +636,8 @@ void forEachExcludeDeletedDocs(long owningBucketOrd, BucketInfoConsumer consumer if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? 
valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java index 491f05ba1ab96..293353491878c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java @@ -307,7 +307,8 @@ public void collect(int doc, long bucketOrd) throws IOException { bits = new BitArray(maxOrd, bigArrays); visitedOrds.set(bucketOrd, bits); } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java index 32cf3c7d24115..716420bc2192b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java @@ -253,8 +253,8 @@ public CompetitiveIterator competitiveIterator() { @Override public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { - for (long ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues - .nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + 
long ord = docValues.nextOrd(); if (bits.getAndSet(ord) == false) { competitiveIterator.onVisitedOrdinal(ord); } @@ -303,7 +303,8 @@ public void collect(int doc, long bucketOrd) throws IOException { public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { final BitArray bits = getNewOrExistingBitArray(bucketOrd); - for (long ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java index 453dd469392c6..673170498acc0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java @@ -270,18 +270,17 @@ public long nextOrd() throws IOException { if (hasOrds) { return values.nextOrd(); } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -320,7 +319,11 @@ public BytesRef lookupOrd(long ord) throws IOException { @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -338,12 +341,7 @@ public long nextOrd() throws IOException { return ord + 1; } } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are 
no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java index 0aa8520eec5e4..3c53685b44421 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java @@ -114,7 +114,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { assertEquals(0, field.nextOrd()); BytesRef scratch = field.lookupOrd(0); assertEquals("value", scratch.utf8ToString()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, field.nextOrd()); } assertEquals(DocIdSetIterator.NO_MORE_DOCS, field.nextDoc()); for (int i = 0; i < NUM_DOCS; i++) { @@ -125,7 +124,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { BytesRef scratch = fieldN.lookupOrd(0); assertEquals("value" + i, scratch.utf8ToString()); assertEquals(DocIdSetIterator.NO_MORE_DOCS, fieldN.nextDoc()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, fieldN.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index c54e980c5e21b..0bcad16497cfb 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -498,14 +498,12 @@ public void testGlobalOrdinals() throws Exception { assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); 
assertFalse(values.advanceExact(1)); assertTrue(values.advanceExact(2)); ord = values.nextOrd(); assertThat(ord, equalTo(4L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03")); ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Second segment leaf = topLevelReader.leaves().get(1); @@ -522,7 +520,6 @@ public void testGlobalOrdinals() throws Exception { assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertTrue(values.advanceExact(1)); ord = values.nextOrd(); assertThat(ord, equalTo(7L)); @@ -534,7 +531,6 @@ public void testGlobalOrdinals() throws Exception { assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(2)); assertTrue(values.advanceExact(3)); ord = values.nextOrd(); @@ -547,7 +543,6 @@ public void testGlobalOrdinals() throws Exception { assertThat(ord, equalTo(11L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10")); ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Third segment leaf = topLevelReader.leaves().get(2); @@ -564,7 +559,6 @@ public void testGlobalOrdinals() throws Exception { assertThat(ord, equalTo(2L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10")); ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); } public void testTermsEnum() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java index 54086e0d9689f..663ba92eed126 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java @@ -107,7 +107,6 @@ public int compare(OrdAndId o1, OrdAndId o2) { for (Long ord : docOrds) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, docs.nextOrd()); } for (int i = docId + 1; i < ordAndId.id; i++) { assertFalse(singleOrds.advanceExact(i)); @@ -256,7 +255,6 @@ private void assertEquals(SortedSetDocValues docs, long[][] ordinalPlan) throws for (long ord : ords) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertThat(docs.nextOrd(), equalTo(SortedSetDocValues.NO_MORE_ORDS)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java index 40fea348d997e..0181b68b5b18a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java @@ -22,8 +22,6 @@ import java.io.IOException; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - public class KeyedFlattenedLeafFieldDataTests extends ESTestCase { private LeafOrdinalsFieldData delegate; @@ -120,7 +118,8 @@ public void testNextOrd() throws IOException { docValues.advanceExact(0); int retrievedOrds = 0; - for (long ord = docValues.nextOrd(); ord != NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); assertTrue(0 <= ord && ord < 10); retrievedOrds++; @@ -193,9 +192,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (index == documentOrds.length) { - return NO_MORE_ORDS; - } + assert index == documentOrds.length; return documentOrds[index++]; } diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java 
b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 488d79559f589..ec0296619e150 100644 --- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -713,11 +713,8 @@ public void testMultiValuedOrds() throws Exception { @Override public long nextOrd() { - if (i < array[doc].length) { - return array[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < array[doc].length; + return array[doc][i++]; } @Override @@ -761,7 +758,8 @@ private void verifySortedSet(Supplier supplier, int maxDoc) } int expected = -1; if (values.advanceExact(i)) { - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int j = 0; j < values.docValueCount(); j++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { @@ -809,7 +807,8 @@ private void verifySortedSet( if (++count > maxChildren) { break; } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index 6dea4233ef281..19a3f255fc0dc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -87,12 +87,9 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (consumed) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - consumed = true; - return 0; - } + assert consumed == false; + consumed = true; + return 0; } @Override diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index 655b8f90ed0ef..6ad649a97ead4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ -123,11 +123,8 @@ public long getValueCount() { @Override public long nextOrd() { - if (i < ords[doc].length) { - return ords[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < ords[doc].length; + return ords[doc][i++]; } @Override @@ -152,10 +149,8 @@ public int docValueCount() { for (int ord : ords[i]) { assertEquals(values[ord], withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } else { assertEquals(missing, withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 560dee9b5843c..9c269259792bc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -615,7 +615,6 @@ public void testSortedSetDocValues() throws Exception { assertNotNull(dv); assertTrue(dv.advanceExact(0)); assertEquals(0, dv.nextOrd()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd()); assertEquals(new BytesRef("testA"), dv.lookupOrd(0)); assertNull(segmentReader.getSortedSetDocValues("fieldB")); diff --git 
a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java index 17e1d70cbb471..01f0fdb256551 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java @@ -47,7 +47,8 @@ public VersionStringDocValuesField(SortedSetDocValues input, String name) { public void setNextDocId(int docId) throws IOException { count = 0; if (input.advanceExact(docId)) { - for (long ord = input.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = input.nextOrd()) { + for (int i = 0; i < input.docValueCount(); i++) { + long ord = input.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java index 5a9b1bb252308..7ce51b8ade09c 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java @@ -182,10 +182,7 @@ public Number next() { try { if (nextDocID > values.docID()) { if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - ordCount = 0; - while (values.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { - ordCount++; - } + ordCount = values.docValueCount(); } } int result; From 3a6887bf4cfcb4c355007afe7204761e8f6cf4b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 28 Aug 2024 06:11:59 +0000 Subject: [PATCH 077/417] [Automated] Update 
Lucene snapshot to 9.12.0-snapshot-3be89ea2d59 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 648f9fd4059c9..48221df22a639 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-4d8ac2b0a02 +lucene = 9.12.0-snapshot-3be89ea2d59 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 559d6ad2044d3..d2b6674f9aba6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From cb96f898cab39eace194c5320d9138daba29e52e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 28 Aug 2024 06:12:00 +0000 Subject: [PATCH 078/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-0f5359a2ac6 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f087c9ed71884..cc95244609eab 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-054295fd380 +lucene = 10.0.0-snapshot-0f5359a2ac6 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff 
--git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 64ff7e52e3684..7d85da2b76caa 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 2cc8f8a2914e7d868f444c255dc951f396ee5779 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 09:22:34 +0200 Subject: [PATCH 079/417] Fix some compile errors related to the scorer/scorerSupplier/bulkScorer change. --- .../indices/IndicesQueryCache.java | 14 -- .../queries/BinaryDocValuesRangeQuery.java | 79 ++++++---- .../bucket/filter/MergedPointRangeQuery.java | 24 --- .../search/vectors/DenseVectorQuery.java | 25 ++- .../search/vectors/KnnScoreDocQuery.java | 146 ++++++++++-------- 5 files changed, 144 insertions(+), 144 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index a40a5ab2e2fe8..173366abe70eb 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -11,13 +11,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import 
org.elasticsearch.common.lucene.ShardCoreKeyMap; @@ -172,24 +170,12 @@ public int count(LeafReaderContext context) throws IOException { return in.count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); return in.scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.bulkScorer(context); - } - @Override public boolean isCacheable(LeafReaderContext ctx) { return in.isCacheable(ctx); diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java index f474b6f7c883b..3e1ddd13da812 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -60,52 +61,62 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final BinaryDocValues values = context.reader().getBinaryDocValues(fieldName); if (values == null) { return null; } - final TwoPhaseIterator iterator = new TwoPhaseIterator(values) { - - 
ByteArrayStreamInput in = new ByteArrayStreamInput(); - BytesRef otherFrom = new BytesRef(); - BytesRef otherTo = new BytesRef(); - + return new ScorerSupplier() { @Override - public boolean matches() throws IOException { - BytesRef encodedRanges = values.binaryValue(); - in.reset(encodedRanges.bytes, encodedRanges.offset, encodedRanges.length); - int numRanges = in.readVInt(); - final byte[] bytes = encodedRanges.bytes; - otherFrom.bytes = bytes; - otherTo.bytes = bytes; - int offset = in.getPosition(); - for (int i = 0; i < numRanges; i++) { - int length = lengthType.readLength(bytes, offset); - otherFrom.offset = offset; - otherFrom.length = length; - offset += length; - - length = lengthType.readLength(bytes, offset); - otherTo.offset = offset; - otherTo.length = length; - offset += length; - - if (queryType.matches(from, to, otherFrom, otherTo)) { - return true; + public Scorer get(long leadCost) throws IOException { + final TwoPhaseIterator iterator = new TwoPhaseIterator(values) { + + ByteArrayStreamInput in = new ByteArrayStreamInput(); + BytesRef otherFrom = new BytesRef(); + BytesRef otherTo = new BytesRef(); + + @Override + public boolean matches() throws IOException { + BytesRef encodedRanges = values.binaryValue(); + in.reset(encodedRanges.bytes, encodedRanges.offset, encodedRanges.length); + int numRanges = in.readVInt(); + final byte[] bytes = encodedRanges.bytes; + otherFrom.bytes = bytes; + otherTo.bytes = bytes; + int offset = in.getPosition(); + for (int i = 0; i < numRanges; i++) { + int length = lengthType.readLength(bytes, offset); + otherFrom.offset = offset; + otherFrom.length = length; + offset += length; + + length = lengthType.readLength(bytes, offset); + otherTo.offset = offset; + otherTo.length = length; + offset += length; + + if (queryType.matches(from, to, otherFrom, otherTo)) { + return true; + } + } + assert offset == encodedRanges.offset + encodedRanges.length; + return false; + } + + @Override + public float matchCost() { + 
return 4; // at most 4 comparisons } - } - assert offset == encodedRanges.offset + encodedRanges.length; - return false; + }; + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override - public float matchCost() { - return 4; // at most 4 comparisons + public long cost() { + return values.cost(); } }; - return new ConstantScoreScorer(this, score(), scoreMode, iterator); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java index 22b5b80c0c717..486b028841294 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -20,7 +19,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -116,15 +114,6 @@ public int count(LeafReaderContext context) throws IOException { return multiValuedSegmentWeight().count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /* @@ -143,19 +132,6 
@@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return multiValuedSegmentWeight().scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - PointValues points = context.reader().getPointValues(field); - if (points == null) { - return null; - } - if (points.size() == points.getDocCount()) { - // Each doc that has points has exactly one point. - return singleValuedSegmentWeight().bulkScorer(context); - } - return multiValuedSegmentWeight().bulkScorer(context); - } - private Weight singleValuedSegmentWeight() throws IOException { if (singleValuedSegmentWeight == null) { singleValuedSegmentWeight = delegateForSingleValuedSegments.createWeight(searcher, scoreMode, boost); diff --git a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java index 8fd59a0e6f224..e73badf0da4e1 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.search.Weight; @@ -69,12 +70,25 @@ public Explanation explain(LeafReaderContext leafReaderContext, int i) throws IO } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - VectorScorer vectorScorer = vectorScorer(leafReaderContext); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + VectorScorer vectorScorer = vectorScorer(context); if (vectorScorer == null) { return null; } - return new DenseVectorScorer(this, vectorScorer); + return new ScorerSupplier() { + + private final DenseVectorScorer scorer = 
new DenseVectorScorer(vectorScorer, boost); + + @Override + public Scorer get(long leadCost) throws IOException { + return scorer; + } + + @Override + public long cost() { + return scorer.iterator().cost(); + } + }; } @Override @@ -177,11 +191,10 @@ static class DenseVectorScorer extends Scorer { private final DocIdSetIterator iterator; private final float boost; - DenseVectorScorer(DenseVectorWeight weight, VectorScorer vectorScorer) { - super(weight); + DenseVectorScorer(VectorScorer vectorScorer, float boost) { this.vectorScorer = vectorScorer; this.iterator = vectorScorer.iterator(); - this.boost = weight.boost; + this.boost = boost; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java index ee4b31e7b5360..d8989a3bb7bf0 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -87,94 +88,107 @@ public Explanation explain(LeafReaderContext context, int doc) { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { // Segment starts indicate how many docs are in the segment, // upper equalling lower indicates no documents for this segment if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new Scorer(this) { - final int lower = segmentStarts[context.ord]; - final int upper = segmentStarts[context.ord + 1]; - int upTo = -1; + return new ScorerSupplier() { + + private final Scorer scorer = new Scorer() { + final int lower = 
segmentStarts[context.ord]; + final int upper = segmentStarts[context.ord + 1]; + int upTo = -1; + + @Override + public DocIdSetIterator iterator() { + return new DocIdSetIterator() { + @Override + public int docID() { + return currentDocId(); + } - @Override - public DocIdSetIterator iterator() { - return new DocIdSetIterator() { - @Override - public int docID() { - return currentDocId(); - } + @Override + public int nextDoc() { + if (upTo == -1) { + upTo = lower; + } else { + ++upTo; + } + return currentDocId(); + } - @Override - public int nextDoc() { - if (upTo == -1) { - upTo = lower; - } else { - ++upTo; + @Override + public int advance(int target) throws IOException { + return slowAdvance(target); } - return currentDocId(); - } - @Override - public int advance(int target) throws IOException { - return slowAdvance(target); - } + @Override + public long cost() { + return upper - lower; + } + }; + } - @Override - public long cost() { - return upper - lower; + @Override + public float getMaxScore(int docId) { + // NO_MORE_DOCS indicates the maximum score for all docs in this segment + // Anything less than must be accounted for via the docBase. + if (docId != NO_MORE_DOCS) { + docId += context.docBase; } - }; - } - - @Override - public float getMaxScore(int docId) { - // NO_MORE_DOCS indicates the maximum score for all docs in this segment - // Anything less than must be accounted for via the docBase. 
- if (docId != NO_MORE_DOCS) { - docId += context.docBase; + float maxScore = 0; + for (int idx = Math.max(lower, upTo); idx < upper && docs[idx] <= docId; idx++) { + maxScore = Math.max(maxScore, scores[idx] * boost); + } + return maxScore; } - float maxScore = 0; - for (int idx = Math.max(lower, upTo); idx < upper && docs[idx] <= docId; idx++) { - maxScore = Math.max(maxScore, scores[idx] * boost); + + @Override + public float score() { + return scores[upTo] * boost; } - return maxScore; - } - @Override - public float score() { - return scores[upTo] * boost; - } + @Override + public int advanceShallow(int docId) { + int start = Math.max(upTo, lower); + int docIdIndex = Arrays.binarySearch(docs, start, upper, docId + context.docBase); + if (docIdIndex < 0) { + docIdIndex = -1 - docIdIndex; + } + if (docIdIndex >= upper) { + return NO_MORE_DOCS; + } + return docs[docIdIndex]; + } - @Override - public int advanceShallow(int docId) { - int start = Math.max(upTo, lower); - int docIdIndex = Arrays.binarySearch(docs, start, upper, docId + context.docBase); - if (docIdIndex < 0) { - docIdIndex = -1 - docIdIndex; + @Override + public int docID() { + return currentDocId(); } - if (docIdIndex >= upper) { - return NO_MORE_DOCS; + + private int currentDocId() { + if (upTo == -1) { + return -1; + } + if (upTo >= upper) { + return NO_MORE_DOCS; + } + return docs[upTo] - context.docBase; } - return docs[docIdIndex]; - } + + }; @Override - public int docID() { - return currentDocId(); + public Scorer get(long leadCost) throws IOException { + return scorer; } - private int currentDocId() { - if (upTo == -1) { - return -1; - } - if (upTo >= upper) { - return NO_MORE_DOCS; - } - return docs[upTo] - context.docBase; + @Override + public long cost() { + return scorer.iterator().cost(); } - }; } From 6e996f12f7614eee2f865fddd28c953316d04672 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 09:55:31 +0200 Subject: [PATCH 080/417] More compile errors related to the 
scorer/scorerSupplier/bulkScorer change. --- .../queries/BinaryDocValuesRangeQuery.java | 78 +++++----- .../sampler/random/RandomSamplingQuery.java | 7 +- .../LongScriptFieldDistanceFeatureQuery.java | 10 +- .../search/vectors/DenseVectorQuery.java | 15 +- .../search/vectors/KnnScoreDocQuery.java | 144 ++++++++---------- 5 files changed, 111 insertions(+), 143 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java index 3e1ddd13da812..b892d0562313c 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java @@ -17,7 +17,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -67,56 +66,47 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return null; } - return new ScorerSupplier() { - @Override - public Scorer get(long leadCost) throws IOException { - final TwoPhaseIterator iterator = new TwoPhaseIterator(values) { - - ByteArrayStreamInput in = new ByteArrayStreamInput(); - BytesRef otherFrom = new BytesRef(); - BytesRef otherTo = new BytesRef(); - - @Override - public boolean matches() throws IOException { - BytesRef encodedRanges = values.binaryValue(); - in.reset(encodedRanges.bytes, encodedRanges.offset, encodedRanges.length); - int numRanges = in.readVInt(); - final byte[] bytes = encodedRanges.bytes; - otherFrom.bytes = bytes; - otherTo.bytes = bytes; - int offset = in.getPosition(); - for (int i = 0; i < numRanges; i++) { - int length = lengthType.readLength(bytes, offset); - otherFrom.offset = offset; 
- otherFrom.length = length; - offset += length; - - length = lengthType.readLength(bytes, offset); - otherTo.offset = offset; - otherTo.length = length; - offset += length; - - if (queryType.matches(from, to, otherFrom, otherTo)) { - return true; - } - } - assert offset == encodedRanges.offset + encodedRanges.length; - return false; - } + final TwoPhaseIterator iterator = new TwoPhaseIterator(values) { + + ByteArrayStreamInput in = new ByteArrayStreamInput(); + BytesRef otherFrom = new BytesRef(); + BytesRef otherTo = new BytesRef(); - @Override - public float matchCost() { - return 4; // at most 4 comparisons + @Override + public boolean matches() throws IOException { + BytesRef encodedRanges = values.binaryValue(); + in.reset(encodedRanges.bytes, encodedRanges.offset, encodedRanges.length); + int numRanges = in.readVInt(); + final byte[] bytes = encodedRanges.bytes; + otherFrom.bytes = bytes; + otherTo.bytes = bytes; + int offset = in.getPosition(); + for (int i = 0; i < numRanges; i++) { + int length = lengthType.readLength(bytes, offset); + otherFrom.offset = offset; + otherFrom.length = length; + offset += length; + + length = lengthType.readLength(bytes, offset); + otherTo.offset = offset; + otherTo.length = length; + offset += length; + + if (queryType.matches(from, to, otherFrom, otherTo)) { + return true; } - }; - return new ConstantScoreScorer(score(), scoreMode, iterator); + } + assert offset == encodedRanges.offset + encodedRanges.length; + return false; } @Override - public long cost() { - return values.cost(); + public float matchCost() { + return 4; // at most 4 comparisons } }; + + return new DefaultScorerSupplier(new ConstantScoreScorer(score(), scoreMode, iterator)); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java index 08705c36e5e78..42584709d90d8 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -75,15 +76,15 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SplittableRandom random = new SplittableRandom(BitMixer.mix(hash ^ seed)); int maxDoc = context.reader().maxDoc(); - return new ConstantScoreScorer( - this, + Scorer scorer = new ConstantScoreScorer( boost, ScoreMode.COMPLETE_NO_SCORES, new RandomSamplingIterator(maxDoc, p, random::nextInt) ); + return new DefaultScorerSupplier(scorer); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java index c5a751e4ba0e8..51d9b34abc80b 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractLongFieldScript; @@ -55,8 +56,10 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer 
scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + return new DefaultScorerSupplier( + new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost) + ); } @Override @@ -83,8 +86,7 @@ private class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java index e73badf0da4e1..86ac3ece7117b 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java @@ -75,20 +75,7 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti if (vectorScorer == null) { return null; } - return new ScorerSupplier() { - - private final DenseVectorScorer scorer = new DenseVectorScorer(vectorScorer, boost); - - @Override - public Scorer get(long leadCost) throws IOException { - return scorer; - } - - @Override - public long cost() { - return scorer.iterator().cost(); - } - }; + return new DefaultScorerSupplier(new DenseVectorScorer(vectorScorer, boost)); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java index d8989a3bb7bf0..f040f839d59a1 100644 --- 
a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java @@ -94,102 +94,90 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new ScorerSupplier() { - - private final Scorer scorer = new Scorer() { - final int lower = segmentStarts[context.ord]; - final int upper = segmentStarts[context.ord + 1]; - int upTo = -1; - - @Override - public DocIdSetIterator iterator() { - return new DocIdSetIterator() { - @Override - public int docID() { - return currentDocId(); - } - - @Override - public int nextDoc() { - if (upTo == -1) { - upTo = lower; - } else { - ++upTo; - } - return currentDocId(); - } + Scorer scorer = new Scorer() { + final int lower = segmentStarts[context.ord]; + final int upper = segmentStarts[context.ord + 1]; + int upTo = -1; - @Override - public int advance(int target) throws IOException { - return slowAdvance(target); - } + @Override + public DocIdSetIterator iterator() { + return new DocIdSetIterator() { + @Override + public int docID() { + return currentDocId(); + } - @Override - public long cost() { - return upper - lower; + @Override + public int nextDoc() { + if (upTo == -1) { + upTo = lower; + } else { + ++upTo; } - }; - } - - @Override - public float getMaxScore(int docId) { - // NO_MORE_DOCS indicates the maximum score for all docs in this segment - // Anything less than must be accounted for via the docBase. 
- if (docId != NO_MORE_DOCS) { - docId += context.docBase; + return currentDocId(); } - float maxScore = 0; - for (int idx = Math.max(lower, upTo); idx < upper && docs[idx] <= docId; idx++) { - maxScore = Math.max(maxScore, scores[idx] * boost); - } - return maxScore; - } - - @Override - public float score() { - return scores[upTo] * boost; - } - @Override - public int advanceShallow(int docId) { - int start = Math.max(upTo, lower); - int docIdIndex = Arrays.binarySearch(docs, start, upper, docId + context.docBase); - if (docIdIndex < 0) { - docIdIndex = -1 - docIdIndex; + @Override + public int advance(int target) throws IOException { + return slowAdvance(target); } - if (docIdIndex >= upper) { - return NO_MORE_DOCS; + + @Override + public long cost() { + return upper - lower; } - return docs[docIdIndex]; - } + }; + } - @Override - public int docID() { - return currentDocId(); + @Override + public float getMaxScore(int docId) { + // NO_MORE_DOCS indicates the maximum score for all docs in this segment + // Anything less than must be accounted for via the docBase. 
+ if (docId != NO_MORE_DOCS) { + docId += context.docBase; } - - private int currentDocId() { - if (upTo == -1) { - return -1; - } - if (upTo >= upper) { - return NO_MORE_DOCS; - } - return docs[upTo] - context.docBase; + float maxScore = 0; + for (int idx = Math.max(lower, upTo); idx < upper && docs[idx] <= docId; idx++) { + maxScore = Math.max(maxScore, scores[idx] * boost); } + return maxScore; + } - }; + @Override + public float score() { + return scores[upTo] * boost; + } @Override - public Scorer get(long leadCost) throws IOException { - return scorer; + public int advanceShallow(int docId) { + int start = Math.max(upTo, lower); + int docIdIndex = Arrays.binarySearch(docs, start, upper, docId + context.docBase); + if (docIdIndex < 0) { + docIdIndex = -1 - docIdIndex; + } + if (docIdIndex >= upper) { + return NO_MORE_DOCS; + } + return docs[docIdIndex]; } @Override - public long cost() { - return scorer.iterator().cost(); + public int docID() { + return currentDocId(); } + + private int currentDocId() { + if (upTo == -1) { + return -1; + } + if (upTo >= upper) { + return NO_MORE_DOCS; + } + return docs[upTo] - context.docBase; + } + }; + return new DefaultScorerSupplier(scorer); } @Override From 81d92ba4d4db9bfa8d4c7ebd5c9af376a1a9d48e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 10:06:46 +0200 Subject: [PATCH 081/417] IOContext.READ -> IOContext.DEFAULT --- .../src/main/java/org/elasticsearch/common/lucene/Lucene.java | 2 +- .../elasticsearch/indices/recovery/RecoverySourceHandler.java | 2 +- .../repositories/blobstore/BlobStoreRepository.java | 2 +- .../admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java | 2 +- .../xpack/ccr/repository/CcrRestoreSourceService.java | 2 +- .../org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java 
index 2b8db7fe8b7fd..2cd3361a05822 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -241,7 +241,7 @@ public static void checkSegmentInfoIntegrity(final Directory directory) throws I @Override protected Object doBody(String segmentFileName) throws IOException { - try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) { + try (IndexInput input = directory.openInput(segmentFileName, IOContext.READONCE)) { CodecUtil.checksumEntireFile(input); } return null; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 7d1f3c05acf10..de8833ac60275 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -1373,7 +1373,7 @@ protected void onNewResource(StoreFileMetadata md) throws IOException { // we already have the file contents on heap no need to open the file again currentInput = null; } else { - currentInput = store.directory().openInput(md.name(), IOContext.READ); + currentInput = store.directory().openInput(md.name(), IOContext.DEFAULT); } } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index d56acde64e192..910ca830263d0 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -3857,7 +3857,7 @@ protected void snapshotFile(SnapshotShardContext context, FileInfo fileInfo) thr final String file = fileInfo.physicalName(); try ( Releasable ignored = context.withCommitRef(); - IndexInput indexInput = 
store.openVerifyingInput(file, IOContext.READ, fileInfo.metadata()) + IndexInput indexInput = store.openVerifyingInput(file, IOContext.DEFAULT, fileInfo.metadata()) ) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 94d56c905afec..976c99675817f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -686,7 +686,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats final String[] files; final Directory directory; if (sis.getUseCompoundFile()) { - directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.READ); + directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.DEFAULT); files = directory.listAll(); } else { directory = reader.directory(); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index 6b390ab5747a8..164e6ed5406ae 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -245,7 +245,7 @@ private Store.MetadataSnapshot getMetadata() throws IOException { private long readFileBytes(String fileName, ByteArray reference) throws IOException { try (Releasable ignored = keyedLock.acquire(fileName)) { - var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : IOContext.READ; + var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : IOContext.DEFAULT; final IndexInput indexInput = cachedInputs.computeIfAbsent(fileName, f -> { try { return commitRef.getIndexCommit().getDirectory().openInput(fileName, context); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index d1455eaa2f1c4..069eb126bcb3c 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -305,7 +305,7 @@ private static void parseSegmentInfos(Directory directory, DataInput input, OldS byte[] segmentID = new byte[StringHelper.ID_LENGTH]; input.readBytes(segmentID, 0, segmentID.length); Codec codec = readCodec(input); - SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ); + SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.DEFAULT); info.setCodec(codec); totalDocs += info.maxDoc(); long delGen = CodecUtil.readBELong(input); From 8ae3025f1f121e80ff7e1484adb18125f4554530 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 10:20:51 +0200 Subject: [PATCH 082/417] Fix some compile errors in mappers. 
--- .../org/elasticsearch/index/mapper/DateFieldMapper.java | 2 +- .../elasticsearch/index/mapper/KeywordFieldMapper.java | 2 +- .../elasticsearch/index/mapper/NumberFieldMapper.java | 9 +++++---- .../elasticsearch/index/mapper/TermBasedFieldType.java | 3 ++- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index c70414807cdce..8a3077f5a29da 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -685,7 +685,7 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo long pivotLong = resolution.convert(pivotTime); // As we already apply boost in AbstractQueryBuilder::toQuery, we always passing a boost of 1.0 to distanceFeatureQuery if (isIndexed()) { - return LongPoint.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); + return LongField.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); } else { return new LongScriptFieldDistanceFeatureQuery( new Script(""), diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 16aa827e6a251..1aabffb151c7d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -469,7 +469,7 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (isIndexed()) { return super.termsQuery(values, context); } else { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + Collection bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } } diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 1e5143a58f20a..9f6ff1312122f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -10,6 +10,7 @@ import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatField; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntField; @@ -578,7 +579,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final float f = value.floatValue(); if (indexed && docValued) { - document.add(new FloatField(name, f)); + document.add(new FloatField(name, f, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(f))); } else if (indexed) { @@ -724,7 +725,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final double d = value.doubleValue(); if (indexed && docValued) { - document.add(new DoubleField(name, d)); + document.add(new DoubleField(name, d, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(d))); } else if (indexed) { @@ -1122,7 +1123,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final int i = value.intValue(); if (indexed && docValued) { - document.add(new IntField(name, i)); + document.add(new IntField(name, i, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, 
i)); } else if (indexed) { @@ -1265,7 +1266,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final long l = value.longValue(); if (indexed && docValued) { - document.add(new LongField(name, l)); + document.add(new LongField(name, l, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, l)); } else if (indexed) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java index f574e509df9b9..cff5e7be25d5f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Collection; +import java.util.List; import java.util.Map; /** Base {@link MappedFieldType} implementation for a field that is indexed @@ -68,7 +69,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + List bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return new TermInSetQuery(name(), bytesRefs); } From 0c6f269038155145079cdb51a7a01c75f2ed562d Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 10:24:31 +0200 Subject: [PATCH 083/417] openChecksumInput no longer takes an IOContext. 
--- .../elasticsearch/common/settings/KeyStoreWrapper.java | 2 +- .../codec/bloomfilter/ES87BloomFilterPostingsFormat.java | 8 +------- .../index/codec/tsdb/ES87TSDBDocValuesProducer.java | 2 +- .../main/java/org/elasticsearch/index/store/Store.java | 2 +- .../elasticsearch/gateway/MetadataStateFormatTests.java | 2 +- .../main/java/org/elasticsearch/test/CorruptionUtils.java | 2 +- 6 files changed, 6 insertions(+), 12 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 276775a868665..aa6acb225a999 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -253,7 +253,7 @@ public static KeyStoreWrapper load(Path configDir) throws IOException { } Directory directory = new NIOFSDirectory(configDir); - try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME)) { final int formatVersion; try { formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, CURRENT_VERSION); diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java index 01d874adec14d..abf68abe51887 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java @@ -38,7 +38,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import 
org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -291,12 +290,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { Map bloomFilters = null; Throwable priorE = null; long indexFileLength = 0; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index b6e1bb503045c..ad725d79b8810 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -60,7 +60,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { // read in the entries from the metadata file. 
int version = -1; - try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) { + try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName)) { Throwable priorE = null; try { diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index b9c50edf50216..07406a57903d8 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -631,7 +631,7 @@ private static void failIfCorrupted(Directory directory) throws IOException { List ex = new ArrayList<>(); for (String file : files) { if (file.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { - try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(file)) { CodecUtil.checkHeader(input, CODEC, CORRUPTED_MARKER_CODEC_VERSION, CORRUPTED_MARKER_CODEC_VERSION); final int size = input.readVInt(); final byte[] buffer = new byte[size]; diff --git a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java index 1e432dcb369ae..a1ab7cb27d4f9 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java @@ -226,7 +226,7 @@ public static void corruptFile(Path fileToCorrupt, Logger logger) throws IOExcep } long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - 8); // one long is the checksum... 
8 bytes checksumAfterCorruption = input.getChecksum(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index 0782a69fd0c1e..fd7d516167655 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -72,7 +72,7 @@ public static void corruptFile(Random random, Path... files) throws IOException long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - CodecUtil.footerLength()); checksumAfterCorruption = input.getChecksum(); From be8379a3b848ed06f1f13b43702414226028fa4e Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 09:39:41 +0100 Subject: [PATCH 084/417] Bump ES|QL annotation processor source version to 21 --- .../java/org/elasticsearch/compute/gen/AggregatorProcessor.java | 2 +- .../java/org/elasticsearch/compute/gen/ConsumeProcessor.java | 2 +- .../java/org/elasticsearch/compute/gen/EvaluatorProcessor.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java index 4b1f946a1d176..863db86eb934a 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java @@ -52,7 +52,7 @@ public Set getSupportedAnnotationTypes() { @Override public SourceVersion getSupportedSourceVersion() 
{ - return SourceVersion.RELEASE_17; + return SourceVersion.RELEASE_21; } @Override diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java index b76b1cc7ea74b..b4e74d52ffeb8 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java @@ -48,7 +48,7 @@ public Set getSupportedAnnotationTypes() { @Override public SourceVersion getSupportedSourceVersion() { - return SourceVersion.latest(); + return SourceVersion.RELEASE_21; } @Override diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java index 09012c7b3a48a..ec36908833661 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java @@ -44,7 +44,7 @@ public Set getSupportedAnnotationTypes() { @Override public SourceVersion getSupportedSourceVersion() { - return SourceVersion.RELEASE_17; + return SourceVersion.RELEASE_21; } @Override From f5718328be6e5d17cf28bd5f758f65628154186e Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 28 Aug 2024 10:47:37 +0200 Subject: [PATCH 085/417] Resolve more LongField creation compile errors Just add the required Store.NO last argument in mappers code. 
--- .../java/org/elasticsearch/index/mapper/DateFieldMapper.java | 3 ++- .../xpack/esql/querydsl/query/SingleValueQueryTests.java | 2 +- .../xpack/unsignedlong/UnsignedLongFieldMapper.java | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 8a3077f5a29da..8bebed5a0bf1a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.Field; import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -957,7 +958,7 @@ private void indexValue(DocumentParserContext context, long timestamp) { } if (indexed && hasDocValues) { - context.doc().add(new LongField(fieldType().name(), timestamp)); + context.doc().add(new LongField(fieldType().name(), timestamp, Field.Store.NO)); } else if (hasDocValues) { context.doc().add(new SortedNumericDocValuesField(fieldType().name(), timestamp)); } else if (indexed) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index 2ba397a3cb3de..a6df0e75e2f05 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -232,7 +232,7 @@ private Object randomValue() { private List docFor(int i, Iterable values) { List fields = new ArrayList<>(); - fields.add(new LongField("i", i)); + fields.add(new LongField("i", i, 
Field.Store.NO)); fields.add(new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)); switch (fieldType) { case "long", "integer", "short", "byte" -> { diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index eae4094fee0d0..e69038ee74910 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -659,7 +659,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio List fields = new ArrayList<>(); if (indexed && hasDocValues) { - fields.add(new LongField(fieldType().name(), numericValue)); + fields.add(new LongField(fieldType().name(), numericValue, Field.Store.NO)); } else if (hasDocValues) { fields.add(new SortedNumericDocValuesField(fieldType().name(), numericValue)); } else if (indexed) { From b30a46748fd7af6fc12d3472a12e9349fa2b73dc Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 09:54:51 +0100 Subject: [PATCH 086/417] Remove references to MMapDirectory.UNMAP_SUPPORTED, as unmap is always supported --- .../java/org/elasticsearch/index/IndexModule.java | 2 +- .../elasticsearch/common/lucene/LuceneTests.java | 13 ------------- .../index/store/FsDirectoryFactoryTests.java | 2 +- 3 files changed, 2 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index fa2a9f0f35259..0f83fc215e8e0 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -454,7 +454,7 @@ public boolean match(String setting) { } public 
static Type defaultStoreType(final boolean allowMmap) { - if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (allowMmap && Constants.JRE_IS_64BIT) { return Type.HYBRIDFS; } else { return Type.NIOFS; diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 02084c818346b..d8bb479e719f6 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -43,7 +43,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.MockDirectoryWrapper; @@ -463,18 +462,6 @@ public void testAsSequentialBitsUsesRandomAccess() throws IOException { } } - /** - * Test that the "unmap hack" is detected as supported by lucene. - * This works around the following bug: https://bugs.openjdk.java.net/browse/JDK-4724038 - *

- * While not guaranteed, current status is "Critical Internal API": http://openjdk.java.net/jeps/260 - * Additionally this checks we did not screw up the security logic around the hack. - */ - public void testMMapHackSupported() throws Exception { - // add assume's here if needed for certain platforms, but we should know if it does not work. - assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED); - } - public void testWrapAllDocsLive() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) diff --git a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index b1222213a505d..689a25cd235a2 100644 --- a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -147,7 +147,7 @@ private void doTestStoreDirectory(Path tempDir, String typeSettingValue, IndexMo ); break; case FS: - if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (Constants.JRE_IS_64BIT) { assertTrue(FsDirectoryFactory.isHybridFs(directory)); } else { assertTrue(directory.toString(), directory instanceof NIOFSDirectory); From da9fa670005e550be586d206c9c433072528c1b2 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 28 Aug 2024 11:03:45 +0200 Subject: [PATCH 087/417] Move validation of docValueCount before call to nextOrd --- .../lucene/grouping/GroupingDocValuesSelector.java | 2 +- .../bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java index 
639cf235cd2cb..cd362f15c022c 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java @@ -198,12 +198,12 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { @Override public boolean advanceExact(int target) throws IOException { if (sorted.advanceExact(target)) { - ord = (int) sorted.nextOrd(); if (sorted.docValueCount() > 1) { throw new IllegalStateException( "failed to extract doc:" + target + ", the grouping field must be single valued" ); } + ord = (int) sorted.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index d4d608db04d8f..e8cc57b558c7f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -117,12 +117,12 @@ public long longValue() throws IOException { @Override public boolean advanceExact(int target) throws IOException { if (globalOrds.advanceExact(target)) { - value = globalOrds.nextOrd(); // Check there isn't a second value for this // document if (globalOrds.docValueCount() > 1) { throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } + value = globalOrds.nextOrd(); return true; } else { return false; From af8741dc1a9f16aeb8952cb31216ceb47d7cea30 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 28 Aug 2024 11:16:13 +0200 Subject: [PATCH 088/417] Resolve compile errors around removal of TopScoreDoc/TopFieldCollector creation methods See https://github.com/apache/lucene/pull/13617 --- 
.../join/query/ParentChildInnerHitContextBuilder.java | 8 ++++---- .../main/java/org/elasticsearch/index/IndexModule.java | 1 - .../org/elasticsearch/index/query/NestedQueryBuilder.java | 8 ++++---- .../bucket/sampler/BestDocsDeferringCollector.java | 4 ++-- .../search/aggregations/metrics/TopHitsAggregator.java | 7 +++++-- 5 files changed, 15 insertions(+), 13 deletions(-) diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 95db4d92874a4..bd2f65e32bc13 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -19,8 +19,8 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -136,12 +136,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, Integer.MAX_VALUE).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, 
Integer.MAX_VALUE).newCollector(); maxScoreCollector = new MaxScoreCollector(); } for (LeafReaderContext ctx : this.context.searcher().getIndexReader().leaves()) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 0f83fc215e8e0..992f4ed8f782a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -17,7 +17,6 @@ import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.Client; diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index a6a3d8546187f..ef49a7fe00a7f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -15,8 +15,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -442,12 +442,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + 
topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, Integer.MAX_VALUE).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, Integer.MAX_VALUE).newCollector(); maxScoreCollector = new MaxScoreCollector(); } intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 1344604a8d39c..ccb1095ce37f4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.util.BigArrays; @@ -98,7 +98,7 @@ public void collect(int doc, long bucket) throws IOException { // Designed to be overridden by subclasses that may score docs by criteria // other than Lucene score protected TopDocsCollector createTopDocsCollector(int size) throws IOException { - return TopScoreDocCollector.create(size, Integer.MAX_VALUE); + return new TopScoreDocCollectorManager(size, Integer.MAX_VALUE).newCollector(); } // Can be overridden by subclasses that have a different priority queue implementation diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index e61465fbc5e37..ed6af5430061a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -18,8 +18,10 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.lucene.Lucene; @@ -135,12 +137,13 @@ public void collect(int docId, long bucket) throws IOException { // but here we create collectors ourselves and we need prevent OOM because of crazy an offset and size. topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); if (sort == null) { - collectors = new Collectors(TopScoreDocCollector.create(topN, Integer.MAX_VALUE), null); + TopScoreDocCollector topScoreDocCollector = new TopScoreDocCollectorManager(topN, Integer.MAX_VALUE).newCollector(); + collectors = new Collectors(topScoreDocCollector, null); } else { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits(){ // Note that this would require to catch CollectionTerminatedException collectors = new Collectors( - TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), + new TopFieldCollectorManager(sort.sort, topN, Integer.MAX_VALUE).newCollector(), subSearchContext.trackScores() ? 
new MaxScoreCollector() : null ); } From 8d0784e712ac32a866122c5d7ab00b1dc8ba3856 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 11:16:58 +0200 Subject: [PATCH 089/417] More compile errors related to the scorer/scorerSupplier/bulkScorer change. --- .../search/function/FunctionScoreQuery.java | 89 ++++++++++++------- .../search/function/MinScoreScorer.java | 8 +- .../search/function/ScriptScoreFunction.java | 8 -- .../search/function/ScriptScoreQuery.java | 82 ++++++++++------- .../lucene/queries/BlendedTermQuery.java | 13 ++- .../lucene/queries/MinDocQuery.java | 7 +- .../queries/SearchAfterSortedDocQuery.java | 6 +- .../lucene/spatial/ShapeDocValuesQuery.java | 20 +---- .../support/TimeSeriesIndexSearcher.java | 2 +- .../search/profile/query/ProfileScorer.java | 11 +-- .../search/profile/query/ProfileWeight.java | 35 +++----- .../retriever/rankdoc/RankDocsQuery.java | 6 +- .../runtime/AbstractScriptFieldQuery.java | 6 +- ...oPointScriptFieldDistanceFeatureQuery.java | 9 +- .../search/slice/DocIdSliceQuery.java | 6 +- .../search/slice/DocValuesSliceQuery.java | 6 +- .../search/slice/TermsSliceQuery.java | 6 +- .../search/vectors/VectorSimilarityQuery.java | 19 +++- .../search/function/MinScoreScorerTests.java | 31 +------ .../profile/query/ProfileScorerTests.java | 18 +--- 20 files changed, 196 insertions(+), 192 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index aed11297d4285..6546adfae619a 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -259,44 +259,65 @@ class CustomBoostFactorWeight extends Weight { this.needsScores = needsScores; } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws 
IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + private ScorerSupplier functionScorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - final long leadCost = subQueryScorer.iterator().cost(); - final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; - final Bits[] docSets = new Bits[functions.length]; - for (int i = 0; i < functions.length; i++) { - ScoreFunction function = functions[i]; - leafFunctions[i] = function.getLeafScoreFunction(context); - if (filterWeights[i] != null) { - ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); - docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); - } else { - docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; + final Bits[] docSets = new Bits[functions.length]; + for (int i = 0; i < functions.length; i++) { + ScoreFunction function = functions[i]; + leafFunctions[i] = function.getLeafScoreFunction(context); + if (filterWeights[i] != null) { + ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); + docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); + } else { + docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + } + } + return new FunctionFactorScorer( + subQueryScorer, + scoreMode, + functions, + maxBoost, + leafFunctions, + docSets, + combineFunction, + needsScores + ); } - } - return new FunctionFactorScorer( - this, - subQueryScorer, - scoreMode, - functions, - maxBoost, - 
leafFunctions, - docSets, - combineFunction, - needsScores - ); + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer scorer = functionScorer(context); - if (scorer != null && minScore != null) { - scorer = new MinScoreScorer(this, scorer, minScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier scorerSupplier = functionScorerSupplier(context); + + if (scorerSupplier == null || minScore == null) { + return scorerSupplier; } - return scorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(scorerSupplier.get(leadCost), minScore); + } + + @Override + public long cost() { + return scorerSupplier.cost(); + } + }; } @Override @@ -343,7 +364,8 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } else if (singleFunction && functionsExplanations.size() == 1) { factorExplanation = functionsExplanations.get(0); } else { - FunctionFactorScorer scorer = functionScorer(context); + + FunctionFactorScorer scorer = (FunctionFactorScorer) functionScorerSupplier(context).get(1L); int actualDoc = scorer.iterator().advance(doc); assert (actualDoc == doc); double score = scorer.computeScore(doc, expl.getValue().floatValue()); @@ -378,7 +400,6 @@ static class FunctionFactorScorer extends FilterScorer { private final boolean needsScores; private FunctionFactorScorer( - CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, ScoreFunction[] functions, @@ -388,7 +409,7 @@ private FunctionFactorScorer( CombineFunction scoreCombiner, boolean needsScores ) throws IOException { - super(scorer, w); + super(scorer); this.scoreMode = scoreMode; this.functions = functions; this.leafFunctions = leafFunctions; diff --git 
a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java index 70ca261868ba2..d057b2de76464 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import java.io.IOException; @@ -24,12 +23,11 @@ public final class MinScoreScorer extends Scorer { private float curScore; private final float boost; - public MinScoreScorer(Weight weight, Scorer scorer, float minScore) { - this(weight, scorer, minScore, 1f); + public MinScoreScorer(Scorer scorer, float minScore) { + this(scorer, minScore, 1f); } - public MinScoreScorer(Weight weight, Scorer scorer, float minScore, float boost) { - super(weight); + public MinScoreScorer(Scorer scorer, float minScore, float boost) { this.in = scorer; this.minScore = minScore; this.boost = boost; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 6b8a75337b8ee..b30cc7e015f30 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -23,14 +23,8 @@ public class ScriptScoreFunction extends ScoreFunction { static final class CannedScorer extends Scorable { - protected int docid; protected float score; - @Override - public int docID() { - return docid; - } - @Override public float score() { return score; @@ -65,7 +59,6 @@ public LeafScoreFunction 
getLeafScoreFunction(LeafReaderContext ctx) throws IOEx private double score(int docId, float subQueryScore, ScoreScript.ExplanationHolder holder) throws IOException { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore; double result = leafScript.execute(holder); @@ -85,7 +78,6 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE Explanation exp; if (leafScript instanceof ExplainableScoreScript) { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); } else { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java index 93837269f2090..23553960413b0 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -37,6 +38,7 @@ import java.util.HashSet; import java.util.Objects; import java.util.Set; +import java.util.function.IntSupplier; /** * A query that uses a script to compute documents' scores. 
@@ -103,30 +105,40 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new Weight(this) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - if (minScore == null) { - final BulkScorer subQueryBulkScorer = subQueryWeight.bulkScorer(context); - if (subQueryBulkScorer == null) { - return null; - } - return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); - } else { - return super.bulkScorer(context); - } - } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - Scorer scriptScorer = new ScriptScorer(this, makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); - if (minScore != null) { - scriptScorer = new MinScoreScorer(this, scriptScorer, minScore); - } - return scriptScorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + Scorer scriptScorer = new ScriptScorer(makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); + if (minScore != null) { + scriptScorer = new MinScoreScorer(scriptScorer, minScore); + } + return scriptScorer; + } + + @Override + public BulkScorer bulkScorer() throws IOException { + if (minScore == null) { + final BulkScorer subQueryBulkScorer = subQueryScorerSupplier.bulkScorer(); + return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); + } else { + return super.bulkScorer(); + } + } + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } 
@Override @@ -137,7 +149,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } ExplanationHolder explanationHolder = new ExplanationHolder(); Scorer scorer = new ScriptScorer( - this, makeScoreScript(context), subQueryWeight.scorer(context), subQueryScoreMode, @@ -230,14 +241,12 @@ private static class ScriptScorer extends Scorer { private final ExplanationHolder explanation; ScriptScorer( - Weight weight, ScoreScript scoreScript, Scorer subQueryScorer, ScoreMode subQueryScoreMode, float boost, ExplanationHolder explanation ) { - super(weight); this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); @@ -291,19 +300,27 @@ private static class ScriptScorable extends Scorable { private final ScoreScript scoreScript; private final Scorable subQueryScorer; private final float boost; + private final IntSupplier docIDSupplier; - ScriptScorable(ScoreScript scoreScript, Scorable subQueryScorer, ScoreMode subQueryScoreMode, float boost) { + ScriptScorable( + ScoreScript scoreScript, + Scorable subQueryScorer, + ScoreMode subQueryScoreMode, + float boost, + IntSupplier docIDSupplier + ) { this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); } this.subQueryScorer = subQueryScorer; this.boost = boost; + this.docIDSupplier = docIDSupplier; } @Override public float score() throws IOException { - int docId = docID(); + int docId = docIDSupplier.getAsInt(); scoreScript.setDocument(docId); float score = (float) scoreScript.execute(null); if (score < 0f || Float.isNaN(score)) { @@ -319,10 +336,6 @@ public float score() throws IOException { return score * boost; } - @Override - public int docID() { - return subQueryScorer.docID(); - } } /** @@ -349,9 +362,18 @@ public int score(LeafCollector collector, Bits acceptDocs, int min, int max) thr private LeafCollector wrapCollector(LeafCollector collector) { return new 
FilterLeafCollector(collector) { + + private int docID; + @Override public void setScorer(Scorable scorer) throws IOException { - in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost)); + in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost, () -> docID)); + } + + @Override + public void collect(int doc) throws IOException { + this.docID = doc; + super.collect(doc); } }; } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java index d88e0e0dd9fcf..8b5189fa2939a 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.IOSupplier; import org.apache.lucene.util.InPlaceMergeSorter; import java.io.IOException; @@ -187,7 +188,11 @@ private static TermStates adjustTTF(IndexReaderContext readerContext, TermStates int df = termContext.docFreq(); long ttf = sumTTF; for (int i = 0; i < len; i++) { - TermState termState = termContext.get(leaves.get(i)); + IOSupplier termStateSupplier = termContext.get(leaves.get(i)); + if (termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } @@ -211,7 +216,11 @@ private static TermStates adjustDF(IndexReaderContext readerContext, TermStates } TermStates newCtx = new TermStates(readerContext); for (int i = 0; i < len; ++i) { - TermState termState = ctx.get(leaves.get(i)); + IOSupplier termStateSupplier = ctx.get(leaves.get(i)); + if (termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } diff --git 
a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java index 0e2f05cc12e4f..0fc212b7db31e 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -75,15 +76,17 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo throw new IllegalStateException("Executing against a different reader than the query has been rewritten against"); } return new ConstantScoreWeight(this, boost) { + @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final int maxDoc = context.reader().maxDoc(); if (context.docBase + maxDoc <= minDoc) { return null; } final int segmentMinDoc = Math.max(0, minDoc - context.docBase); final DocIdSetIterator disi = new MinDocIterator(segmentMinDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index c5802f092c033..5de0d7fafdf27 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import 
org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -66,7 +67,7 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { Sort segmentSort = context.reader().getMetaData().getSort(); if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) { throw new IOException("search sort :[" + sort + "] does not match the index sort:[" + segmentSort + "]"); @@ -79,7 +80,8 @@ public Scorer scorer(LeafReaderContext context) throws IOException { return null; } final DocIdSetIterator disi = new MinDocQuery.MinDocIterator(firstDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java index f79d5303ab65a..e207c16229232 100644 --- a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java @@ -108,14 +108,8 @@ private ConstantScoreWeight getStandardWeight(ScoreMode scoreMode, float boost) final Component2D component2D = create(geometries); return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier 
scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -124,7 +118,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final GeometryDocValueReader reader = new GeometryDocValueReader(); final Component2DVisitor visitor = Component2DVisitor.getVisitor(component2D, relation, encoder); @@ -142,7 +136,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? } }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override @@ -166,14 +160,8 @@ private ConstantScoreWeight getContainsWeight(ScoreMode scoreMode, float boost) } return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -182,7 +170,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final Component2DVisitor[] visitors = new 
Component2DVisitor[components2D.size()]; for (int i = 0; i < components2D.size(); i++) { @@ -209,7 +197,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? } }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 21138f46e974e..67bf8b79ef1b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -130,7 +130,7 @@ private void search(BucketCollector bucketCollector, Weight weight) throws IOExc Scorer scorer = weight.scorer(leaf); if (scorer != null) { if (minimumScore != null) { - scorer = new MinScoreScorer(weight, scorer, minimumScore); + scorer = new MinScoreScorer(scorer, minimumScore); } LeafWalker leafWalker = new LeafWalker(leaf, scorer, bucketCollector, () -> tsidOrd[0]); if (leafWalker.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index 743321b0fa518..2c8b3991fc4a5 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.elasticsearch.search.profile.Timer; import java.io.IOException; @@ -24,15 +23,12 @@ final class ProfileScorer extends Scorer { private final Scorer scorer; - private 
final ProfileWeight profileWeight; private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer, setMinCompetitiveScoreTimer; - ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) { - super(w); + ProfileScorer(Scorer scorer, QueryProfileBreakdown profile) { this.scorer = scorer; - this.profileWeight = w; scoreTimer = profile.getNewTimer(QueryTimingType.SCORE); nextDocTimer = profile.getNewTimer(QueryTimingType.NEXT_DOC); advanceTimer = profile.getNewTimer(QueryTimingType.ADVANCE); @@ -57,11 +53,6 @@ public float score() throws IOException { } } - @Override - public Weight getWeight() { - return profileWeight; - } - @Override public Collection getChildren() throws IOException { return scorer.getChildren(); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index ce725bb277ccc..174f8e58fb701 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -36,15 +36,6 @@ public ProfileWeight(Query query, Weight subQueryWeight, QueryProfileBreakdown p this.profile = profile; } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier supplier = scorerSupplier(context); - if (supplier == null) { - return null; - } - return supplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final Timer timer = profile.getNewTimer(QueryTimingType.BUILD_SCORER); @@ -66,12 +57,24 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti public Scorer get(long loadCost) throws IOException { timer.start(); try { - return new ProfileScorer(weight, subQueryScorerSupplier.get(loadCost), profile); + return new 
ProfileScorer(subQueryScorerSupplier.get(loadCost), profile); } finally { timer.stop(); } } + @Override + public BulkScorer bulkScorer() throws IOException { + // We use the default bulk scorer instead of the specialized one. The reason + // is that Lucene's BulkScorers do everything at once: finding matches, + // scoring them and calling the collector, so they make it impossible to + // see where time is spent, which is the purpose of query profiling. + // The default bulk scorer will pull a scorer and iterate over matches, + // this might be a significantly different execution path for some queries + // like disjunctions, but in general this is what is done anyway + return super.bulkScorer(); + } + @Override public long cost() { timer.start(); @@ -89,18 +92,6 @@ public void setTopLevelScoringClause() throws IOException { }; } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - // We use the default bulk scorer instead of the specialized one. The reason - // is that Lucene's BulkScorers do everything at once: finding matches, - // scoring them and calling the collector, so they make it impossible to - // see where time is spent, which is the purpose of query profiling. 
- // The default bulk scorer will pull a scorer and iterate over matches, - // this might be a significantly different execution path for some queries - // like disjunctions, but in general this is what is done anyway - return super.bulkScorer(context); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return subQueryWeight.explain(context, doc); diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index 77da1cc80bc97..edcac1bae4266 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.search.rank.RankDoc; @@ -88,13 +89,13 @@ public Explanation explain(LeafReaderContext context, int doc) { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { // Segment starts indicate how many docs are in the segment, // upper equalling lower indicates no documents for this segment if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -163,6 +164,7 @@ private int currentDocId() { } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java index 
01eaa5714dd77..59d5f651df27e 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractFieldScript; @@ -68,10 +69,11 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext ctx) { + public ScorerSupplier scorerSupplier(LeafReaderContext ctx) throws IOException { S scriptContext = scriptContextFunction.apply(ctx); DocIdSetIterator approximation = DocIdSetIterator.all(ctx.reader().maxDoc()); - return new ConstantScoreScorer(this, score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java index de081fd386d54..a73f520bfdf09 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.SloppyMath; @@ -78,8 +79,9 @@ 
public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + return new DefaultScorerSupplier(scorer); } @Override @@ -115,8 +117,7 @@ private class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java index 4b52a91c0bc24..0e04b385593a2 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -57,9 +58,10 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator iterator = createIterator(context, sliceStart, sliceStart + sliceSize); - return new ConstantScoreScorer(this, boost, 
scoreMode, iterator); + Scorer scorer = new ConstantScoreScorer(boost, scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } private static DocIdSetIterator createIterator(LeafReaderContext context, int sliceStart, int sliceEnd) { diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java index 08c06489c6fb7..af8771be6dfc9 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -40,7 +41,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), getField()); final DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -65,7 +66,8 @@ public float matchCost() { return 10; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index e2f46100c41ec..401090d7b3583 100644 --- 
a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -48,10 +49,11 @@ public TermsSliceQuery(String field, int id, int max) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final DocIdSet disi = build(context.reader()); final DocIdSetIterator leafIt = disi.iterator(); - return new ConstantScoreScorer(this, score(), scoreMode, leafIt); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, leafIt); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java index bb8615fb8ab48..dd9937711a4c5 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; @@ -141,12 +142,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer 
scorer(LeafReaderContext context) throws IOException { - Scorer innerScorer = in.scorer(context); - if (innerScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier inScorerSupplier = in.scorerSupplier(context); + if (inScorerSupplier == null) { return null; } - return new MinScoreScorer(this, innerScorer, docScore, boost); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(inScorerSupplier.get(leadCost), docScore, boost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java index 97caa9ecf9503..92c06a1cb121f 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -10,15 +10,11 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.apache.lucene.tests.search.AssertingScorer; import org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.ESTestCase; @@ -65,27 +61,8 @@ public int advance(int target) throws IOException { }; } - private static Weight fakeWeight() { - return new Weight(new MatchAllDocsQuery()) { - @Override - public Explanation explain(LeafReaderContext context, int doc) 
throws IOException { - return null; - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return null; - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - private static Scorer hideTwoPhaseIterator(Scorer in) { - return new Scorer(in.getWeight()) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return TwoPhaseIterator.asDocIdSetIterator(in.twoPhaseIterator()); @@ -110,7 +87,7 @@ public int docID() { private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs); - final Scorer scorer = new Scorer(fakeWeight()) { + final Scorer scorer = new Scorer() { int lastScoredDoc = -1; final float matchCost = (random().nextBoolean() ? 1000 : 0) + random().nextInt(2000); @@ -191,7 +168,7 @@ public void doTestRandom(boolean twoPhase) throws IOException { } Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); final float minScore = random().nextFloat(); - Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore); + Scorer minScoreScorer = new MinScoreScorer(scorer, minScore); int doc = -1; while (doc != DocIdSetIterator.NO_MORE_DOCS) { final int target; @@ -249,7 +226,7 @@ public void testConjunction() throws Exception { final float minScore; if (randomBoolean()) { minScore = randomFloat(); - MinScoreScorer minScoreScorer = new MinScoreScorer(scorer.getWeight(), scorer, minScore); + MinScoreScorer minScoreScorer = new MinScoreScorer(scorer, minScore); scorers.add(minScoreScorer); } else { scorers.add(scorer); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java index 8f5f95241432e..be0ce00145226 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java 
+++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java @@ -33,10 +33,6 @@ private static class FakeScorer extends Scorer { public float maxScore, minCompetitiveScore; - protected FakeScorer(Weight weight) { - super(weight); - } - @Override public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); @@ -74,22 +70,14 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return Explanation.match(1, "fake_description"); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - FakeScorer fakeScorer = new FakeScorer(this); - fakeScorer.maxScore = 42f; - return fakeScorer; - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - Weight weight = this; return new ScorerSupplier() { private long cost = 0; @Override public Scorer get(long leadCost) { - return new Scorer(weight) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return null; @@ -191,7 +179,7 @@ public void testPropagateMinCompetitiveScore() throws IOException { FakeScorer fakeScorer = new FakeScorer(weight); QueryProfileBreakdown profile = new QueryProfileBreakdown(); ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); assertEquals(0.42f, fakeScorer.minCompetitiveScore, 0f); } @@ -202,7 +190,7 @@ public void testPropagateMaxScore() throws IOException { FakeScorer fakeScorer = new FakeScorer(weight); QueryProfileBreakdown profile = new QueryProfileBreakdown(); ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); 
profileScorer.setMinCompetitiveScore(0.42f); fakeScorer.maxScore = 42f; assertEquals(42f, profileScorer.getMaxScore(DocIdSetIterator.NO_MORE_DOCS), 0f); } From 31e85b208946740aba851cf746f7dce3e87e89ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 11:03:10 +0200 Subject: [PATCH 090/417] Fix missing "MinimizationOperations.minimize" errors Lucene 10 moved the MinimizationOperations class into the test project and removed the last use of it in https://github.com/apache/lucene/pull/528 by replacing minimization of automata by just determinizing them, replacing MinimizationOperations.minimize by Operations.determinize (see reasoning behind this in the Lucene PR). This change follows the same pattern where we don't already know the automaton is deterministic. --- .../org/elasticsearch/reindex/ReindexValidator.java | 3 +-- .../common/lucene/search/AutomatonQueries.java | 5 ----- .../index/mapper/IpPrefixAutomatonUtil.java | 3 +-- .../index/mapper/KeywordFieldMapper.java | 2 -- .../mapper/flattened/FlattenedFieldMapper.java | 2 -- .../org/elasticsearch/indices/SystemIndices.java | 13 ++++++------- .../security/authz/permission/FieldPermissions.java | 5 ++--- .../xpack/core/security/support/Automatons.java | 3 +-- .../expression/predicate/regex/LikePattern.java | 3 +-- .../expression/predicate/regex/WildcardPattern.java | 3 +-- .../xpack/versionfield/VersionEncoder.java | 3 +-- .../ql/expression/predicate/regex/LikePattern.java | 3 +-- .../expression/predicate/regex/WildcardPattern.java | 3 +-- .../security/authz/store/FileRolesStoreTests.java | 3 +-- .../xpack/watcher/common/http/HttpClient.java | 3 +-- .../xpack/wildcard/mapper/WildcardFieldMapper.java | 2 -- 16 files changed, 18 insertions(+), 41 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java index 69e21b10ac3a4..97d078e8d9c5c 100644 ---
a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java @@ -11,7 +11,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; @@ -95,7 +94,7 @@ static CharacterRunAutomaton buildRemoteWhitelist(List whitelist) { return new CharacterRunAutomaton(Automata.makeEmpty()); } Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); - automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (Operations.isTotal(automaton)) { throw new IllegalArgumentException( "Refusing to start because whitelist " diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java index d6463fb28f6cf..670b9d53a68ea 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -37,8 +36,6 @@ public static Automaton caseInsensitivePrefix(String s) { Automaton a = Operations.concatenate(list); // since all elements in the list should be deterministic 
already, the concatenation also is, so no need to determinized assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); - assert a.isDeterministic(); return a; } @@ -116,7 +113,6 @@ public static Automaton toCaseInsensitiveString(String s) { Automaton a = Operations.concatenate(list); // concatenating deterministic automata should result in a deterministic automaton. No need to determinize here. assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); return a; } @@ -131,7 +127,6 @@ public static Automaton toCaseInsensitiveChar(int codepoint) { if (altCase != codepoint) { result = Operations.union(case1, Automata.makeChar(altCase)); // this automaton should always be deterministic, no need to determinize - result = MinimizationOperations.minimize(result, 0); assert result.isDeterministic(); } else { result = case1; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java index c967f435e6f23..db9e3e77eef67 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -75,7 +74,7 @@ static CompiledAutomaton buildIpPrefixAutomaton(String ipPrefix) { } else { result = Automata.makeAnyBinary(); } - result = MinimizationOperations.minimize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + result = Operations.determinize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); return new CompiledAutomaton(result, null, false, 0, true); } diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 22a51a102e46d..21f406e86c6ab 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -31,7 +31,6 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; @@ -576,7 +575,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi ? AutomatonQueries.caseInsensitivePrefix(prefix) : Operations.concatenate(Automata.makeString(prefix), Automata.makeAnyString()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a, true, true); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 3887c5a15cd5a..0c1340867849a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -30,7 +30,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -372,7 +371,6 @@ 
public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi a = Operations.concatenate(a, Automata.makeAnyString()); } assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a); if (searchAfter != null) { diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 3261ac83a7e67..5afbb79028067 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateResponse.ResetFeatureStateStatus; @@ -177,7 +176,7 @@ public SystemIndices(List pluginAndModuleFeatures) { this.netNewSystemIndexAutomaton = buildNetNewIndexCharacterRunAutomaton(featureDescriptors); this.productToSystemIndicesMatcher = getProductToSystemIndicesMap(featureDescriptors); this.executorSelector = new ExecutorSelector(this); - this.systemNameAutomaton = MinimizationOperations.minimize( + this.systemNameAutomaton = Operations.determinize( Operations.union(List.of(systemIndexAutomata, systemDataStreamIndicesAutomata, buildDataStreamAutomaton(featureDescriptors))), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); @@ -264,7 +263,7 @@ private static Map getProductToSystemIndicesMap(M Collectors.toUnmodifiableMap( Entry::getKey, entry -> new CharacterRunAutomaton( - MinimizationOperations.minimize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + Operations.determinize(entry.getValue(), 
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) ) ) ); @@ -425,7 +424,7 @@ private static Automaton buildIndexAutomaton(Map featureDescrip .stream() .map(SystemIndices::featureToIndexAutomaton) .reduce(Operations::union); - return MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map featureDescriptors) { @@ -437,7 +436,7 @@ private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map SystemIndexDescriptor.buildAutomaton(descriptor.getIndexPattern(), descriptor.getAliasName())) .reduce(Operations::union); return new CharacterRunAutomaton( - MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) ); } @@ -458,7 +457,7 @@ private static Automaton buildDataStreamAutomaton(Map featureDe .map(dsName -> SystemIndexDescriptor.buildAutomaton(dsName, null)) .reduce(Operations::union); - return automaton.isPresent() ? MinimizationOperations.minimize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; + return automaton.isPresent() ? Operations.determinize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; } private static Predicate buildDataStreamNamePredicate(Map featureDescriptors) { @@ -471,7 +470,7 @@ private static Automaton buildDataStreamBackingIndicesAutomaton(Map predicate(String... 
patterns) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java index 52ce2636e914b..b579848a51f30 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -52,7 +51,7 @@ public char escape() { @Override public Automaton createAutomaton() { Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java index 7cedbc4742138..c2c0ad60f45d1 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; 
import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -40,7 +39,7 @@ public String pattern() { @Override public Automaton createAutomaton() { Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java index 00532d95574c0..651e07246c59c 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.Locale; @@ -213,7 +212,7 @@ static CompiledAutomaton prefixAutomaton(String versionPrefix, boolean caseInsen a = Operations.concatenate(a, Automata.makeAnyBinary()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); + a = Operations.determinize(a, 0); return new CompiledAutomaton(a, null, true, 0, true); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java index 8eac03d36371e..6f8915dc685d0 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java +++ 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -52,7 +51,7 @@ public char escape() { @Override public Automaton createAutomaton() { Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java index fd6bd177e4c60..325a021045b81 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -40,7 +39,7 @@ public String pattern() { @Override public Automaton createAutomaton() { Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 0a2c40d2a257a..84998aa48d41f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -183,7 +182,7 @@ public void testParseFile() throws Exception { assertTrue( Operations.sameLanguage( group.privilege().getAutomaton(), - MinimizationOperations.minimize( + Operations.determinize( Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java index 9a165112c41d1..327d345af864e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java @@ -42,7 +42,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.service.ClusterService; @@ -440,7 +439,7 @@ static 
CharacterRunAutomaton createAutomaton(List whiteListedHosts) { } Automaton whiteListAutomaton = Regex.simpleMatchToAutomaton(whiteListedHosts.toArray(Strings.EMPTY_ARRAY)); - whiteListAutomaton = MinimizationOperations.minimize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + whiteListAutomaton = Operations.determinize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); return new CharacterRunAutomaton(whiteListAutomaton); } } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index a661322899a8d..a61ecec57a1c5 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchParseException; @@ -379,7 +378,6 @@ public Query regexpQuery( RegExp regExp = new RegExp(value, syntaxFlags, matchFlags); Automaton a = regExp.toAutomaton(); a = Operations.determinize(a, maxDeterminizedStates); - a = MinimizationOperations.minimize(a, maxDeterminizedStates); if (Operations.isTotal(a)) { // Will match all return existsQuery(context); } From a7b5cc5592399f86344f34d57e43ee94253adb3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 12:22:37 +0200 Subject: [PATCH 091/417] Fix compilation missing CharacterRunAutomaton ctor CharacterRunAutomaton used to have a constructor that takes an upper work limit for determinization states. 
This was changed in https://github.com/apache/lucene/pull/485 and by default now uses 10000 as the limit (in RunAutomaton ctor). This PR determinizes the automaton passed in beforehand where we currently use a higher limit. --- .../common/xcontent/support/XContentMapValues.java | 8 ++++---- .../search/fetch/subphase/UnmappedFieldFetcher.java | 6 +++++- .../xpack/core/security/support/Automatons.java | 3 ++- .../xpack/core/security/support/AutomatonsTests.java | 5 ++--- 4 files changed, 13 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index f527b4cd8d684..a4cfdce60d887 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -279,8 +279,8 @@ public static Function, Map> filter(String[] include = matchAllAutomaton; } else { Automaton includeA = Regex.simpleMatchToAutomaton(includes); - includeA = makeMatchDotsInFieldNames(includeA); - include = new CharacterRunAutomaton(includeA, MAX_DETERMINIZED_STATES); + includeA = Operations.determinize(makeMatchDotsInFieldNames(includeA), MAX_DETERMINIZED_STATES); + include = new CharacterRunAutomaton(includeA); } Automaton excludeA; @@ -288,9 +288,9 @@ public static Function, Map> filter(String[] excludeA = Automata.makeEmpty(); } else { excludeA = Regex.simpleMatchToAutomaton(excludes); - excludeA = makeMatchDotsInFieldNames(excludeA); + excludeA = Operations.determinize(makeMatchDotsInFieldNames(excludeA), MAX_DETERMINIZED_STATES); } - CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA, MAX_DETERMINIZED_STATES); + CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA); // NOTE: We cannot use Operations.minus because of the special case that // we want all sub properties to match as soon as an 
object matches diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java index 773eac5ec86fb..2627177fcf541 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java @@ -83,7 +83,11 @@ private static CharacterRunAutomaton buildUnmappedFieldPatternAutomaton(List predicate(Automaton automaton, final String toS } else if (automaton == EMPTY) { return Predicates.never(); } - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates); + automaton = Operations.determinize(automaton, maxDeterminizedStates); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); return new Predicate() { @Override public boolean test(String s) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java index 0b2e48bd20dfe..94f91f427e19a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.List; -import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; import static org.elasticsearch.xpack.core.security.support.Automatons.pattern; import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; import static org.elasticsearch.xpack.core.security.support.Automatons.predicate; @@ -115,12 +114,12 @@ public void testPatternComplexity() { } private void assertMatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new 
CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertTrue(runAutomaton.run(text)); } private void assertMismatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertFalse(runAutomaton.run(text)); } From 88b351698ee55b98cff3a7a06602ca45e322b971 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 12:46:50 +0200 Subject: [PATCH 092/417] More compile errors related to the scorer/scorerSupplier/bulkScorer change. --- .../extras/SourceConfirmedTextQuery.java | 32 ++-- .../percolator/PercolateQuery.java | 140 ++++++++------- .../percolator/CandidateQueryTests.java | 6 +- .../index/query/ScriptQueryBuilder.java | 6 +- .../index/shard/ShardSplittingQuery.java | 168 ++++++++++-------- .../IndexDiskUsageAnalyzerTests.java | 6 +- .../common/lucene/LuceneTests.java | 5 - .../indices/IndicesQueryCacheTests.java | 25 ++- .../internal/ContextIndexSearcherTests.java | 21 ++- .../profile/query/QueryProfilerTests.java | 5 - .../search/query/QueryPhaseTimeoutTests.java | 78 ++++---- .../querydsl/query/SingleValueMatchQuery.java | 58 ++---- .../searchbusinessrules/CappedScoreQuery.java | 30 +--- .../CappedScoreWeight.java | 19 +- .../searchbusinessrules/CappedScorer.java | 5 +- .../BinaryDvConfirmedAutomatonQuery.java | 65 ++++--- 16 files changed, 354 insertions(+), 315 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index 3d0f26e8cc130..b437fca96a76f 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ 
b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TwoPhaseIterator; @@ -265,7 +266,7 @@ public boolean isCacheable(LeafReaderContext ctx) { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0); if (scorer == null) { return Explanation.noMatch("No matching phrase"); } @@ -285,15 +286,26 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public RuntimePhraseScorer scorer(LeafReaderContext context) throws IOException { - final Scorer approximationScorer = approximationWeight != null ? approximationWeight.scorer(context) : null; - if (approximationScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier approximationSupplier = approximationWeight != null ? 
approximationWeight.scorerSupplier(context) : null; + if (approximationSupplier == null) { return null; } - final DocIdSetIterator approximation = approximationScorer.iterator(); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); - final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); - return new RuntimePhraseScorer(this, approximation, leafSimScorer, valueFetcher, field, in); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximationScorer = approximationSupplier.get(leadCost); + final DocIdSetIterator approximation = approximationScorer.iterator(); + final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); + final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); + return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in); + } + + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -309,7 +321,7 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { Weight innerWeight = in.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1); return innerWeight.matches(context, doc); } - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0L); if (scorer == null) { return null; } @@ -335,14 +347,12 @@ private class RuntimePhraseScorer extends Scorer { private float freq; private RuntimePhraseScorer( - Weight weight, DocIdSetIterator approximation, LeafSimScorer scorer, CheckedIntFunction, IOException> valueFetcher, String field, Query query ) { - super(weight); this.scorer = scorer; this.valueFetcher = valueFetcher; this.field = field; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 408b3f204de1a..b655fb58986d6 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -109,74 +109,93 @@ public Explanation explain(LeafReaderContext leafReaderContext, int docId) throw } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext); - if (approximation == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext leafReaderContext) throws IOException { + final ScorerSupplier approximationSupplier = candidateMatchesWeight.scorerSupplier(leafReaderContext); + if (approximationSupplier == null) { return null; } - final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + ScorerSupplier verifiedDocsScorer; if (scoreMode.needsScores()) { - return new BaseScorer(this, approximation) { - - float score; - - @Override - boolean matchDocId(int docId) throws IOException { - Query query = percolatorQueries.apply(docId); - if (query != null) { - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST) - .add(nonNestedDocsFilter, Occur.FILTER) - .build(); - } - TopDocs topDocs = percolatorIndexSearcher.search(query, 1); - if (topDocs.scoreDocs.length > 0) { - score = topDocs.scoreDocs[0].score; - return true; - } else { - return false; + verifiedDocsScorer = null; + } else { + verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext); + } + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximation = approximationSupplier.get(leadCost); + final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + if (scoreMode.needsScores()) { + return new 
BaseScorer(approximation) { + + float score; + + @Override + boolean matchDocId(int docId) throws IOException { + Query query = percolatorQueries.apply(docId); + if (query != null) { + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + TopDocs topDocs = percolatorIndexSearcher.search(query, 1); + if (topDocs.scoreDocs.length > 0) { + score = topDocs.scoreDocs[0].score; + return true; + } else { + return false; + } + } else { + return false; + } } - } else { - return false; - } - } - @Override - public float score() { - return score; - } - }; - } else { - ScorerSupplier verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext); - Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); - return new BaseScorer(this, approximation) { + @Override + public float score() { + return score; + } + }; + } else { + Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); + return new BaseScorer(approximation) { + + @Override + public float score() throws IOException { + return 0f; + } - @Override - public float score() throws IOException { - return 0f; + boolean matchDocId(int docId) throws IOException { + // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. + // If docId also appears in the verifiedDocsBits then that means during indexing + // we were able to extract all query terms and for this candidate match + // and we determined based on the nature of the query that it is safe to skip + // the MemoryIndex verification. 
+ if (verifiedDocsBits.get(docId)) { + return true; + } + Query query = percolatorQueries.apply(docId); + if (query == null) { + return false; + } + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + return Lucene.exists(percolatorIndexSearcher, query); + } + }; } + } - boolean matchDocId(int docId) throws IOException { - // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. - // If docId also appears in the verifiedDocsBits then that means during indexing - // we were able to extract all query terms and for this candidate match - // and we determined based on the nature of the query that it is safe to skip - // the MemoryIndex verification. - if (verifiedDocsBits.get(docId)) { - return true; - } - Query query = percolatorQueries.apply(docId); - if (query == null) { - return false; - } - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST).add(nonNestedDocsFilter, Occur.FILTER).build(); - } - return Lucene.exists(percolatorIndexSearcher, query); - } - }; - } + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -264,8 +283,7 @@ abstract static class BaseScorer extends Scorer { final Scorer approximation; - BaseScorer(Weight weight, Scorer approximation) { - super(weight); + BaseScorer(Scorer approximation) { this.approximation = approximation; } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 05a935229246d..67d99132c0d46 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -55,6 +55,7 @@ import org.apache.lucene.search.QueryVisitor; import 
org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermInSetQuery; @@ -1288,7 +1289,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { float _score[] = new float[] { boost }; DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); CheckedFunction leaf = queryStore.getQueries(context); @@ -1312,7 +1313,7 @@ protected boolean match(int doc) { } } }; - return new Scorer(this) { + Scorer scorer = new Scorer(this) { @Override public int docID() { @@ -1334,6 +1335,7 @@ public float getMaxScore(int upTo) throws IOException { return _score[0]; } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 597e23881362b..c8e4058e424cd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchException; @@ -183,7 +184,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator 
approximation = DocIdSetIterator.all(context.reader().maxDoc()); final FilterScript leafScript = filterScript.newInstance(new DocValuesDocReader(lookup, context)); TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -200,7 +201,8 @@ public float matchCost() { return 1000f; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index 389485ac4eaf2..fe4b2a94279f6 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -72,7 +73,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { LeafReader leafReader = context.reader(); FixedBitSet bitSet = new FixedBitSet(leafReader.maxDoc()); Terms terms = leafReader.terms(RoutingFieldMapper.NAME); @@ -81,87 +82,102 @@ public Scorer scorer(LeafReaderContext context) throws IOException { int targetShardId = indexRouting.getShard(Uid.decodeId(ref.bytes, ref.offset, ref.length), null); return shardId == targetShardId; }; - if (terms == null) { - // this is the common case - no partitioning and no _routing values - // in this case we also don't do anything special with regards to nested docs since we basically 
delete - // by ID and parent and nested all have the same id. - assert indexMetadata.isRoutingPartitionedIndex() == false; - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); - } else { - final BitSet parentBitSet; - if (nestedParentBitSetProducer == null) { - parentBitSet = null; - } else { - parentBitSet = nestedParentBitSetProducer.getBitSet(context); - if (parentBitSet == null) { - return null; // no matches - } - } - if (indexMetadata.isRoutingPartitionedIndex()) { - // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing - // this index is routing partitioned. - Visitor visitor = new Visitor(leafReader); - TwoPhaseIterator twoPhaseIterator = parentBitSet == null - ? new RoutingPartitionedDocIdSetIterator(visitor) - : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); - return new ConstantScoreScorer(this, score(), scoreMode, twoPhaseIterator); - } else { - // here we potentially guard the docID consumers with our parent bitset if we have one. - // this ensures that we are only marking root documents in the nested case and if necessary - // we do a second pass to mark the corresponding children in markChildDocs - Function maybeWrapConsumer = consumer -> { - if (parentBitSet != null) { - return docId -> { - if (parentBitSet.get(docId)) { - consumer.accept(docId); + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + if (terms == null) { + // this is the common case - no partitioning and no _routing values + // in this case we also don't do anything special with regards to nested docs since we basically delete + // by ID and parent and nested all have the same id. 
+ assert indexMetadata.isRoutingPartitionedIndex() == false; + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); + } else { + final BitSet parentBitSet; + if (nestedParentBitSetProducer == null) { + parentBitSet = null; + } else { + parentBitSet = nestedParentBitSetProducer.getBitSet(context); + if (parentBitSet == null) { + return null; // no matches + } + } + if (indexMetadata.isRoutingPartitionedIndex()) { + // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing + // this index is routing partitioned. + Visitor visitor = new Visitor(leafReader); + TwoPhaseIterator twoPhaseIterator = parentBitSet == null + ? new RoutingPartitionedDocIdSetIterator(visitor) + : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); + return new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator); + } else { + // here we potentially guard the docID consumers with our parent bitset if we have one. + // this ensures that we are only marking root documents in the nested case and if necessary + // we do a second pass to mark the corresponding children in markChildDocs + Function maybeWrapConsumer = consumer -> { + if (parentBitSet != null) { + return docId -> { + if (parentBitSet.get(docId)) { + consumer.accept(docId); + } + }; } + return consumer; }; - } - return consumer; - }; - // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to delete - findSplitDocs(RoutingFieldMapper.NAME, ref -> { - int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); - return shardId == targetShardId; - }, leafReader, maybeWrapConsumer.apply(bitSet::set)); - - // TODO have the IndexRouting build the query and pass routingRequired in - boolean routingRequired = indexMetadata.mapping() == null ? 
false : indexMetadata.mapping().routingRequired(); - // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the ones - // with a routing value from the next iteration and delete / select based on the ID. - if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { - /* - * This is a special case where some docs don't have routing values. - * It's annoying, but it's allowed to build an index where some documents - * hve routing and others don't. - * - * Luckily, if the routing field is required in the mapping then we can - * safely assume that all documents which are don't have a routing are - * nested documents. And we pick those up later based on the assignment - * of the document that contains them. - */ - FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); - findSplitDocs( - RoutingFieldMapper.NAME, - Predicates.never(), - leafReader, - maybeWrapConsumer.apply(hasRoutingValue::set) - ); - IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { - if (hasRoutingValue.get(docId) == false) { - bitSetConsumer.accept(docId); + // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to + // delete + findSplitDocs(RoutingFieldMapper.NAME, ref -> { + int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); + return shardId == targetShardId; + }, leafReader, maybeWrapConsumer.apply(bitSet::set)); + + // TODO have the IndexRouting build the query and pass routingRequired in + boolean routingRequired = indexMetadata.mapping() == null + ? false + : indexMetadata.mapping().routingRequired(); + // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the + // ones + // with a routing value from the next iteration and delete / select based on the ID. 
+ if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { + /* + * This is a special case where some docs don't have routing values. + * It's annoying, but it's allowed to build an index where some documents + * hve routing and others don't. + * + * Luckily, if the routing field is required in the mapping then we can + * safely assume that all documents which are don't have a routing are + * nested documents. And we pick those up later based on the assignment + * of the document that contains them. + */ + FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); + findSplitDocs( + RoutingFieldMapper.NAME, + Predicates.never(), + leafReader, + maybeWrapConsumer.apply(hasRoutingValue::set) + ); + IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { + if (hasRoutingValue.get(docId) == false) { + bitSetConsumer.accept(docId); + } + }); } - }); + } + if (parentBitSet != null) { + // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. + markChildDocs(parentBitSet, bitSet); + } } + + return new ConstantScoreScorer(score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); } - if (parentBitSet != null) { - // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. 
- markChildDocs(parentBitSet, bitSet); - } - } - return new ConstantScoreScorer(this, score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + @Override + public long cost() { + return leafReader.maxDoc(); + } + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 976c99675817f..1d1ef63a5a0ce 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -52,6 +52,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; @@ -784,14 +785,15 @@ private static class RandomMatchQuery extends Query { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final FixedBitSet bits = new FixedBitSet(context.reader().maxDoc()); for (int i = 0; i < bits.length(); i++) { if (randomBoolean()) { bits.set(i); } } - return new ConstantScoreScorer(this, 1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + Scorer scorer = new ConstantScoreScorer(1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + return new DefaultScorerSupplier(scorer); } @Override diff --git 
a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index d8bb479e719f6..3892f5cba9b49 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -401,11 +401,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index 76f266eb80ab9..70b0252e4742b 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -69,8 +69,9 @@ public String toString(String field) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + return new DefaultScorerSupplier(scorer); } @Override @@ -347,16 +348,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return weight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - 
scorerCalled = true; - return weight.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { scorerSupplierCalled = true; - return weight.scorerSupplier(context); + ScorerSupplier inScorerSupplier = weight.scorerSupplier(context); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + scorerCalled = true; + return inScorerSupplier.get(leadCost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 0037642076566..d1f4cb0e1d857 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.ConstantScoreQuery; @@ -46,6 +45,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; @@ -496,9 +496,14 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { contextIndexSearcher.throwTimeExceededException(); - return new 
ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + Scorer scorer = new ConstantScoreScorer( + score(), + scoreMode, + DocIdSetIterator.all(context.reader().maxDoc()) + ); + return new DefaultScorerSupplier(scorer); } @Override @@ -746,15 +751,9 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.scorer(context); - } - - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.bulkScorer(context); + return weight.scorerSupplier(context); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 35b7ee9a9ddf2..b47970be0f187 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -240,11 +240,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index 9c1bdb236c031..f7a5be707f12e 100644 --- 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -158,14 +159,14 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo boolean firstSegment = true; @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { if (firstSegment == false && isTimeoutExpected) { shouldTimeout = true; } timeoutTrigger.accept(context); assert shouldTimeout == false : "should have already timed out"; firstSegment = false; - return super.scorer(context); + return super.scorerSupplier(context); } }; } @@ -201,33 +202,49 @@ private static TimeoutQuery newMatchAllBulkScorerTimeoutQuery(boolean timeoutExp public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new MatchAllWeight(this, boost, scoreMode) { @Override - public BulkScorer bulkScorer(LeafReaderContext context) { - final float score = score(); - final int maxDoc = context.reader().maxDoc(); - return new BulkScorer() { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier inScorerSupplier = super.scorerSupplier(context); + return new ScorerSupplier() { @Override - public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException { - assert shouldTimeout == false : "should have already timed out"; - max = Math.min(max, maxDoc); - ScoreAndDoc scorer = new ScoreAndDoc(); - scorer.score = score; - collector.setScorer(scorer); - for (int doc = 
min; doc < max; ++doc) { - scorer.doc = doc; - if (acceptDocs == null || acceptDocs.get(doc)) { - collector.collect(doc); - } - } - if (timeoutExpected) { - // timeout after collecting the first batch of documents from the 1st segment, or the entire 1st segment - shouldTimeout = true; - } - return max == maxDoc ? DocIdSetIterator.NO_MORE_DOCS : max; + public Scorer get(long leadCost) throws IOException { + return inScorerSupplier.get(leadCost); } @Override public long cost() { - return 0; + return inScorerSupplier.cost(); + } + + @Override + public BulkScorer bulkScorer() throws IOException { + final float score = score(); + final int maxDoc = context.reader().maxDoc(); + return new BulkScorer() { + @Override + public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException { + assert shouldTimeout == false : "should have already timed out"; + max = Math.min(max, maxDoc); + Score scorer = new Score(); + scorer.score = score; + collector.setScorer(scorer); + for (int doc = min; doc < max; ++doc) { + if (acceptDocs == null || acceptDocs.get(doc)) { + collector.collect(doc); + } + } + if (timeoutExpected) { + // timeout after collecting the first batch of documents from the 1st segment, or the entire 1st + // segment + shouldTimeout = true; + } + return max == maxDoc ? 
DocIdSetIterator.NO_MORE_DOCS : max; + } + + @Override + public long cost() { + return 0; + } + }; } }; } @@ -257,14 +274,8 @@ private TestSearchContext createSearchContext(Query query, int size) throws IOEx return context; } - private static class ScoreAndDoc extends Scorable { + private static class Score extends Scorable { float score; - int doc = -1; - - @Override - public int docID() { - return doc; - } @Override public float score() { @@ -314,8 +325,9 @@ protected MatchAllWeight(Query query, float score, ScoreMode scoreMode) { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java index 386c983c8e6af..3694d6f478caa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java @@ -69,14 +69,6 @@ public String toString(String field) { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - final ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) 
throws IOException { @@ -96,12 +88,12 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti * can't do that because we need the check the number of fields. */ if (lfd instanceof LeafNumericFieldData n) { - return scorerSupplier(context, n.getLongValues(), this, boost, scoreMode); + return scorerSupplier(context, n.getLongValues(), boost, scoreMode); } if (lfd instanceof LeafOrdinalsFieldData o) { - return scorerSupplier(context, o.getOrdinalsValues(), this, boost, scoreMode); + return scorerSupplier(context, o.getOrdinalsValues(), boost, scoreMode); } - return scorerSupplier(context, lfd.getBytesValues(), this, boost, scoreMode); + return scorerSupplier(context, lfd.getBytesValues(), boost, scoreMode); } @Override @@ -113,7 +105,6 @@ public boolean isCacheable(LeafReaderContext ctx) { private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedNumericDocValues sortedNumerics, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -122,16 +113,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final PointValues points = context.reader().getPointValues(fieldData.getFieldName()); if (points != null && points.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { - return new PredicateScorerSupplier( - weight, - boost, - scoreMode, - maxDoc, - MULTI_VALUE_MATCH_COST, - sortedNumerics::advanceExact - ); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, sortedNumerics::advanceExact); } } final CheckedIntPredicate predicate = doc -> { @@ -144,13 +128,12 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, 
MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedSetDocValues sortedSetDocValues, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -159,10 +142,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final Terms terms = context.reader().terms(fieldData.getFieldName()); if (terms != null && terms.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -181,20 +163,18 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedBinaryDocValues sortedBinaryDocValues, - Weight weight, float boost, ScoreMode scoreMode ) { final int maxDoc = context.reader().maxDoc(); if (FieldData.unwrapSingleton(sortedBinaryDocValues) != null) { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -212,7 +192,7 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } }; } @@ -266,13 +246,11 @@ public int hashCode() { private static class DocIdSetIteratorScorerSupplier extends ScorerSupplier { - private final Weight weight; private final float score; private final ScoreMode scoreMode; private final DocIdSetIterator docIdSetIterator; - private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode scoreMode, DocIdSetIterator 
docIdSetIterator) { - this.weight = weight; + private DocIdSetIteratorScorerSupplier(float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { this.score = score; this.scoreMode = scoreMode; this.docIdSetIterator = docIdSetIterator; @@ -280,7 +258,7 @@ private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode sco @Override public Scorer get(long leadCost) { - return new ConstantScoreScorer(weight, score, scoreMode, docIdSetIterator); + return new ConstantScoreScorer(score, scoreMode, docIdSetIterator); } @Override @@ -290,23 +268,13 @@ public long cost() { } private static class PredicateScorerSupplier extends ScorerSupplier { - - private final Weight weight; private final float score; private final ScoreMode scoreMode; private final int maxDoc; private final int matchCost; private final CheckedIntPredicate predicate; - private PredicateScorerSupplier( - Weight weight, - float score, - ScoreMode scoreMode, - int maxDoc, - int matchCost, - CheckedIntPredicate predicate - ) { - this.weight = weight; + private PredicateScorerSupplier(float score, ScoreMode scoreMode, int maxDoc, int matchCost, CheckedIntPredicate predicate) { this.score = score; this.scoreMode = scoreMode; this.maxDoc = maxDoc; @@ -327,7 +295,7 @@ public float matchCost() { return matchCost; } }; - return new ConstantScoreScorer(weight, score, scoreMode, iterator); + return new ConstantScoreScorer(score, scoreMode, iterator); } @Override diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java index d9e65c385c610..2370a3dee6d03 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java @@ -79,12 
+79,10 @@ public Query rewrite(IndexSearcher searcher) throws IOException { */ protected static class CappedBulkScorer extends BulkScorer { final BulkScorer bulkScorer; - final Weight weight; final float maxScore; - public CappedBulkScorer(BulkScorer bulkScorer, Weight weight, float maxScore) { + public CappedBulkScorer(BulkScorer bulkScorer, float maxScore) { this.bulkScorer = bulkScorer; - this.weight = weight; this.maxScore = maxScore; } @@ -125,15 +123,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final Weight innerWeight = searcher.createWeight(query, scoreMode, boost); if (scoreMode.needsScores()) { return new CappedScoreWeight(this, innerWeight, maxScore) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - final BulkScorer innerScorer = innerWeight.bulkScorer(context); - if (innerScorer == null) { - return null; - } - return new CappedBulkScorer(innerScorer, this, maxScore); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); @@ -152,7 +141,13 @@ public Scorer get(long leadCost) throws IOException { return innerScorer; } } - return new CappedScorer(innerWeight, innerScorer, maxScore); + return new CappedScorer(innerScorer, maxScore); + } + + @Override + public BulkScorer bulkScorer() throws IOException { + final BulkScorer innerScorer = innerScorerSupplier.bulkScorer(); + return new CappedBulkScorer(innerScorer, maxScore); } @Override @@ -166,15 +161,6 @@ public long cost() { public Matches matches(LeafReaderContext context, int doc) throws IOException { return innerWeight.matches(context, doc); } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } }; } else { return 
innerWeight; diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java index 6ad3b9ce4ef85..ccc90e8f671a6 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java @@ -11,6 +11,7 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -36,8 +37,22 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new CappedScorer(this, innerWeight.scorer(context), maxScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); + if (innerScorerSupplier == null) { + return null; + } + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new CappedScorer(innerScorerSupplier.get(leadCost), maxScore); + } + + @Override + public long cost() { + return innerScorerSupplier.cost(); + } + }; } @Override diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java index 57b2b62b77f6d..67813588ba3be 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java +++ 
b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java @@ -9,15 +9,14 @@ import org.apache.lucene.search.FilterScorer; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; import java.io.IOException; public class CappedScorer extends FilterScorer { private final float maxScore; - public CappedScorer(Weight weight, Scorer delegate, float maxScore) { - super(delegate, weight); + public CappedScorer(Scorer delegate, float maxScore) { + super(delegate); this.maxScore = maxScore; } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java index 608e5f1972373..191775f46cd72 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -69,44 +70,56 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ByteArrayStreamInput bytes = new ByteArrayStreamInput(); final BinaryDocValues values = DocValues.getBinary(context.reader(), field); - Scorer approxScorer = approxWeight.scorer(context); - if (approxScorer == null) { + ScorerSupplier approxScorerSupplier = 
approxWeight.scorerSupplier(context); + if (approxScorerSupplier == null) { // No matches to be had return null; } - DocIdSetIterator approxDisi = approxScorer.iterator(); - TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + + return new ScorerSupplier() { @Override - public boolean matches() throws IOException { - if (values.advanceExact(approxDisi.docID()) == false) { - // Can happen when approxQuery resolves to some form of MatchAllDocs expression - return false; - } - BytesRef arrayOfValues = values.binaryValue(); - bytes.reset(arrayOfValues.bytes); - bytes.setPosition(arrayOfValues.offset); - - int size = bytes.readVInt(); - for (int i = 0; i < size; i++) { - int valLength = bytes.readVInt(); - if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { - return true; + public Scorer get(long leadCost) throws IOException { + Scorer approxScorer = approxScorerSupplier.get(leadCost); + DocIdSetIterator approxDisi = approxScorer.iterator(); + TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + @Override + public boolean matches() throws IOException { + if (values.advanceExact(approxDisi.docID()) == false) { + // Can happen when approxQuery resolves to some form of MatchAllDocs expression + return false; + } + BytesRef arrayOfValues = values.binaryValue(); + bytes.reset(arrayOfValues.bytes); + bytes.setPosition(arrayOfValues.offset); + + int size = bytes.readVInt(); + for (int i = 0; i < size; i++) { + int valLength = bytes.readVInt(); + if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { + return true; + } + bytes.skipBytes(valLength); + } + return false; + } + + @Override + public float matchCost() { + // TODO: how can we compute this? + return 1000f; } - bytes.skipBytes(valLength); - } - return false; + }; + return new ConstantScoreScorer(score(), scoreMode, twoPhase); } @Override - public float matchCost() { - // TODO: how can we compute this? 
- return 1000f; + public long cost() { + return approxScorerSupplier.cost(); } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); } @Override From a5c2206cdf5a14aeb3a3a08dc32b51e363eb2361 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 12:49:45 +0200 Subject: [PATCH 093/417] Fix CompiledAutomaton ctor compilation The determinization work limit was removed from the contructor with https://github.com/apache/lucene/pull/485 and also its not not optional anymore to pass in whether the automaton is finite or not. Assuming it is not seems to be the right choice according to https://github.com/apache/lucene/pull/11813 --- .../org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java | 2 +- .../index/mapper/IpPrefixAutomatonUtilTests.java | 4 ++-- .../org/elasticsearch/xpack/versionfield/VersionEncoder.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java index db9e3e77eef67..c08db91e65c1c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java @@ -75,7 +75,7 @@ static CompiledAutomaton buildIpPrefixAutomaton(String ipPrefix) { result = Automata.makeAnyBinary(); } result = Operations.determinize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CompiledAutomaton(result, null, false, 0, true); + return new CompiledAutomaton(result, false, false, true); } private static Automaton getIpv6Automaton(String ipPrefix) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java index ead9897577332..311b340f54466 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java @@ -223,11 +223,11 @@ public void testAutomatonFromIPv6Group() throws UnknownHostException { } private static CompiledAutomaton compileAutomaton(Automaton automaton) { + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); CompiledAutomaton compiledAutomaton = new CompiledAutomaton( automaton, - null, false, - Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, + false, true ); return compiledAutomaton; diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java index 651e07246c59c..4f42103bc4541 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java @@ -214,7 +214,7 @@ static CompiledAutomaton prefixAutomaton(String versionPrefix, boolean caseInsen assert a.isDeterministic(); a = Operations.determinize(a, 0); - return new CompiledAutomaton(a, null, true, 0, true); + return new CompiledAutomaton(a, false, true, true); } static class EncodedVersion { From 4da393a836c6ac3a7e376364f48aabff682954d6 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 12:13:51 +0100 Subject: [PATCH 094/417] Fix MMapDirectory preload --- .../store/smb/SmbMmapFsDirectoryFactory.java | 1 - .../index/store/FsDirectoryFactory.java | 69 ++++--------------- .../index/store/FsDirectoryFactoryTests.java | 58 ++++++++++------ 3 files changed, 53 insertions(+), 75 deletions(-) diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java 
b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java index ac95538553b92..cef5d897440fe 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java @@ -26,7 +26,6 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index return new SmbDirectoryWrapper( setPreload( new MMapDirectory(location, lockFactory), - lockFactory, new HashSet<>(indexSettings.getValue(IndexModule.INDEX_STORE_PRE_LOAD_SETTING)) ) ); diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index 05c3554b47602..40e98736406d2 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -32,6 +32,7 @@ import java.nio.file.Path; import java.util.HashSet; import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { @@ -66,12 +67,12 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index // Use Lucene defaults final FSDirectory primaryDirectory = FSDirectory.open(location, lockFactory); if (primaryDirectory instanceof MMapDirectory mMapDirectory) { - return new HybridDirectory(lockFactory, setPreload(mMapDirectory, lockFactory, preLoadExtensions)); + return new HybridDirectory(lockFactory, setPreload(mMapDirectory, preLoadExtensions)); } else { return primaryDirectory; } case MMAPFS: - return setPreload(new MMapDirectory(location, lockFactory), lockFactory, preLoadExtensions); + return setPreload(new MMapDirectory(location, lockFactory), preLoadExtensions); case SIMPLEFS: case NIOFS: return new NIOFSDirectory(location, lockFactory); @@ -80,17 +81,23 @@ 
protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index } } - public static MMapDirectory setPreload(MMapDirectory mMapDirectory, LockFactory lockFactory, Set preLoadExtensions) - throws IOException { - assert mMapDirectory.getPreload() == false; + /** Sets the preload, if any, on the given directory based on the extensions. Returns the same directory instance. */ + // visibility and extensibility for testing + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + mMapDirectory.setPreload(getPreloadFunc(preLoadExtensions)); + return mMapDirectory; + } + + /** Gets a preload function based on the given preLoadExtensions. */ + static BiPredicate getPreloadFunc(Set preLoadExtensions) { if (preLoadExtensions.isEmpty() == false) { if (preLoadExtensions.contains("*")) { - mMapDirectory.setPreload(true); + return MMapDirectory.ALL_FILES; } else { - return new PreLoadMMapDirectory(mMapDirectory, lockFactory, preLoadExtensions); + return (name, context) -> preLoadExtensions.contains(FileSwitchDirectory.getExtension(name)); } } - return mMapDirectory; + return MMapDirectory.NO_FILES; } /** @@ -161,50 +168,4 @@ MMapDirectory getDelegate() { return delegate; } } - - // TODO it would be nice to share code between PreLoadMMapDirectory and HybridDirectory but due to the nesting aspect of - // directories here makes it tricky. It would be nice to allow MMAPDirectory to pre-load on a per IndexInput basis. 
- static final class PreLoadMMapDirectory extends MMapDirectory { - private final MMapDirectory delegate; - private final Set preloadExtensions; - - PreLoadMMapDirectory(MMapDirectory delegate, LockFactory lockFactory, Set preload) throws IOException { - super(delegate.getDirectory(), lockFactory); - super.setPreload(false); - this.delegate = delegate; - this.delegate.setPreload(true); - this.preloadExtensions = preload; - assert getPreload() == false; - } - - @Override - public void setPreload(boolean preload) { - throw new IllegalArgumentException("can't set preload on a preload-wrapper"); - } - - @Override - public IndexInput openInput(String name, IOContext context) throws IOException { - if (useDelegate(name)) { - // we need to do these checks on the outer directory since the inner doesn't know about pending deletes - ensureOpen(); - ensureCanRead(name); - return delegate.openInput(name, context); - } - return super.openInput(name, context); - } - - @Override - public synchronized void close() throws IOException { - IOUtils.close(super::close, delegate); - } - - boolean useDelegate(String name) { - final String extension = FileSwitchDirectory.getExtension(name); - return preloadExtensions.contains(extension); - } - - MMapDirectory getDelegate() { - return delegate; - } - } } diff --git a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index 689a25cd235a2..bcc15930c6c67 100644 --- a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -32,10 +32,16 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactoryTests extends ESTestCase { + final 
PreLoadExposingFsDirectoryFactory fsDirectoryFactory = new PreLoadExposingFsDirectoryFactory(); + public void testPreload() throws IOException { doTestPreload(); doTestPreload("nvd", "dvd", "tim"); @@ -59,10 +65,11 @@ public void testPreload() throws IOException { assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.tmp", newIOContext(random()))); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.fdt__0.tmp", newIOContext(random()))); MMapDirectory delegate = hybridDirectory.getDelegate(); - assertThat(delegate, Matchers.instanceOf(FsDirectoryFactory.PreLoadMMapDirectory.class)); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) delegate; - assertTrue(preLoadMMapDirectory.useDelegate("foo.dvd")); - assertTrue(preLoadMMapDirectory.useDelegate("foo.tmp")); + assertThat(delegate, Matchers.instanceOf(MMapDirectory.class)); + var func = fsDirectoryFactory.preLoadFuncMap.get(delegate); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); + fsDirectoryFactory.preLoadFuncMap.clear(); } } @@ -71,7 +78,21 @@ private Directory newDirectory(Settings settings) throws IOException { Path tempDir = createTempDir().resolve(idxSettings.getUUID()).resolve("0"); Files.createDirectories(tempDir); ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(idxSettings.getIndex(), 0)); - return new FsDirectoryFactory().newDirectory(idxSettings, path); + return fsDirectoryFactory.newDirectory(idxSettings, path); + } + + static class PreLoadExposingFsDirectoryFactory extends FsDirectoryFactory { + + // expose for testing + final Map> preLoadFuncMap = new HashMap<>(); + + @Override + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + var preLoadFunc = FsDirectoryFactory.getPreloadFunc(preLoadExtensions); + mMapDirectory.setPreload(preLoadFunc); + preLoadFuncMap.put(mMapDirectory, preLoadFunc); + return 
mMapDirectory; + } } private void doTestPreload(String... preload) throws IOException { @@ -84,26 +105,23 @@ private void doTestPreload(String... preload) throws IOException { assertSame(dir, directory); // prevent warnings assertFalse(directory instanceof SleepingLockWrapper); var mmapDirectory = FilterDirectory.unwrap(directory); + assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); if (preload.length == 0) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertFalse(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); } else if (Arrays.asList(preload).contains("*")) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertTrue(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); } else { - assertTrue(directory.toString(), mmapDirectory instanceof FsDirectoryFactory.PreLoadMMapDirectory); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) mmapDirectory; + var func = fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); for (String ext : preload) { - assertTrue("ext: " + ext, preLoadMMapDirectory.useDelegate("foo." + ext)); - assertTrue("ext: " + ext, preLoadMMapDirectory.getDelegate().getPreload()); + assertTrue("ext: " + ext, func.test("foo." 
+ ext, newIOContext(random()))); } - assertFalse(preLoadMMapDirectory.useDelegate("XXX")); - assertFalse(preLoadMMapDirectory.getPreload()); - preLoadMMapDirectory.close(); - expectThrows( - AlreadyClosedException.class, - () -> preLoadMMapDirectory.getDelegate().openInput("foo.tmp", IOContext.DEFAULT) - ); + assertFalse(func.test("XXX", newIOContext(random()))); + mmapDirectory.close(); + expectThrows(AlreadyClosedException.class, () -> mmapDirectory.openInput("foo.tmp", IOContext.DEFAULT)); } } expectThrows( From 5e31775ae9394113298367a3487e97895a5787a2 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 13:37:51 +0200 Subject: [PATCH 095/417] Fix 2 more errors related to openChecksumInput. --- .../codec/bloomfilter/ES85BloomFilterPostingsFormat.java | 8 +------- .../java/org/elasticsearch/indices/SystemIndices.java | 8 ++------ .../elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java | 2 +- 3 files changed, 4 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java index d26fb52a82bcd..81129835518da 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -142,12 +141,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - 
metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { CodecUtil.checkIndexHeader( metaIn, BLOOM_CODEC_NAME, diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 5afbb79028067..ad1dd3d25f078 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -262,9 +262,7 @@ private static Map getProductToSystemIndicesMap(M .collect( Collectors.toUnmodifiableMap( Entry::getKey, - entry -> new CharacterRunAutomaton( - Operations.determinize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ) + entry -> new CharacterRunAutomaton(Operations.determinize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)) ) ); } @@ -435,9 +433,7 @@ private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map SystemIndexDescriptor.buildAutomaton(descriptor.getIndexPattern(), descriptor.getAliasName())) .reduce(Operations::union); - return new CharacterRunAutomaton( - Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ); + return new CharacterRunAutomaton(Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton featureToIndexAutomaton(Feature feature) { diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index 069eb126bcb3c..18adebb145f98 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -196,7 +196,7 @@ static final OldSegmentInfos readCommit(Directory directory, String segmentFileN long generation = generationFromSegmentsFileName(segmentFileName); // System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); - try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName)) { try { return readCommit(directory, input, generation, minSupportedMajorVersion); } catch (EOFException | NoSuchFileException | FileNotFoundException e) { From 39d4d31e2e1ab6ada9e828d8f232823047086b92 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 12:35:39 +0100 Subject: [PATCH 096/417] spotless --- .../index/mapper/IpPrefixAutomatonUtilTests.java | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java index 311b340f54466..63c9d62d82800 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java @@ -224,12 +224,7 @@ public void testAutomatonFromIPv6Group() throws UnknownHostException { private static CompiledAutomaton compileAutomaton(Automaton automaton) { automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - CompiledAutomaton compiledAutomaton = new CompiledAutomaton( - automaton, - false, - false, - true - ); + CompiledAutomaton compiledAutomaton = new CompiledAutomaton(automaton, false, false, true); return compiledAutomaton; } } From 1bd1ede2a58973887c50ebaba1a30f91d5033d5b Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 12:35:26 +0100 
Subject: [PATCH 097/417] Add UpdateForV10 anno, and remove v7 index versions --- .../org/elasticsearch/core/UpdateForV10.java | 23 +++++++++++++++ .../elasticsearch/index/IndexVersions.java | 28 +++---------------- 2 files changed, 27 insertions(+), 24 deletions(-) create mode 100644 libs/core/src/main/java/org/elasticsearch/core/UpdateForV10.java diff --git a/libs/core/src/main/java/org/elasticsearch/core/UpdateForV10.java b/libs/core/src/main/java/org/elasticsearch/core/UpdateForV10.java new file mode 100644 index 0000000000000..0fe816bd3721d --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/core/UpdateForV10.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.core; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to identify a block of code (a whole class, a method, or a field) that needs to be reviewed (for cleanup, remove or change) + * before releasing 10.0 + */ +@Retention(RetentionPolicy.SOURCE) +@Target({ ElementType.LOCAL_VARIABLE, ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE }) +public @interface UpdateForV10 { +} diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 906dd4ddb51df..7f982fa7a48fb 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -11,7 +11,7 @@ import org.apache.lucene.util.Version; import org.elasticsearch.ReleaseVersions; import org.elasticsearch.core.Assertions; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import java.lang.reflect.Field; import java.util.Collection; @@ -47,29 +47,9 @@ private static IndexVersion def(int id, Version luceneVersion) { return new IndexVersion(id, luceneVersion); } - @UpdateForV9 // remove the index versions with which v9 will not need to interact + @UpdateForV10 // remove the index versions with which v10 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_0_0 = def(7_00_00_99, Version.LUCENE_8_0_0); - - public static final IndexVersion V_7_1_0 = def(7_01_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_0 = def(7_02_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_1 = def(7_02_01_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_3_0 = def(7_03_00_99, Version.LUCENE_8_1_0); - public static 
final IndexVersion V_7_4_0 = def(7_04_00_99, Version.LUCENE_8_2_0); - public static final IndexVersion V_7_5_0 = def(7_05_00_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_5_2 = def(7_05_02_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_6_0 = def(7_06_00_99, Version.LUCENE_8_4_0); - public static final IndexVersion V_7_7_0 = def(7_07_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_8_0 = def(7_08_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_9_0 = def(7_09_00_99, Version.LUCENE_8_6_0); - public static final IndexVersion V_7_10_0 = def(7_10_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_11_0 = def(7_11_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_12_0 = def(7_12_00_99, Version.LUCENE_8_8_0); - public static final IndexVersion V_7_13_0 = def(7_13_00_99, Version.LUCENE_8_8_2); - public static final IndexVersion V_7_14_0 = def(7_14_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_15_0 = def(7_15_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_16_0 = def(7_16_00_99, Version.LUCENE_8_10_1); - public static final IndexVersion V_7_17_0 = def(7_17_00_99, Version.LUCENE_8_11_1); + public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); @@ -167,7 +147,7 @@ private static IndexVersion def(int id, Version luceneVersion) { * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions. 
*/ - public static final IndexVersion MINIMUM_COMPATIBLE = V_7_0_0; + public static final IndexVersion MINIMUM_COMPATIBLE = V_8_0_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; From b7bdba4ecd68b4987e8252ed540094bf908364bc Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 12:57:15 +0100 Subject: [PATCH 098/417] Fix ExitableDirectoryReader --- .../internal/ExitableDirectoryReader.java | 45 ++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java index ee479df8627e6..77e3008029532 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; -import org.apache.lucene.index.FilterVectorValues; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; @@ -31,6 +30,7 @@ import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import java.io.IOException; +import java.util.Objects; /** * Wraps an {@link IndexReader} with a {@link QueryCancellation} @@ -621,4 +621,47 @@ private void checkAndThrowWithSampling() { } } } + + /** Delegates all methods to a wrapped {@link FloatVectorValues}. 
*/ + private abstract static class FilterVectorValues extends FloatVectorValues { + + /** Wrapped values */ + protected final FloatVectorValues in; + + /** Sole constructor */ + protected FilterVectorValues(FloatVectorValues in) { + Objects.requireNonNull(in); + this.in = in; + } + + @Override + public int docID() { + return in.docID(); + } + + @Override + public int nextDoc() throws IOException { + return in.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } + + @Override + public int dimension() { + return in.dimension(); + } + + @Override + public int size() { + return in.size(); + } + + @Override + public float[] vectorValue() throws IOException { + return in.vectorValue(); + } + } } From 919e906f826b2cb5a446e4473c61caad45ba791a Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 12:59:08 +0100 Subject: [PATCH 099/417] Fix FilterFloatVectorValues name --- .../search/internal/ExitableDirectoryReader.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java index 77e3008029532..aed6a612a052d 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java @@ -529,7 +529,7 @@ private void checkAndThrowWithSampling() { } } - private static class ExitableFloatVectorValues extends FilterVectorValues { + private static class ExitableFloatVectorValues extends FilterFloatVectorValues { private int calls; private final QueryCancellation queryCancellation; @@ -623,13 +623,13 @@ private void checkAndThrowWithSampling() { } /** Delegates all methods to a wrapped {@link FloatVectorValues}. 
*/ - private abstract static class FilterVectorValues extends FloatVectorValues { + private abstract static class FilterFloatVectorValues extends FloatVectorValues { /** Wrapped values */ protected final FloatVectorValues in; /** Sole constructor */ - protected FilterVectorValues(FloatVectorValues in) { + protected FilterFloatVectorValues(FloatVectorValues in) { Objects.requireNonNull(in); this.in = in; } From 0d672106506f07386efbc9430026e1f394248907 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 14:39:43 +0200 Subject: [PATCH 100/417] Implement missing prepareSeekExact methods for TermsEnum In https://github.com/apache/lucene/pull/13359 a new "prepareSeekExact" method was added that can improve seeking on TermsEnum implementations. Two of our own subclasses of TermsEnum don't seem to support seeking for text, so we can safely throw an UOE there. The third (FilterableTermsEnum) changes to simple returning a Supplier for the actual "seek" method for now. 
--- .../common/lucene/index/FilterableTermsEnum.java | 8 +++++++- .../index/mapper/flattened/FlattenedFieldMapper.java | 6 ++++++ .../xpack/core/termsenum/action/SimpleTermCountEnum.java | 6 ++++++ 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index e9ecfbad42811..3bc1949cdfcbf 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -26,6 +26,7 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.core.Nullable; import java.io.IOException; @@ -61,7 +62,7 @@ static class Holder { protected BytesRef current; protected final int docsEnumFlag; - public FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @Nullable Query filter) throws IOException { + public FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @Nullable Query f) throws IOException { if ((docsEnumFlag != PostingsEnum.FREQS) && (docsEnumFlag != PostingsEnum.NONE)) { throw new IllegalArgumentException("invalid docsEnumFlag of " + docsEnumFlag); } @@ -176,6 +177,11 @@ public boolean seekExact(BytesRef text) throws IOException { } } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) { + return () -> this.seekExact(bytesRef); + } + @Override public int docFreq() throws IOException { return currentDocFreq; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 0c1340867849a..986193880a123 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; @@ -459,6 +460,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java index b4952373dfdd3..92568c4f31c18 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -69,6 +70,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw 
new UnsupportedOperationException(); From e63efe9235be679eb9561fe37a3d08c032a1e281 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 15:06:56 +0200 Subject: [PATCH 101/417] Fix AutomatonQuery constructor compile errors The AutomatonQuery contructor doesn't support the determinization work limit parameter any more. Since we almost always use the default, we can simply omit calling this constructor. --- .../index/mapper/extras/SearchAsYouTypeFieldMapper.java | 4 ++-- .../common/lucene/search/CaseInsensitivePrefixQuery.java | 8 ++++---- .../lucene/search/CaseInsensitiveWildcardQuery.java | 4 ++-- .../org/elasticsearch/index/mapper/StringFieldType.java | 6 +++--- .../org/elasticsearch/index/mapper/TextFieldMapper.java | 4 ++-- .../xpack/versionfield/VersionFieldWildcardQuery.java | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java index d521f9b2d2a31..9041eae4e472c 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java @@ -468,8 +468,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? 
new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java index e83edaf1d9e22..e2d52935ead95 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java @@ -19,12 +19,12 @@ public CaseInsensitivePrefixQuery(Term term) { super(term, caseInsensitivePrefix(term.text())); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary) { + super(term, caseInsensitivePrefix(term.text()), isBinary); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { + super(term, caseInsensitivePrefix(term.text()), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java index 9480ce19e6c87..dd40db7d01b7e 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java +++ 
b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java @@ -25,8 +25,8 @@ public CaseInsensitiveWildcardQuery(Term term) { super(term, toCaseInsensitiveWildcardAutomaton(term)); } - public CaseInsensitiveWildcardQuery(Term term, int determinizeWorkLimit, boolean isBinary, RewriteMethod rewriteMethod) { - super(term, toCaseInsensitiveWildcardAutomaton(term), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitiveWildcardQuery(Term term, boolean isBinary, RewriteMethod rewriteMethod) { + super(term, toCaseInsensitiveWildcardAutomaton(term), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 778c733c745ac..5ad64a67b5b17 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -101,8 +101,8 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool Term prefix = new Term(name(), indexedValueForSearch(value)); if (caseInsensitive) { return method == null - ? new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? new CaseInsensitivePrefixQuery(prefix,false) + : new CaseInsensitivePrefixQuery(prefix, false, method); } return method == null ? new PrefixQuery(prefix) : new PrefixQuery(prefix, method); } @@ -171,7 +171,7 @@ protected Query wildcardQuery( if (caseInsensitive) { return method == null ? new CaseInsensitiveWildcardQuery(term) - : new CaseInsensitiveWildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + : new CaseInsensitiveWildcardQuery(term, false, method); } return method == null ? 
new WildcardQuery(term) : new WildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, method); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 9c98647f37bd9..8da769c3543d0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -602,8 +602,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java index 387a49a29dc23..0370a02c709bf 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java @@ -40,11 +40,11 @@ class VersionFieldWildcardQuery extends AutomatonQuery { private static final byte WILDCARD_CHAR = '?'; VersionFieldWildcardQuery(Term term, boolean caseInsensitive) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true); + super(term, toAutomaton(term, caseInsensitive), 
true); } VersionFieldWildcardQuery(Term term, boolean caseInsensitive, RewriteMethod rewriteMethod) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true, rewriteMethod); + super(term, toAutomaton(term, caseInsensitive), true, rewriteMethod); } private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive) { From 97c6ba7185e53ef4903ed3b1e1ad6b1ef01937d1 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 28 Aug 2024 16:44:49 +0200 Subject: [PATCH 102/417] Fix StoredFields-related compile errors. --- .../extras/SourceConfirmedTextQueryTests.java | 2 +- .../extras/SourceIntervalsSourceTests.java | 2 +- .../FieldUsageTrackingDirectoryReader.java | 14 ------ .../common/lucene/LuceneTests.java | 7 ++- .../lucene/index/FreqTermsEnumTests.java | 4 +- .../PersistedClusterStateServiceTests.java | 4 +- .../index/engine/InternalEngineTests.java | 4 +- .../RecoverySourcePruneMergePolicyTests.java | 10 +++-- .../mapper/BooleanScriptFieldTypeTests.java | 12 ++++- .../mapper/DoubleScriptFieldTypeTests.java | 17 +++++-- .../index/mapper/IpScriptFieldTypeTests.java | 8 ++-- .../mapper/KeywordScriptFieldTypeTests.java | 17 +++++-- .../mapper/LongScriptFieldTypeTests.java | 17 +++++-- .../index/shard/RefreshListenersTests.java | 5 +-- .../index/shard/ShardSplittingQueryTests.java | 4 +- .../indices/IndicesRequestCacheTests.java | 2 +- .../lucene/queries/BlendedTermQueryTests.java | 8 ++-- .../script/ScriptTermStatsTests.java | 4 +- .../AbstractScriptFieldTypeTestCase.java | 2 +- .../accesscontrol/FieldSubsetReader.java | 16 ------- .../sourceonly/SourceOnlySnapshotTests.java | 11 +++-- .../accesscontrol/FieldSubsetReaderTests.java | 44 +------------------ .../cache/full/PersistentCache.java | 7 ++- .../InMemoryNoOpCommitDirectoryTests.java | 4 +- 24 files changed, 110 insertions(+), 115 deletions(-) diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java 
b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java index 81e1dd7099860..4c2b486c87586 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java @@ -60,7 +60,7 @@ public class SourceConfirmedTextQueryTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = context -> docID -> { sourceFetchCount.incrementAndGet(); - return Collections.singletonList(context.reader().document(docID).get("body")); + return Collections.singletonList(context.reader().storedFields().document(docID).get("body")); }; public void testTerm() throws Exception { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java index ba14c891c471c..636e61726d880 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java @@ -40,7 +40,7 @@ public class SourceIntervalsSourceTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = - context -> docID -> Collections.singletonList(context.reader().document(docID).get("body")); + context -> docID -> Collections.singletonList(context.reader().storedFields().document(docID).get("body")); public void testIntervals() throws IOException { final FieldType ft = new FieldType(TextField.TYPE_STORED); diff --git a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java index 
07fa169642dbf..27c87550c34b5 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java @@ -102,15 +102,6 @@ static final class FieldUsageTrackingLeafReader extends SequentialStoredFieldsLe this.notifier = notifier; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f != null) { - f = new FieldUsageTrackingTermVectorFields(f); - } - return f; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -135,11 +126,6 @@ public PointValues getPointValues(String field) throws IOException { return pointValues; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { StoredFields storedFields = super.storedFields(); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 3892f5cba9b49..c674ef505c77c 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexOrDocValuesQuery; @@ -489,8 +490,9 @@ public void testWrapAllDocsLive() throws Exception { IndexSearcher searcher = newSearcher(reader); Set actualDocs = new HashSet<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 
Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } @@ -535,8 +537,9 @@ public void testWrapLiveDocsNotExposeAbortedDocuments() throws Exception { IndexSearcher searcher = newSearcher(reader); List actualDocs = new ArrayList<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 8fd4a2c228ad8..548110680839b 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -126,8 +127,9 @@ public void setUp() throws Exception { // now go over each doc, build the relevant references and filter reader = DirectoryReader.open(iw); List filterTerms = new ArrayList<>(); + StoredFields storedFields = reader.storedFields(); for (int docId = 0; docId < reader.maxDoc(); docId++) { - Document doc = reader.document(docId); + Document doc = storedFields.document(docId); addFreqs(doc, referenceAll); if 
(deletedIds.contains(doc.getField("id").stringValue()) == false) { addFreqs(doc, referenceNotDeleted); diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index ee5b4972c13ad..17ba9cdc41f79 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -1775,9 +1776,10 @@ private static void forEachDocument(DirectoryReader reader, Set types, C final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); document.add(new StringField(TYPE_FIELD_NAME, typeName, Field.Store.NO)); consumer.accept(document); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 77f5fa301c78d..938731f932632 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -2432,7 +2432,7 @@ class OpAndVersion { Engine.Get engineGet = new Engine.Get(true, false, doc.id()); try (Engine.GetResult get = engine.get(engineGet, mappingLookup, documentParser, randomSearcherWrapper())) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); String removed = op % 3 == 0 && values.size() > 0 ? 
values.remove(0) : null; String added = "v_" + idGenerator.incrementAndGet(); @@ -2478,7 +2478,7 @@ class OpAndVersion { Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id()), mappingLookup, documentParser, randomSearcherWrapper()) ) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); assertThat(currentValues, equalTo(new HashSet<>(values))); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java index 1480af7b9b6ff..b5160dbbb3646 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; @@ -68,8 +69,9 @@ public void testPruneAll() throws IOException { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); if (pruneIdField) { assertEquals(1, document.getFields().size()); assertEquals("source", document.getFields().get(0).name()); @@ -150,8 +152,9 @@ public void testPruneSome() throws IOException { assertEquals(1, 
reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("even")); @@ -191,8 +194,9 @@ public void testPruneNone() throws IOException { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("extra_source")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index 89d5e300112b4..877b4effe4350 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -120,8 +121,15 @@ public void testSort() throws IOException { BooleanScriptFieldData ifd = 
simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [true]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [false]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [true]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index 6e12778f87cf9..18c4bfca5c8be 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -119,9 +120,19 @@ public void testSort() throws IOException { DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), 
equalTo("{\"foo\": [1.1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2.1]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4.2]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4.2]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index 4593b149e13db..5590fbd4f96f6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -124,16 +125,17 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); + StoredFields storedFields = reader.storedFields(); assertThat( - reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + 
storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.1\"]}") ); assertThat( - reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.2\"]}") ); assertThat( - reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.4\"]}") ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index b5270b358ec40..c18ea83955392 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -114,9 +115,19 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"a\"]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"b\"]}")); - 
assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"d\"]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"a\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"b\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"d\"]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index c9ac1516b6f8e..b0b953279629c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; @@ -131,9 +132,19 @@ public void testSort() throws IOException { LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2]}")); - 
assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 55bfc2480d4d6..5eb740ba37180 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -436,9 +436,8 @@ public void testLotsOfThreads() throws Exception { ) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.document( - getResult.docIdAndVersion().docId - ); + org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.storedFields() + .document(getResult.docIdAndVersion().docId); assertThat(document.getValues("test"), arrayContaining(testFieldValue)); } } catch (Exception t) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java index 9d03cc5b8aa8c..62e76e015afef 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.index.IndexableField; import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; @@ -171,6 +172,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole int doc; int numActual = 0; int lastDoc = 0; + StoredFields storedFields = reader.storedFields(); while ((doc = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { lastDoc = doc; while (shard_id.nextDoc() < doc) { @@ -180,7 +182,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole } assertEquals(shard_id.docID(), doc); long shardID = shard_id.nextValue(); - BytesRef id = reader.document(doc).getBinaryValue("_id"); + BytesRef id = storedFields.document(doc).getBinaryValue("_id"); String actualId = Uid.decodeId(id.bytes, id.offset, id.length); assertNotEquals(ctx.reader() + " docID: " + doc + " actualID: " + actualId, shardID, targetShardId); } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index e8f6061efb8d9..d3fc58cec5fb9 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -421,7 +421,7 @@ public BytesReference get() { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1); assertEquals(1, topDocs.totalHits.value); - Document document = reader.document(topDocs.scoreDocs[0].doc); + Document document = reader.storedFields().document(topDocs.scoreDocs[0].doc); out.writeString(document.get("value")); loadedFromCache = false; return out.bytes(); diff --git 
a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java index 02a79ccba5bb2..4e0916d308f40 100644 --- a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java @@ -87,7 +87,7 @@ public void testDismaxQuery() throws IOException { query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { BooleanQuery.Builder query = new BooleanQuery.Builder(); @@ -109,7 +109,7 @@ public void testDismaxQuery() throws IOException { query.add(gen, BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 4); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(1), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { @@ -249,7 +249,7 @@ public void testMinTTF() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } reader.close(); w.close(); @@ -331,7 +331,7 @@ public void testMissingFields() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); 
TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); reader.close(); w.close(); diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java index d748ad0f1569d..cef3b0bc902d1 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -335,10 +336,11 @@ private void assertAllDocs(Set terms, Function fun withIndexSearcher(searcher -> { for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { IndexReader reader = leafReaderContext.reader(); + StoredFields storedFields = reader.storedFields(); DocIdSetIterator docIdSetIterator = DocIdSetIterator.all(reader.maxDoc()); ScriptTermStats termStats = new ScriptTermStats(searcher, leafReaderContext, docIdSetIterator::docID, terms); while (docIdSetIterator.nextDoc() <= reader.maxDoc()) { - String docId = reader.document(docIdSetIterator.docID()).get("id"); + String docId = storedFields.document(docIdSetIterator.docID()).get("id"); if (expectedValues.containsKey(docId)) { assertThat(function.apply(termStats), expectedValues.get(docId)); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index d812e158a1675..207e5291cbd16 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -501,7 +501,7 @@ private void assertQueryOnlyOnText(String queryName, ThrowingRunnable buildQuery } protected final String readSource(IndexReader reader, int docId) throws IOException { - return reader.document(docId).getBinaryValue("_source").utf8ToString(); + return reader.storedFields().document(docId).getBinaryValue("_source").utf8ToString(); } protected final void checkExpensiveQuery(BiConsumer queryBuilder) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java index 09a49c53ee1a5..908f58c5f9147 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java @@ -155,17 +155,6 @@ public FieldInfos getFieldInfos() { return fieldInfos; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f == null) { - return null; - } - f = new FieldFilterFields(f); - // we need to check for emptyness, so we can return null: - return f.iterator().hasNext() ? 
f : null; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -264,11 +253,6 @@ private static int step(CharacterRunAutomaton automaton, String key, int state) return state; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - super.document(docID, new FieldSubsetStoredFieldVisitor(visitor)); - } - @Override protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { return new FieldSubsetStoredFieldsReader(reader); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java index 65d057408f8bd..46b4384af0914 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.IndexSearcher; @@ -108,8 +109,10 @@ DirectoryReader wrapReader(DirectoryReader reader) throws IOException { logger.warn(snapReader + " " + reader); assertEquals(snapReader.maxDoc(), reader.maxDoc()); assertEquals(snapReader.numDocs(), reader.numDocs()); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < snapReader.maxDoc(); i++) { - assertEquals(snapReader.document(i).get("_source"), reader.document(i).get("_source")); + assertEquals(snapStoredFields.document(i).get("_source"), 
storedFields.document(i).get("_source")); } for (LeafReaderContext ctx : snapReader.leaves()) { if (ctx.reader() instanceof SegmentReader) { @@ -188,8 +191,10 @@ public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 3); assertEquals(snapReader.numDocs(), 2); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < 3; i++) { - assertEquals(snapReader.document(i).get("src"), reader.document(i).get("src")); + assertEquals(snapStoredFields.document(i).get("src"), storedFields.document(i).get("src")); } IndexSearcher searcher = newSearcher(snapReader); TopDocs id = searcher.search(new TermQuery(new Term("id", "1")), 10); @@ -321,7 +326,7 @@ public boolean keepFullyDeletedSegment(IOSupplier readerIOSupplier) try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 1); assertEquals(snapReader.numDocs(), 1); - assertEquals("3", snapReader.document(0).getField("rank").stringValue()); + assertEquals("3", snapReader.storedFields().document(0).getField("rank").stringValue()); } try (IndexReader writerReader = DirectoryReader.open(writer)) { assertEquals(writerReader.maxDoc(), 2); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 9c269259792bc..692946f11fd2d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -274,11 +274,6 @@ public void testStoredFieldsString() throws Exception { DirectoryReader ir = 
FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals("testA", d2.get("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -306,11 +301,6 @@ public void testStoredFieldsBinary() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(new BytesRef("testA"), d2.getBinaryValue("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -338,11 +328,6 @@ public void testStoredFieldsInt() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -370,11 +355,6 @@ public void testStoredFieldsLong() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1L, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -402,11 +382,6 @@ public void testStoredFieldsFloat() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, 
d2.getFields().size()); - assertEquals(1F, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -434,11 +409,6 @@ public void testStoredFieldsDouble() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1D, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -468,7 +438,7 @@ public void testVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - Fields vectors = ir.getTermVectors(0); + Fields vectors = ir.termVectors().get(0); Set seenFields = new HashSet<>(); for (String field : vectors) { seenFields.add(field); @@ -701,11 +671,6 @@ public void testSourceFilteringIntegration() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals("{\"fieldA\":\"testA\"}", d2.getBinaryValue(SourceFieldMapper.NAME).utf8ToString()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -1200,7 +1165,7 @@ public void testFilterAwayAllVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldB"))); // sees no fields - assertNull(ir.getTermVectors(0)); + assertNull(ir.termVectors().get(0)); TestUtil.checkReader(ir); IOUtils.close(ir, iw, dir); @@ -1228,14 +1193,9 @@ public void testEmpty() throws Exception { assertNull(segmentReader.terms("foo")); // see no vectors - 
assertNull(segmentReader.getTermVectors(0)); assertNull(segmentReader.termVectors().get(0)); // see no stored fields - { - Document document = segmentReader.document(0); - assertEquals(0, document.getFields().size()); - } { Document document = segmentReader.storedFields().document(0); assertEquals(0, document.getFields().size()); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index da08c6b38819b..a7fb5571995b3 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -173,9 +174,10 @@ long getCacheSize(ShardId shardId, SnapshotId snapshotId, Predicate predic final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final String cacheFileId = getValue(document, CACHE_ID_FIELD); if (predicate.test(snapshotCacheDir.resolve(cacheFileId))) { long size = buildCacheFileRanges(document).stream().mapToLong(ByteRange::length).sum(); @@ -423,9 +425,10 @@ static Map loadDocuments(Path directoryPath) throws IOExceptio for (LeafReaderContext leafReaderContext : indexReader.leaves()) { final LeafReader leafReader = leafReaderContext.reader(); final Bits liveDocs = leafReader.getLiveDocs(); + final StoredFields storedFields = leafReader.storedFields(); for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { - final Document document = leafReader.document(i); + final Document document = storedFields.document(i); logger.trace("loading document [{}]", document); documents.put(getValue(document, CACHE_ID_FIELD), document); } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java index c97d6cb4cab08..eab6f1a629f36 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java @@ -179,7 +179,7 @@ public void testSupportsNoOpCommits() throws IOException { final TopDocs 
topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) { @@ -226,7 +226,7 @@ public void testSupportsDeletes() throws IOException { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } assertEquals(1, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size()); From 041e4db9fa9c92dc1ed8a237d1351d278d6fadf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 17:30:14 +0200 Subject: [PATCH 103/417] Fix missing RegExp determinization Since https://github.com/apache/lucene/pull/513 Lucene's RegEx class doesn't minimize or determinize any more and lets the caller decide whether to perform any of these operations. This change uses Operations.determinize instead at the call site.
--- .../search/runtime/StringScriptFieldRegexpQuery.java | 8 +++++++- .../mapper/ConstantKeywordFieldMapper.java | 6 +++++- .../xpack/wildcard/mapper/WildcardFieldMapper.java | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java index 02c9914f2b850..444d5b8fc0c88 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -33,7 +34,12 @@ public StringScriptFieldRegexpQuery( script, leafFactory, fieldName, - new ByteRunAutomaton(new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates)) + new ByteRunAutomaton( + Operations.determinize( + new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ) + ) ); this.pattern = pattern; this.syntaxFlags = syntaxFlags; diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index f2222e0970ae0..605a0196bd848 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ 
-19,6 +19,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.LevenshteinAutomata; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.logging.DeprecationCategory; @@ -290,7 +291,10 @@ public Query regexpQuery( return new MatchNoDocsQuery(); } - final Automaton automaton = new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates); + final Automaton automaton = Operations.determinize( + new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ); final CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); if (runAutomaton.run(this.value)) { return new MatchAllDocsQuery(); diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index a61ecec57a1c5..4645918c4a853 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -388,7 +388,7 @@ public Query regexpQuery( Query approxNgramQuery = rewriteBoolToNgramQuery(approxBooleanQuery); RegExp regex = new RegExp(value, syntaxFlags, matchFlags); - Automaton automaton = regex.toAutomaton(maxDeterminizedStates); + Automaton automaton = Operations.determinize(regex.toAutomaton(), maxDeterminizedStates); // We can accelerate execution with the ngram query return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); From c9b6ffd40228ae27b828099626934482c805f2d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 17:46:21 +0200 Subject: 
[PATCH 104/417] Fix Missing WildcardQuery#toAutomaton(Term) method This method got a second parameter which is the determinization work limit in https://github.com/apache/lucene/pull/485 --- .../search/runtime/StringScriptFieldWildcardQuery.java | 3 ++- .../esql/core/expression/predicate/regex/LikePattern.java | 3 +-- .../core/expression/predicate/regex/WildcardPattern.java | 3 +-- .../xpack/ql/expression/predicate/regex/LikePattern.java | 3 +-- .../ql/expression/predicate/regex/WildcardPattern.java | 3 +-- .../xpack/wildcard/mapper/WildcardFieldMapper.java | 2 +- .../xpack/wildcard/mapper/WildcardFieldMapperTests.java | 8 ++++++-- 7 files changed, 13 insertions(+), 12 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java index 6242ab805954a..4f87e5956cb20 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java @@ -12,6 +12,7 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -43,7 +44,7 @@ private static Automaton buildAutomaton(Term term, boolean caseInsensitive) { if (caseInsensitive) { return AutomatonQueries.toCaseInsensitiveWildcardAutomaton(term); } - return WildcardQuery.toAutomaton(term); + return WildcardQuery.toAutomaton(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java index b579848a51f30..be1ad0391c8ca 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java @@ -50,8 +50,7 @@ public char escape() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java index c2c0ad60f45d1..3e9cbf92727c2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java @@ -38,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java index 6f8915dc685d0..e4f5810ac89d3 100644 --- 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java @@ -50,8 +50,7 @@ public char escape() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java index 325a021045b81..6703f1aeacbb5 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java @@ -38,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 4645918c4a853..13f49204fa398 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -348,7 +348,7 @@ public Query wildcardQuery(String wildcardPattern, 
RewriteMethod method, boolean } Automaton automaton = caseInsensitive ? AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term(name(), wildcardPattern)) - : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern)); + : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (clauseCount > 0) { // We can accelerate execution with the ngram query BooleanQuery approxQuery = rewritten.build(); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 578fc90bd3e5b..ef4f80137e15e 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -41,6 +41,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -678,7 +679,7 @@ public void testWildcardAcceleration() throws IOException, ParseException { public void testQueryCachingEquality() throws IOException, ParseException { String pattern = "A*b*B?a"; // Case sensitivity matters when it comes to caching - Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); Automaton caseInSensitiveAutomaton = AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term("field", pattern)); BinaryDvConfirmedAutomatonQuery csQ = new 
BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), @@ -696,7 +697,10 @@ public void testQueryCachingEquality() throws IOException, ParseException { assertNotEquals(csQ.hashCode(), ciQ.hashCode()); // Same query should be equal - Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton( + new Term("field", pattern), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); BinaryDvConfirmedAutomatonQuery csQ2 = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), "field", From 3c6ba73d4f46fd7e68caf5437ad447a53bf12e8a Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 28 Aug 2024 17:57:14 +0200 Subject: [PATCH 105/417] Fix compile errors in old-lucene-versions package --- .../xpack/lucene/bwc/codecs/BWCCodec.java | 7 ++---- .../LegacySortedSetDocValuesWrapper.java | 6 ++--- .../lucene50/Lucene50FieldInfosFormat.java | 1 + .../lucene54/Lucene54DocValuesProducer.java | 6 +++++ .../lucene60/MetadataOnlyBKDReader.java | 22 +++++++++---------- 5 files changed, 23 insertions(+), 19 deletions(-) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 25b4b685ac50f..13f6140587e4a 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.lucene.bwc.codecs; -import org.apache.lucene.backward_codecs.lucene70.Lucene70Codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FieldsConsumer; @@ -27,7 +26,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.store.Directory; import 
org.apache.lucene.store.IOContext; -import org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec; import java.io.IOException; import java.util.ArrayList; @@ -101,6 +99,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { false, fieldInfo.getIndexOptions(), fieldInfo.getDocValuesType(), + fieldInfo.hasDocValuesSkipIndex(), fieldInfo.getDocValuesGen(), fieldInfo.attributes(), fieldInfo.getPointDimensionCount(), @@ -119,9 +118,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { } public static SegmentInfo wrap(SegmentInfo segmentInfo) { - // special handling for Lucene70Codec (which is currently bundled with Lucene) - // Use BWCLucene70Codec instead as that one extends BWCCodec (similar to all other older codecs) - final Codec codec = segmentInfo.getCodec() instanceof Lucene70Codec ? new BWCLucene70Codec() : segmentInfo.getCodec(); + final Codec codec = segmentInfo.getCodec(); final SegmentInfo segmentInfo1 = new SegmentInfo( segmentInfo.dir, // Use Version.LATEST instead of original version, otherwise SegmentCommitInfo will bark when processing (N-1 limitation) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java index 21b6818bd5613..80236f3847e12 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java @@ -53,7 +53,7 @@ public int nextDoc() { while (docID < maxDoc) { values.setDocument(docID); ord = values.nextOrd(); - if (ord != NO_MORE_ORDS) { + if (ord != LegacySortedSetDocValues.NO_MORE_ORDS) { return docID; } docID++; @@ -81,7 +81,7 @@ public boolean advanceExact(int 
target) throws IOException { docID = target; values.setDocument(docID); ord = values.nextOrd(); - return ord != NO_MORE_ORDS; + return ord != LegacySortedSetDocValues.NO_MORE_ORDS; } @Override @@ -92,7 +92,7 @@ public long cost() { @Override public long nextOrd() { long result = ord; - if (result != NO_MORE_ORDS) { + if (result != LegacySortedSetDocValues.NO_MORE_ORDS) { ord = values.nextOrd(); } return result; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java index 83fcb17449100..0c71ea2df6932 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java @@ -103,6 +103,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm storePayloads, indexOptions, docValuesType, + false, dvGen, attributes, 0, diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java index 09147e821d9fb..607d9903abc87 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import 
org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -1316,6 +1317,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { } } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return null; + } + private SortedSetDocValues getSortedSetWithAddresses(FieldInfo field) throws IOException { final long valueCount = binaries.get(field.name).count; // we keep the byte[]s and list of ords on disk, these could be large diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java index f3ce3ea0755e1..43203caf571f1 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java @@ -63,14 +63,14 @@ public MetadataOnlyBKDReader(IndexInput metaIn) throws IOException { numLeaves = metaIn.readVInt(); assert numLeaves > 0; - minPackedValue = new byte[config.packedIndexBytesLength]; - maxPackedValue = new byte[config.packedIndexBytesLength]; - - metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength); - metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength); - final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim); - for (int dim = 0; dim < config.numIndexDims; dim++) { - if (comparator.compare(minPackedValue, dim * config.bytesPerDim, maxPackedValue, dim * config.bytesPerDim) > 0) { + minPackedValue = new byte[config.packedIndexBytesLength()]; + maxPackedValue = new byte[config.packedIndexBytesLength()]; + + metaIn.readBytes(minPackedValue, 0, 
config.packedIndexBytesLength()); + metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength()); + final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim()); + for (int dim = 0; dim < config.numIndexDims(); dim++) { + if (comparator.compare(minPackedValue, dim * config.bytesPerDim(), maxPackedValue, dim * config.bytesPerDim()) > 0) { throw new CorruptIndexException( "minPackedValue " + new BytesRef(minPackedValue) @@ -104,17 +104,17 @@ public byte[] getMaxPackedValue() { @Override public int getNumDimensions() { - return config.numDims; + return config.numDims(); } @Override public int getNumIndexDimensions() { - return config.numIndexDims; + return config.numIndexDims(); } @Override public int getBytesPerDimension() { - return config.bytesPerDim; + return config.bytesPerDim(); } @Override From c6dfc0b8abff6faa063a1b07198a88a63d74ba4b Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 17:42:22 +0100 Subject: [PATCH 106/417] Restore IndexVersion constants temporarily, to enable compilation --- .../elasticsearch/index/IndexVersions.java | 38 +++++++++++++++++-- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7f982fa7a48fb..df12dc546052d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -11,9 +11,10 @@ import org.apache.lucene.util.Version; import org.elasticsearch.ReleaseVersions; import org.elasticsearch.core.Assertions; -import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.core.UpdateForV9; import java.lang.reflect.Field; +import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -47,9 +48,38 @@ private static IndexVersion def(int id, Version luceneVersion) { return new 
IndexVersion(id, luceneVersion); } - @UpdateForV10 // remove the index versions with which v10 will not need to interact - public static final IndexVersion ZERO = def(0, Version.LATEST); + // TODO: this is just a hack to allow to keep the V7 IndexVersion constants, during compilation. Remove + private static Version parseUnchecked(String version) { + try { + return Version.parse(version); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + @UpdateForV9 // remove the index versions with which v9 will not need to interact + public static final IndexVersion ZERO = def(0, Version.LATEST); + public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0")); + + public static final IndexVersion V_7_1_0 = def(7_01_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); + public static final IndexVersion V_7_4_0 = def(7_04_00_99, parseUnchecked("8.2.0")); + public static final IndexVersion V_7_5_0 = def(7_05_00_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_5_2 = def(7_05_02_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); + public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); + public static final IndexVersion V_7_10_0 = def(7_10_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_11_0 = def(7_11_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); + public static final IndexVersion V_7_13_0 = def(7_13_00_99, 
parseUnchecked("8.8.2")); + public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_15_0 = def(7_15_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_16_0 = def(7_16_00_99, parseUnchecked("8.10.1")); + public static final IndexVersion V_7_17_0 = def(7_17_00_99, parseUnchecked("8.11.1")); public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); @@ -147,7 +177,7 @@ private static IndexVersion def(int id, Version luceneVersion) { * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions. */ - public static final IndexVersion MINIMUM_COMPATIBLE = V_8_0_0; + public static final IndexVersion MINIMUM_COMPATIBLE = V_7_0_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; From 710995bafa3f15ca90606e073103ad021e3679a3 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 28 Aug 2024 18:58:50 +0200 Subject: [PATCH 107/417] Fix signature of TermInSetQuery --- .../aggregations/ChildrenToParentAggregatorTests.java | 2 +- .../aggregations/ParentToChildrenAggregatorTests.java | 2 +- .../join/query/HasChildQueryBuilderTests.java | 3 ++- .../org/elasticsearch/percolator/CandidateQueryTests.java | 6 ++---- .../org/elasticsearch/percolator/QueryAnalyzerTests.java | 2 +- .../org/elasticsearch/index/mapper/IdFieldMapper.java | 5 +++-- .../org/elasticsearch/index/mapper/StringFieldType.java | 8 ++------ .../org/elasticsearch/index/mapper/IdFieldTypeTests.java | 4 +++- .../elasticsearch/index/mapper/KeywordFieldTypeTests.java | 2 +- .../mapper/flattened/KeyedFlattenedFieldTypeTests.java | 2 +- .../analytics/stringstats/StringStatsAggregatorTests.java | 2 +- 
.../xpack/esql/enrich/EnrichQuerySourceOperatorTests.java | 2 +- 12 files changed, 19 insertions(+), 21 deletions(-) diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 5a5ccb640f03d..7181f9b49f5f6 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -106,7 +106,7 @@ public void testParentChild() throws IOException { // verify for each children for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexReader, aggregation -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId("child0_" + parent))), indexReader, aggregation -> { assertEquals( "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation, 1, diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index 0bb64dcccbf11..3b556e2afb333 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -103,7 +103,7 @@ public void testParentChild() throws IOException { }); for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexReader, child -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(parent))), indexReader, child -> { 
assertEquals((long) expectedParentChildRelations.get(parent).v1(), child.getDocCount()); assertEquals( expectedParentChildRelations.get(parent).v2(), diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index ca6e5a8078f6f..80523e2d0223b 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -53,6 +53,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; @@ -343,7 +344,7 @@ static void assertLateParsingQuery(Query query, String type, String id) throws I assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).getQuery(); - assertEquals(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(id)), termsQuery); + assertEquals(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(id))), termsQuery); // check the type filter assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.FILTER)); assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).getQuery()); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 67d99132c0d46..30f2ddd9db7f1 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java 
@@ -246,15 +246,13 @@ public void testDuel() throws Exception { queryFunctions.add( () -> new TermInSetQuery( field1, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); queryFunctions.add( () -> new TermInSetQuery( field2, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); // many iterations with boolean queries, which are the most complex queries to deal with when nested diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index b3b47d909b046..2f38a10dd4844 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -81,7 +81,7 @@ public void testExtractQueryMetadata_termQuery() { } public void testExtractQueryMetadata_termsQuery() { - TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); + TermInSetQuery termsQuery = new TermInSetQuery("_field", List.of(new BytesRef("_term1"), new BytesRef("_term2"))); Result result = analyze(termsQuery); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index c5b1f575d941f..ae8da44486114 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -21,6 +21,7 @@ import 
java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map; /** @@ -101,13 +102,13 @@ public boolean isSearchable() { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(v -> { + List bytesRefs = values.stream().map(v -> { Object idObject = v; if (idObject instanceof BytesRef) { idObject = ((BytesRef) idObject).utf8ToString(); } return Uid.encodeId(idObject.toString()); - }).toArray(BytesRef[]::new); + }).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 5ad64a67b5b17..39cfeca175c6a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -100,9 +100,7 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool failIfNotIndexed(); Term prefix = new Term(name(), indexedValueForSearch(value)); if (caseInsensitive) { - return method == null - ? new CaseInsensitivePrefixQuery(prefix,false) - : new CaseInsensitivePrefixQuery(prefix, false, method); + return method == null ? new CaseInsensitivePrefixQuery(prefix, false) : new CaseInsensitivePrefixQuery(prefix, false, method); } return method == null ? new PrefixQuery(prefix) : new PrefixQuery(prefix, method); } @@ -169,9 +167,7 @@ protected Query wildcardQuery( term = new Term(name(), indexedValueForSearch(value)); } if (caseInsensitive) { - return method == null - ? new CaseInsensitiveWildcardQuery(term) - : new CaseInsensitiveWildcardQuery(term, false, method); + return method == null ? new CaseInsensitiveWildcardQuery(term) : new CaseInsensitiveWildcardQuery(term, false, method); } return method == null ? 
new WildcardQuery(term) : new WildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, method); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java index 6e6691de5d0b3..547eea0eb2a02 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java @@ -18,6 +18,8 @@ import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; +import java.util.List; + public class IdFieldTypeTests extends ESTestCase { public void testRangeQuery() { @@ -48,7 +50,7 @@ public void testTermsQuery() { Mockito.when(context.indexVersionCreated()).thenReturn(IndexVersion.current()); MappedFieldType ft = new ProvidedIdFieldMapper.IdFieldType(() -> false); Query query = ft.termQuery("id", context); - assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); + assertEquals(new TermInSetQuery("_id", List.of(Uid.encodeId("id"))), query); } public void testIsAggregatable() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index 99b0582331a65..3cd974609557f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -109,7 +109,7 @@ protected TokenStream normalize(String fieldName, TokenStream in) { public void testTermsQuery() { MappedFieldType ft = new KeywordFieldType("field"); - BytesRef[] terms = new BytesRef[] { new BytesRef("foo"), new BytesRef("bar") }; + List terms = List.of(new BytesRef("foo"), new BytesRef("bar")); assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT)); MappedFieldType ft2 = new KeywordFieldType("field", false, true, Map.of()); diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java index 6c71d7c44dc33..7fb5f082c29b6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java @@ -80,7 +80,7 @@ public void testTermQuery() { public void testTermsQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new TermInSetQuery(ft.name(), new BytesRef("key\0value1"), new BytesRef("key\0value2")); + Query expected = new TermInSetQuery(ft.name(), List.of(new BytesRef("key\0value1"), new BytesRef("key\0value2"))); List terms = new ArrayList<>(); terms.add("value1"); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java index 20da254657c1a..04f0563e433a2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java @@ -170,7 +170,7 @@ public void testNoMatchingField() throws IOException { } public void testQueryFiltering() throws IOException { - testAggregation(new TermInSetQuery("text", new BytesRef("test0"), new BytesRef("test1")), iw -> { + testAggregation(new TermInSetQuery("text", List.of(new BytesRef("test0"), new BytesRef("test1"))), iw -> { for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java index 107c2af11c4f1..04da5d406fbb9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java @@ -108,7 +108,7 @@ public void testQueries() throws Exception { QueryList queryList = QueryList.termQueryList(uidField, mock(SearchExecutionContext.class), inputTerms, KEYWORD); assertThat(queryList.getPositionCount(), equalTo(6)); assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2"))))); - assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", new BytesRef("c1"), new BytesRef("a2")))); + assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2"))))); assertThat(queryList.getQuery(2), equalTo(new TermQuery(new Term("uid", new BytesRef("z2"))))); assertNull(queryList.getQuery(3)); assertThat(queryList.getQuery(4), equalTo(new TermQuery(new Term("uid", new BytesRef("a3"))))); From f0a1818b551e4588aa70552d4dac22d08a49532b Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 20:21:53 +0100 Subject: [PATCH 108/417] KnnVectorsReader is no longer Accountable --- .../index/codec/vectors/ES813FlatVectorFormat.java | 5 ----- .../index/codec/vectors/ES813Int8FlatVectorFormat.java | 6 ------ 2 files changed, 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index 0655cdc8376e8..0e3ad8901e25f 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -144,10 +144,5 @@ public void search(String field, byte[] 
target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index 557b803875b31..86ff47ef066ca 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -152,11 +152,5 @@ public void search(String field, byte[] target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } - } } From 19f1cb5d45fdfde8cacdbb6ab414374734e94777 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 20:30:48 +0100 Subject: [PATCH 109/417] KnnVectorsFormat requires getMaxDimensions --- .../index/codec/vectors/ES813FlatVectorFormat.java | 6 ++++++ .../index/codec/vectors/ES813Int8FlatVectorFormat.java | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index 0e3ad8901e25f..27130129ebe81 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -28,6 +28,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -54,6 +55,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws 
IOException return new ES813FlatVectorReader(format.fieldsReader(state)); } + @Override + public int getMaxDimensions(String fieldName) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + static class ES813FlatVectorWriter extends KnnVectorsWriter { private final FlatVectorsWriter writer; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index 86ff47ef066ca..2be887ba7b71f 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -26,6 +26,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -57,6 +58,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException return new ES813FlatVectorReader(format.fieldsReader(state)); } + @Override + public int getMaxDimensions(String fieldName) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + @Override public String toString() { return NAME + "(name=" + NAME + ", innerFormat=" + format + ")"; From 4652f7e56da77026d54fd6d7fbf222bf6b3d4667 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 20:34:47 +0100 Subject: [PATCH 110/417] ES815BitFlatVectorFormat should implement getMaxDimensions --- .../index/codec/vectors/ES815BitFlatVectorFormat.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java index 86bc58c5862ee..4cff706981e47 100644 --- 
a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java @@ -14,6 +14,7 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -40,6 +41,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException return new ES813FlatVectorFormat.ES813FlatVectorReader(format.fieldsReader(state)); } + @Override + public int getMaxDimensions(String s) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + @Override public String toString() { return NAME; From 4159c569992d7c8bfabc2eecd4d03ceb97e90723 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 28 Aug 2024 20:56:55 +0100 Subject: [PATCH 111/417] Fix TranslogDirectoryReader --- .../index/engine/TranslogDirectoryReader.java | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index e054fc52b562e..3e94a57387e79 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -12,10 +12,10 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.ImpactsEnum; import 
org.apache.lucene.index.IndexCommit; @@ -151,6 +151,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + false, -1, Collections.emptyMap(), 0, @@ -170,6 +171,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + false, -1, Collections.emptyMap(), 0, @@ -189,6 +191,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.DOCS, DocValuesType.NONE, + false, -1, Collections.emptyMap(), 0, @@ -345,6 +348,11 @@ public NumericDocValues getNormValues(String field) throws IOException { return getDelegate().getNormValues(field); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return getDelegate().getDocValuesSkipper(field); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { return getDelegate().getFloatVectorValues(field); @@ -388,11 +396,6 @@ public LeafMetaData getMetaData() { return getDelegate().getMetaData(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - return getDelegate().getTermVectors(docID); - } - @Override public TermVectors termVectors() throws IOException { return getDelegate().termVectors(); @@ -428,11 +431,6 @@ public int maxDoc() { return 1; } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - private void readStoredFieldsDirectly(StoredFieldVisitor visitor) throws IOException { if (visitor.needsField(FAKE_SOURCE_FIELD) == StoredFieldVisitor.Status.YES) { BytesReference sourceBytes = operation.source(); From 2efd1a09a08b3aeaba87b8839db5ca97dbe57ad5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 29 Aug 2024 06:11:22 +0000 Subject: [PATCH 112/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-f23711a3e36 --- build-tools-internal/version.properties | 2 +- 
gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 48221df22a639..90659d580a9fe 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-3be89ea2d59 +lucene = 9.12.0-snapshot-f23711a3e36 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d2b6674f9aba6..5d1f4ca009816 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From f535d48d08c910762fd0797bd4f2bdda6cac325b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 29 Aug 2024 06:13:01 +0000 Subject: [PATCH 113/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-e45b5ebdf88 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index cc95244609eab..878d2d1042eb7 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-0f5359a2ac6 +lucene = 10.0.0-snapshot-e45b5ebdf88 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 
a15a1a9ccc35b..1fc813512bfc1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2836,129 +2836,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 93628bcbc4136e3f4af1d79a163e417d20a06d51 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 29 Aug 2024 08:39:08 +0200 Subject: [PATCH 114/417] Replace `BooleanQuery#TooManyClauses`, `BooleanQuery#getMaxClauseCount()`, `BooleanQuery#setMaxClauseCount()` by `IndexSearcher#TooManyClauses`, `IndexSearcher#getMaxClauseCount()`, `IndexSearcher#setMaxClauseCount()` --- .../elasticsearch/percolator/PercolatorFieldMapper.java | 2 +- .../elasticsearch/percolator/CandidateQueryTests.java | 6 +++--- .../percolator/PercolatorFieldMapperTests.java | 7 ++++--- .../search/SpanBooleanQueryRewriteWithMaxClause.java | 6 +++--- .../common/lucene/search/XMoreLikeThis.java | 5 +++-- .../org/elasticsearch/index/query/IntervalBuilder.java | 6 +++--- .../elasticsearch/index/query/TermsSetQueryBuilder.java | 5 ++--- .../org/elasticsearch/index/search/MatchQueryParser.java | 9 +++++---- .../index/query/MatchQueryBuilderTests.java | 9 +++++---- .../index/query/SpanMultiTermQueryBuilderTests.java | 6 +++--- .../org/elasticsearch/xpack/graph/test/GraphTests.java | 4 ++-- .../xpack/graph/action/TransportGraphExploreAction.java | 6 +++--- .../pivot/CompositeBucketsChangeCollector.java | 4 ++-- 13 files changed, 39 insertions(+), 36 deletions(-) diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index ad936a5491b69..c20c6dce71c87 100644 --- 
a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -296,7 +296,7 @@ Tuple createCandidateQuery(IndexReader indexReader) throw List extractedTerms = t.v1(); Map> encodedPointValuesByField = t.v2(); // `1 + ` is needed to take into account the EXTRACTION_FAILED should clause - boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery + boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= IndexSearcher .getMaxClauseCount(); List subQueries = new ArrayList<>(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 30f2ddd9db7f1..108c2b3f5b635 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -945,10 +945,10 @@ public void testPercolateSmallAndLargeDocument() throws Exception { } // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery: - int origMaxClauseCount = BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try (Directory directory = new ByteBuffersDirectory()) { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { Document document = new Document(); for (int i = 0; i < maxClauseCount; i++) { @@ -983,7 +983,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception { assertEquals(2, topDocs.scoreDocs[1].doc); } } finally { - BooleanQuery.setMaxClauseCount(origMaxClauseCount); + 
IndexSearcher.setMaxClauseCount(origMaxClauseCount); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 4adc7f9b5ba27..ef805cbaa27af 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -416,10 +417,10 @@ public void testExtractTermsAndRanges() throws Exception { } public void testCreateCandidateQuery() throws Exception { - int origMaxClauseCount = BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); addQueryFieldMappings(); MemoryIndex memoryIndex = new MemoryIndex(false); @@ -448,7 +449,7 @@ public void testCreateCandidateQuery() throws Exception { assertThat(t.v1().clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field: collectTerms(IndexReader reader, MultiTermQuery qu + query.toString() + " ] " + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " - + BooleanQuery.getMaxClauseCount() + + IndexSearcher.getMaxClauseCount() + "]" ); } else { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index f8d0c81466dcc..54cd4c9946f62 100644 --- 
a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -207,7 +208,7 @@ public final class XMoreLikeThis { /** * Return a Query with no more than this many terms. * - * @see BooleanQuery#getMaxClauseCount + * @see IndexSearcher#getMaxClauseCount * @see #setMaxQueryTerms */ public static final int DEFAULT_MAX_QUERY_TERMS = 25; @@ -468,7 +469,7 @@ private void addToQuery(PriorityQueue q, BooleanQuery.Builder query) try { query.add(tq, BooleanClause.Occur.SHOULD); - } catch (BooleanQuery.TooManyClauses ignore) { + } catch (IndexSearcher.TooManyClauses ignore) { break; } } diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java index 6f75702032c75..729cdffd557a6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -19,7 +19,7 @@ import org.apache.lucene.queries.intervals.IntervalMatchesIterator; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; @@ -188,7 +188,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept List clauses = new ArrayList<>(); int[] articulationPoints = 
graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -203,7 +203,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept TokenStream ts = it.next(); IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, true); if (paths.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } paths.add(phrase); } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index da0447a0f864d..54f7cb4580a62 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.sandbox.search.CoveringQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; @@ -272,8 +271,8 @@ protected Query doToQuery(SearchExecutionContext context) { return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); } // Fail before we attempt to create the term queries: - if (values.size() > BooleanQuery.getMaxClauseCount()) { - throw new BooleanQuery.TooManyClauses(); + if (values.size() > IndexSearcher.getMaxClauseCount()) { + throw new IndexSearcher.TooManyClauses(); } List queries = createTermQueries(context); diff --git a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java index dd08eb7cc31b2..9a83e3ce80d66 100644 --- 
a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostAttribute; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.QueryBuilder; @@ -689,7 +690,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -707,7 +708,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in SpanQuery q = createSpanQuery(ts, field, usePrefix); if (q != null) { if (queries.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queries.add(q); } @@ -721,14 +722,14 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in Term[] terms = graph.getTerms(field, start); assert terms.length > 0; if (terms.length >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queryPos = newSpanQuery(terms, usePrefix); } if (queryPos != null) { if (clauses.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } clauses.add(queryPos); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 278d4ae505bdc..e2d0aec72d7bd 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; @@ -526,9 +527,9 @@ public void testAliasWithSynonyms() throws Exception { public void testMaxBooleanClause() { MatchQueryParser query = new MatchQueryParser(createSearchExecutionContext()); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraph(40))); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraphMultiTerms())); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); } private static class MockGraphAnalyzer extends Analyzer { @@ -566,7 +567,7 @@ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraph(int numPos } /** - * Creates a graph token stream with {@link BooleanQuery#getMaxClauseCount()} + * Creates a graph token stream with {@link IndexSearcher#getMaxClauseCount()} * expansions at the last position. 
**/ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms() { @@ -577,7 +578,7 @@ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms( tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); - for (int i = 0; i < BooleanQuery.getMaxClauseCount(); i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount(); i++) { tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 1)); } return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 1257fa43f3216..6f3fd47a0073c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -219,7 +219,7 @@ public void testToQueryInnerTermQuery() throws IOException { assertThat(prefixQuery.getPrefix().text(), equalTo("f")); assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); SpanBooleanQueryRewriteWithMaxClause rewrite = (SpanBooleanQueryRewriteWithMaxClause) wrapper.getRewriteMethod(); - assertThat(rewrite.getMaxExpansions(), equalTo(BooleanQuery.getMaxClauseCount())); + assertThat(rewrite.getMaxExpansions(), equalTo(IndexSearcher.getMaxClauseCount())); assertTrue(rewrite.isHardLimit()); } } @@ -264,8 +264,8 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { iw.addDocument(singleton(new TextField("body", "foo bar" + Integer.toString(i), Field.Store.NO))); } try (IndexReader reader = iw.getReader()) { - int origBoolMaxClauseCount = BooleanQuery.getMaxClauseCount(); - BooleanQuery.setMaxClauseCount(1); + int origBoolMaxClauseCount = 
IndexSearcher.getMaxClauseCount(); + IndexSearcher.setMaxClauseCount(1); try { QueryBuilder queryBuilder = new SpanMultiTermQueryBuilder(QueryBuilders.prefixQuery("body", "bar")); IndexSearcher searcher = newSearcher(reader); diff --git a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 3623d3671e83f..6d90b0e67ee83 100644 --- a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.graph.test; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; @@ -165,7 +165,7 @@ public void testLargeNumberTermsStartCrawl() { VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); peopleNames.addInclude("john", 1); - for (int i = 0; i < BooleanQuery.getMaxClauseCount() + 1; i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount() + 1; i++) { peopleNames.addInclude("unknown" + i, 1); } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 36e8eaf94c8be..b60ce13e0228c 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.ExceptionsHelper; @@ -564,7 +564,7 @@ private static void addBigOrClause(Map> lastHopFindings, Boo for (Entry> entry : lastHopFindings.entrySet()) { numClauses += entry.getValue().size(); } - if (numClauses < BooleanQuery.getMaxClauseCount()) { + if (numClauses < IndexSearcher.getMaxClauseCount()) { // We can afford to build a Boolean OR query with individual // boosts for interesting terms for (Entry> entry : lastHopFindings.entrySet()) { @@ -755,7 +755,7 @@ private double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) { private static void addNormalizedBoosts(BoolQueryBuilder includesContainer, VertexRequest vr) { TermBoost[] termBoosts = vr.includeValues(); - if ((includesContainer.should().size() + termBoosts.length) > BooleanQuery.getMaxClauseCount()) { + if ((includesContainer.should().size() + termBoosts.length) > IndexSearcher.getMaxClauseCount()) { // Too many terms - we need a cheaper form of query to execute this List termValues = new ArrayList<>(); for (TermBoost tb : termBoosts) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index 684e3a085405d..68b31d4f466b6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.transforms.pivot; -import org.apache.lucene.search.BooleanQuery; +import 
org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.geo.GeoPoint; @@ -560,7 +560,7 @@ static class GeoTileFieldCollector implements FieldCollector { @Override public int getMaxPageSize() { // this collector is limited by indices.query.bool.max_clause_count, default 1024 - return BooleanQuery.getMaxClauseCount(); + return IndexSearcher.getMaxClauseCount(); } @Override From 3210526abf9675121712618cacb7e699e17ed401 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 29 Aug 2024 08:56:10 +0200 Subject: [PATCH 115/417] Replace BooleanClause#getQuery by BooleanClause#query and BooleanClause#getOccur by BooleanClause#occur --- .../join/query/HasChildQueryBuilderTests.java | 10 +++++----- .../org/elasticsearch/percolator/QueryAnalyzer.java | 2 +- .../percolator/PercolatorFieldMapperTests.java | 10 +++++----- .../elasticsearch/common/lucene/search/Queries.java | 2 +- .../org/elasticsearch/index/search/NestedHelper.java | 12 ++++++------ .../search/morelikethis/XMoreLikeThisTests.java | 4 ++-- .../index/query/BoolQueryBuilderTests.java | 10 +++++----- .../index/query/CombinedFieldsQueryParsingTests.java | 4 ++-- .../index/query/ExistsQueryBuilderTests.java | 4 ++-- .../query/MatchBoolPrefixQueryBuilderTests.java | 4 ++-- .../index/query/MatchQueryBuilderTests.java | 2 +- .../index/query/QueryStringQueryBuilderTests.java | 8 ++++---- .../index/query/SimpleQueryStringBuilderTests.java | 2 +- .../ml/search/WeightedTokensQueryBuilderTests.java | 6 +++--- .../inference/queries/SemanticQueryBuilderTests.java | 8 ++++---- .../ml/queries/SparseVectorQueryBuilderTests.java | 4 ++-- .../ml/queries/TextExpansionQueryBuilderTests.java | 4 ++-- .../xpack/wildcard/mapper/WildcardFieldMapper.java | 6 +++--- .../wildcard/mapper/WildcardFieldMapperTests.java | 6 +++--- 19 files changed, 54 insertions(+), 54 deletions(-) diff --git 
a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index 80523e2d0223b..e55e624f8df8e 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -341,13 +341,13 @@ static void assertLateParsingQuery(Query query, String type, String id) throws I BooleanQuery booleanQuery = (BooleanQuery) lateParsingQuery.getInnerQuery(); assertThat(booleanQuery.clauses().size(), equalTo(2)); // check the inner ids query, we have to call rewrite to get to check the type it's executed against - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); - TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).getQuery(); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(booleanQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); + TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).query(); assertEquals(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(id))), termsQuery); // check the type filter - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.FILTER)); - assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).getQuery()); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.FILTER)); + assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).query()); } @Override diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 
84fb889c6dd5a..4332268a23c9e 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -161,7 +161,7 @@ public QueryVisitor getSubVisitor(Occur occur, Query parent) { int minimumShouldMatchValue = 0; if (parent instanceof BooleanQuery bq) { if (bq.getMinimumNumberShouldMatch() == 0 - && bq.clauses().stream().anyMatch(c -> c.getOccur() == Occur.MUST || c.getOccur() == Occur.FILTER)) { + && bq.clauses().stream().anyMatch(c -> c.occur() == Occur.MUST || c.occur() == Occur.FILTER)) { return QueryVisitor.EMPTY_VISITOR; } minimumShouldMatchValue = bq.getMinimumNumberShouldMatch(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index ef805cbaa27af..729bbc8aa7850 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -435,8 +435,8 @@ public void testCreateCandidateQuery() throws Exception { Tuple t = fieldType.createCandidateQuery(indexReader); assertTrue(t.v2()); assertEquals(2, t.v1().clauses().size()); - assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); - assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class)); + assertThat(t.v1().clauses().get(0).query(), instanceOf(CoveringQuery.class)); + assertThat(t.v1().clauses().get(1).query(), instanceOf(TermQuery.class)); // Now push it over the edge, so that it falls back using TermInSetQuery memoryIndex.addField("field2", "value", new WhitespaceAnalyzer()); @@ -444,10 +444,10 @@ public void testCreateCandidateQuery() throws Exception { t = fieldType.createCandidateQuery(indexReader); assertFalse(t.v2()); assertEquals(3, t.v1().clauses().size()); - 
TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).getQuery(); + TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).query(); assertEquals(maxClauseCount - 1, terms.getTermData().size()); - assertThat(t.v1().clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field: c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(this::mightMatchNestedDocs); } } else if (query instanceof ESToParentBlockJoinQuery) { @@ -136,13 +136,13 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } } else { diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java index 95feefa623572..f60018cf24e43 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java @@ -116,7 +116,7 @@ public boolean incrementToken() throws IOException { final double boost10 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|10 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -124,7 +124,7 @@ public boolean incrementToken() throws IOException { 
final double boost1 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|1 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index a4cc96a2063dc..154f29174bb95 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -206,15 +206,15 @@ public void testMinShouldMatchFilterWithoutShouldClauses() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(booleanQuery.clauses().size(), equalTo(1)); BooleanClause booleanClause = booleanQuery.clauses().get(0); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER)); - assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class)); - BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery(); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.FILTER)); + assertThat(booleanClause.query(), instanceOf(BooleanQuery.class)); + BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.query(); // we didn't set minimum should match initially, there are no should clauses so it should be 0 assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(innerBooleanQuery.clauses().size(), equalTo(1)); BooleanClause innerBooleanClause = innerBooleanQuery.clauses().get(0); - assertThat(innerBooleanClause.getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(innerBooleanClause.getQuery(), instanceOf(MatchAllDocsQuery.class)); + assertThat(innerBooleanClause.occur(), equalTo(BooleanClause.Occur.MUST)); + 
assertThat(innerBooleanClause.query(), instanceOf(MatchAllDocsQuery.class)); } public void testMinShouldMatchBiggerThanNumberOfShouldClauses() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java index 42bf39f8e58ab..da1e1c47c2027 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java @@ -163,8 +163,8 @@ public void testOperator() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(minimumShouldMatch)); assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(occur)); - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(occur)); } public void testQueryBoost() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index 6ddbbf25c9871..bf9b48e2216e8 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -67,7 +67,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, Collection childFields = context.getMatchingFieldNames(field + ".*"); assertThat(booleanQuery.clauses().size(), equalTo(childFields.size())); for (BooleanClause booleanClause : booleanQuery) { - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } else if 
(context.getFieldType(field).hasDocValues() || context.getFieldType(field).getTextSearchInfo().hasNorms()) { assertThat(constantScoreQuery.getQuery(), instanceOf(FieldExistsQuery.class)); @@ -86,7 +86,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, assertThat(booleanQuery.clauses().size(), equalTo(fields.size())); for (int i = 0; i < fields.size(); i++) { BooleanClause booleanClause = booleanQuery.clauses().get(i); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java index 0544498f72296..70393efa7af53 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -100,7 +100,7 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que // all queries except the last should be TermQuery or SynonymQuery final Set allQueriesExceptLast = IntStream.range(0, booleanQuery.clauses().size() - 1) .mapToObj(booleanQuery.clauses()::get) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .collect(Collectors.toSet()); assertThat( allQueriesExceptLast, @@ -127,7 +127,7 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que if (queryBuilder.minimumShouldMatch() != null) { final int optionalClauses = (int) booleanQuery.clauses() .stream() - .filter(clause -> clause.getOccur() == BooleanClause.Occur.SHOULD) + .filter(clause -> clause.occur() == BooleanClause.Occur.SHOULD) .count(); final int expected = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch()); assertThat(booleanQuery.getMinimumNumberShouldMatch(), 
equalTo(expected)); diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index e2d0aec72d7bd..e872f60b090ba 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -163,7 +163,7 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, // calculate expected minimumShouldMatch value int optionalClauses = 0; for (BooleanClause c : bq.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 160d1ec83f91f..f3839a08f7995 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -924,10 +924,10 @@ public void testToQueryBooleanQueryMultipleBoosts() throws Exception { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(0).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "foo")))); - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(1).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(0).query(), equalTo(new TermQuery(new 
Term(TEXT_FIELD_NAME, "foo")))); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(1).query(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); } public void testToQueryPhraseQueryBoostAndSlop() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 76fc2d493d897..12af5c57ab326 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -314,7 +314,7 @@ protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query private static int shouldClauses(BooleanQuery query) { int result = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { result++; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java index bb727204e2651..114ad90354c61 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java @@ -279,7 +279,7 @@ private void assertCorrectLuceneQuery(String name, Query query, List exp booleanClauses.size() ); for (int i = 0; i < booleanClauses.size(); i++) { - Query clauseQuery = booleanClauses.get(i).getQuery(); + Query clauseQuery = booleanClauses.get(i).query(); assertTrue(name + " query " + query + " expected to be a BoostQuery", clauseQuery instanceof BoostQuery); // FeatureQuery is not visible so we check the String 
representation assertTrue(name + " query " + query + " expected to be a FeatureQuery", clauseQuery.toString().contains("FeatureQuery")); @@ -353,8 +353,8 @@ protected void doAssertLuceneQuery(WeightedTokensQueryBuilder queryBuilder, Quer Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index c2b99923bae61..abc2de5b0d929 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -193,9 +193,9 @@ private void assertSparseEmbeddingLuceneQuery(Query query) { BooleanQuery innerBooleanQuery = (BooleanQuery) innerQuery; assertThat(innerBooleanQuery.clauses().size(), equalTo(queryTokenCount)); innerBooleanQuery.forEach(c -> { - assertThat(c.getOccur(), equalTo(SHOULD)); - assertThat(c.getQuery(), instanceOf(BoostQuery.class)); - assertThat(((BoostQuery) c.getQuery()).getBoost(), equalTo(TOKEN_WEIGHT)); + assertThat(c.occur(), equalTo(SHOULD)); + assertThat(c.query(), instanceOf(BoostQuery.class)); + assertThat(((BoostQuery) c.query()).getBoost(), equalTo(TOKEN_WEIGHT)); }); } @@ -217,7 +217,7 @@ private Query assertOuterBooleanQuery(Query query) { List outerMustClauses = new ArrayList<>(); List outerFilterClauses = new ArrayList<>(); for (BooleanClause clause : 
outerBooleanQuery.clauses()) { - BooleanClause.Occur occur = clause.getOccur(); + BooleanClause.Occur occur = clause.occur(); if (occur == MUST) { outerMustClauses.add(clause); } else if (occur == FILTER) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java index 3d17d8dd23ff6..13cf6d87728a8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java @@ -166,8 +166,8 @@ protected void doAssertLuceneQuery(SparseVectorQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index 8da6fc843614e..00d50e0d0d7bb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -139,8 +139,8 @@ protected void doAssertLuceneQuery(TextExpansionQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - 
assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 13f49204fa398..c5b3b63427780 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -548,9 +548,9 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); int clauseCount = 0; for (BooleanClause clause : bq) { - Query q = rewriteBoolToNgramQuery(clause.getQuery()); + Query q = rewriteBoolToNgramQuery(clause.query()); if (q != null) { - if (clause.getOccur().equals(Occur.FILTER)) { + if (clause.occur().equals(Occur.FILTER)) { // Can't drop "should" clauses because it can elevate a sibling optional item // to mandatory (shoulds with 1 clause) causing false negatives // Dropping MUSTs increase false positives which are OK because are verified anyway. 
@@ -559,7 +559,7 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { break; } } - rewritten.add(q, clause.getOccur()); + rewritten.add(q, clause.occur()); } } return rewritten.build(); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index ef4f80137e15e..1cdaf23e8070d 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -920,11 +920,11 @@ private Query rewriteFiltersToMustsForComparisonPurposes(Query q) { if (q instanceof BooleanQuery bq) { BooleanQuery.Builder result = new BooleanQuery.Builder(); for (BooleanClause cq : bq.clauses()) { - Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.getQuery()); - if (cq.getOccur() == Occur.FILTER) { + Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.query()); + if (cq.occur() == Occur.FILTER) { result.add(rewritten, Occur.MUST); } else { - result.add(rewritten, cq.getOccur()); + result.add(rewritten, cq.occur()); } } return result.build(); From e9824067b53b72c47291c7c372ab9b538bbd9262 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 29 Aug 2024 09:00:16 +0200 Subject: [PATCH 116/417] Replace BooleanCaluse#getquery by BooleanCaluse#query and BooleanCaluse#getOccur by BooleanCaluse#occur (cont) --- .../index/query/MatchBoolPrefixQueryBuilderTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java index 70393efa7af53..a0bac9340ae72 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -121,7 +121,7 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que }); // the last query should be PrefixQuery - final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).getQuery(); + final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).query(); assertThat(shouldBePrefixQuery, instanceOf(PrefixQuery.class)); if (queryBuilder.minimumShouldMatch() != null) { @@ -268,7 +268,7 @@ private static void assertBooleanQuery(Query actual, List expectedClauseQ assertThat(actualBooleanQuery.clauses(), everyItem(hasProperty("occur", equalTo(BooleanClause.Occur.SHOULD)))); for (int i = 0; i < actualBooleanQuery.clauses().size(); i++) { - final Query clauseQuery = actualBooleanQuery.clauses().get(i).getQuery(); + final Query clauseQuery = actualBooleanQuery.clauses().get(i).query(); assertThat(clauseQuery, equalTo(expectedClauseQueries.get(i))); } } From 85b4797192ed073244e59e2b7ad4cb2400ea3de8 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 08:37:57 +0100 Subject: [PATCH 117/417] Replace removed constants in NodeConstruction --- .../main/java/org/elasticsearch/node/NodeConstruction.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index ec0d293dc0064..7dea27ace8924 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -400,8 +400,8 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr Constants.OS_ARCH, Constants.JVM_VENDOR, Constants.JVM_NAME, - Constants.JAVA_VERSION, - 
Constants.JVM_VERSION + System.getProperty("java.version"), + Runtime.version().toString() ); logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments())); From a054ed85d139df9d5c7ebb83cbeecce3129d132c Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 08:44:45 +0100 Subject: [PATCH 118/417] Override abstract getMaxDimensions in ES815HnswBitVectorsFormat --- .../index/codec/vectors/ES815HnswBitVectorsFormat.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java index f7884c0b73688..899ae6d7e83b2 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java @@ -16,6 +16,7 @@ import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsWriter; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -61,6 +62,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException return new Lucene99HnswVectorsReader(state, flatVectorsFormat.fieldsReader(state)); } + @Override + public int getMaxDimensions(String s) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + @Override public String toString() { return "ES815HnswBitVectorsFormat(name=ES815HnswBitVectorsFormat, maxConn=" From c9c8c95872eda6d419dcd34fcb6489e8f2111ba4 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 08:50:40 +0100 Subject: [PATCH 119/417] Fix overrides in DocumentLeafReader --- .../index/mapper/DocumentLeafReader.java | 18 +++++++----------- 1 file 
changed, 7 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index db90c8f052a5e..83c7a775d26ce 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -10,10 +10,10 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -146,11 +146,6 @@ public FieldInfos getFieldInfos() { return new FieldInfos(new FieldInfo[0]); } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { return new StoredFields() { @@ -202,6 +197,11 @@ public NumericDocValues getNormValues(String field) throws IOException { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String s) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -232,11 +232,6 @@ public LeafMetaData getMetaData() { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public int numDocs() { throw new UnsupportedOperationException(); @@ -283,6 +278,7 @@ private static FieldInfo 
fieldInfo(String name) { false, IndexOptions.NONE, DocValuesType.NONE, + false, -1, Collections.emptyMap(), 0, From d357adbc97202aaa2205a30dcbdc286ef68ea4b5 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 09:02:04 +0100 Subject: [PATCH 120/417] Fix IOContext construction in Store --- server/src/main/java/org/elasticsearch/index/store/Store.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 07406a57903d8..f0a645b3cf110 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -32,6 +32,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; @@ -146,7 +147,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. */ - public static final IOContext READONCE_CHECKSUM = new IOContext(IOContext.READONCE, true); + public static final IOContext READONCE_CHECKSUM = new IOContext(IOContext.Context.DEFAULT, null, null, ReadAdvice.SEQUENTIAL); private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; From 4f1ed5228351e5703678e9525a05c2e3c95315b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 19:21:00 +0200 Subject: [PATCH 121/417] Remove Scorable#docID implementations This method was removed in https://github.com/apache/lucene/pull/12407 so we also need to remove it in implementations of Scorable. 
--- .../painless/ScriptedMetricAggContextsTests.java | 5 ----- .../join/aggregations/ParentJoinAggregator.java | 5 ----- .../aggregations/bucket/nested/NestedAggregator.java | 7 ------- .../bucket/sampler/BestDocsDeferringCollector.java | 8 -------- .../search/aggregations/MultiBucketCollectorTests.java | 10 ++-------- .../search/query/QueryPhaseCollectorTests.java | 10 ---------- .../search/sort/BucketedSortForFloatsTests.java | 6 ------ 7 files changed, 2 insertions(+), 49 deletions(-) diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index 2d3f09fc7243a..8eae139eb8226 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -73,11 +73,6 @@ public void testMapBasic() throws IOException { Map state = new HashMap<>(); Scorable scorer = new Scorable() { - @Override - public int docID() { - return 0; - } - @Override public float score() { return 0.5f; diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index d93a17d58d6f3..428a5d4bf19a2 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -133,11 +133,6 @@ protected void prepareSubAggs(long[] ordsToCollect) throws IOException { public float score() { return 1f; } - - @Override - public int docID() { - return childDocsIter.docID(); - } }); final Bits liveDocs = ctx.reader().getLiveDocs(); diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 39dfd6e4aac3a..28e010f541a74 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -206,19 +206,12 @@ void processBufferedChildBuckets() throws IOException { } private static class CachedScorable extends Scorable { - int doc; float score; @Override public final float score() { return score; } - - @Override - public int docID() { - return doc; - } - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index ccb1095ce37f4..adfd7c56e36b6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -213,7 +213,6 @@ class PerSegmentCollects extends Scorable { private final AggregationExecutionContext aggCtx; int maxDocId = Integer.MIN_VALUE; private float currentScore; - private int currentDocId = -1; private Scorable currentScorer; PerSegmentCollects(AggregationExecutionContext aggCtx) throws IOException { @@ -248,7 +247,6 @@ public void replayRelatedMatches(List sd) throws IOException { leafCollector.setScorer(this); currentScore = 0; - currentDocId = -1; if (maxDocId < 0) { return; } @@ -258,7 +256,6 @@ public void replayRelatedMatches(List sd) throws IOException { int rebased = scoreDoc.doc - aggCtx.getLeafReaderContext().docBase; if ((rebased >= 0) && (rebased <= maxDocId)) { currentScore = scoreDoc.score; - currentDocId = rebased; // We stored the bucket ID in Lucene's 
shardIndex property // for convenience. leafCollector.collect(rebased, scoreDoc.shardIndex); @@ -275,11 +272,6 @@ public float score() throws IOException { return currentScore; } - @Override - public int docID() { - return currentDocId; - } - public void collect(int docId, long parentBucket) throws IOException { perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1); PerParentBucketSamples sampler = perBucketSamples.get((int) parentBucket); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index cfb9c4bb83249..ff4ad059559fc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -32,14 +32,8 @@ import static org.hamcrest.Matchers.equalTo; public class MultiBucketCollectorTests extends ESTestCase { - private static class ScoreAndDoc extends Scorable { + private static class Score extends Scorable { float score; - int doc = -1; - - @Override - public int docID() { - return doc; - } @Override public float score() { @@ -246,7 +240,7 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { collector1 = new TerminateAfterBucketCollector(collector1, 1); collector2 = new TerminateAfterBucketCollector(collector2, 2); - Scorable scorer = new ScoreAndDoc(); + Scorable scorer = new Score(); List collectors = Arrays.asList(collector1, collector2); Collections.shuffle(collectors, random()); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index f222e697488d2..dbfd9d83ee887 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -1138,11 +1138,6 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { public float score() { return 0; } - - @Override - public int docID() { - return 0; - } }; QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( @@ -1472,11 +1467,6 @@ public float score() throws IOException { return 0; } - @Override - public int docID() { - return 0; - } - @Override public void setMinCompetitiveScore(float minScore) { setMinCompetitiveScoreCalled = true; diff --git a/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForFloatsTests.java b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForFloatsTests.java index 0f088d2948fcb..7f136a097e24a 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForFloatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/BucketedSortForFloatsTests.java @@ -120,18 +120,12 @@ public void testScorer() throws IOException { } private class MockScorable extends Scorable { - private int doc; private float score; @Override public float score() throws IOException { return score; } - - @Override - public int docID() { - return doc; - } } /** From fe525fffe9b8bc648c7e488c2b85fd396f295120 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 09:30:38 +0100 Subject: [PATCH 122/417] Fix SpanRewriteMethod overload --- .../search/SpanBooleanQueryRewriteWithMaxClause.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 28dd6609efd10..182037a7884de 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ 
b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -58,10 +58,11 @@ public boolean isHardLimit() { } @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public SpanQuery rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { @Override - public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public Query rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { + IndexReader reader = indexSearcher.getIndexReader(); Collection queries = collectTerms(reader, query); if (queries.size() == 0) { return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); @@ -111,6 +112,6 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu return queries; } }; - return (SpanQuery) delegate.rewrite(reader, query); + return (SpanQuery) delegate.rewrite(indexSearcher, query); } } From 135d1f068da97d9551a9fdcac6d07851c22a757c Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 09:35:13 +0100 Subject: [PATCH 123/417] Fix LegacyTypeFieldMapper to use Collection --- .../org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java index 359faf2d6f4b0..bcc45200d05cb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java @@ -10,7 +10,6 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; 
import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; @@ -69,7 +68,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + var bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } From b48c4c0efec7faa2ff429e1fefcc4823157b8383 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 23:25:35 +0200 Subject: [PATCH 124/417] Fix missing CheckIndex#setChecksumsOnly method This method was removed in https://github.com/apache/lucene/pull/12797 in favour of a new "level" setting. For the checksums-only checks we do in ES we only need to set the CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS level. --- .../elasticsearch/gateway/PersistedClusterStateService.java | 2 +- .../index/shard/RemoveCorruptedLuceneSegmentsAction.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 49ac38d656278..595b60e5a649b 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -446,7 +446,7 @@ OnDiskState loadBestOnDiskState(boolean checkClean) throws IOException { // resources during test execution checkIndex.setThreadCount(1); checkIndex.setInfoStream(printStream); - checkIndex.setChecksumsOnly(true); + checkIndex.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); isClean = checkIndex.checkIndex().clean; } diff --git 
a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java index 03fc7f428222d..31e98135932c6 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java @@ -32,7 +32,7 @@ public static Tuple getClea final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); @@ -63,7 +63,7 @@ public static void execute(Terminal terminal, Directory indexDirectory, Lock wri final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); From 73a15a7885421ce5669e85d358ea5d2bae35e054 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 28 Aug 2024 23:44:30 +0200 Subject: [PATCH 125/417] Fix BooleanQuery deprecated methods (ctd) From migrate.md in Lucene: `BooleanQuery#TooManyClauses`, `BooleanQuery#getMaxClauseCount()` and `BooleanQuery#setMaxClauseCount()` are deprecated. Use `IndexSearcher#TooManyClauses`, `IndexSearcher#getMaxClauseCount()`, `IndexSearcher#setMaxClauseCount()` instead. 
--- .../index/query/SpanMultiTermQueryBuilderTests.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 6f3fd47a0073c..ae2f7bf32f81b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -17,7 +17,6 @@ import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; @@ -273,7 +272,7 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { RuntimeException exc = expectThrows(RuntimeException.class, () -> query.rewrite(searcher)); assertThat(exc.getMessage(), containsString("maxClauseCount")); } finally { - BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount); + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); } } } From cec9f6c0fe043e4c448590496808bde408b61f17 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 09:56:45 +0100 Subject: [PATCH 126/417] Revert accidental param name change in FilterableTermsEnum --- .../elasticsearch/common/lucene/index/FilterableTermsEnum.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 3bc1949cdfcbf..5652f3c46ee84 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ 
b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -62,7 +62,7 @@ static class Holder { protected BytesRef current; protected final int docsEnumFlag; - public FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @Nullable Query f) throws IOException { + public FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @Nullable Query filter) throws IOException { if ((docsEnumFlag != PostingsEnum.FREQS) && (docsEnumFlag != PostingsEnum.NONE)) { throw new IllegalArgumentException("invalid docsEnumFlag of " + docsEnumFlag); } From fdd137cf4dde5b5b53c51f6587e571463f412acd Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 09:59:37 +0100 Subject: [PATCH 127/417] Fix construction of FieldInfo in DeduplicatingFieldInfosFormat --- .../elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index 75ec265a68391..e1bfb7a3ecbee 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -53,6 +53,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), + fi.hasDocValuesSkipIndex(), fi.getDocValuesGen(), internStringStringMap(fi.attributes()), fi.getPointDimensionCount(), From 942bedb7451ad1694887bdec33c0a6305d854dbb Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 10:02:43 +0100 Subject: [PATCH 128/417] Replace JAVA_VERSION with Runtime.version() --- .../main/java/org/elasticsearch/bootstrap/BootstrapChecks.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 84811362c08e6..2313b3df46448 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -700,7 +700,7 @@ String jvmVendor() { } String javaVersion() { - return Constants.JAVA_VERSION; + return Runtime.version().toString(); } @Override From 430aed2c78f878df47b271908d5b06a8cdec5356 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 29 Aug 2024 11:22:39 +0200 Subject: [PATCH 129/417] Replace BooleanCaluse#getquery by BooleanCaluse#query and BooleanCaluse#getOccur by BooleanCaluse#occur (cont) --- .../org/elasticsearch/percolator/CandidateQueryTests.java | 4 ++-- .../common/lucene/search/morelikethis/XMoreLikeThisTests.java | 2 +- .../index/query/CombinedFieldsQueryParsingTests.java | 4 ++-- .../xpack/inference/queries/SemanticQueryBuilderTests.java | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 108c2b3f5b635..41fbd0e10260f 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -929,7 +929,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); assertEquals(2L, topDocs.totalHits.value); assertEquals(2, topDocs.scoreDocs.length); @@ -968,7 +968,7 @@ 
public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); assertEquals(2L, topDocs.totalHits.value); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java index f60018cf24e43..c96da966f7aeb 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java @@ -177,7 +177,7 @@ public void testTopN() throws Exception { expectedTerms[idx++] = new Term("text", text); } for (BooleanClause clause : clauses) { - Term term = ((TermQuery) clause.getQuery()).getTerm(); + Term term = ((TermQuery) clause.query()).getTerm(); assertTrue(Arrays.asList(expectedTerms).contains(term)); } diff --git a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java index da1e1c47c2027..94c801ee9409a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java @@ -142,8 +142,8 @@ public void testWildcardFieldPattern() throws Exception { BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(CombinedFieldQuery.class)); - assertThat(booleanQuery.clauses().get(1).getQuery(), instanceOf(CombinedFieldQuery.class)); + 
assertThat(booleanQuery.clauses().get(0).query(), instanceOf(CombinedFieldQuery.class)); + assertThat(booleanQuery.clauses().get(1).query(), instanceOf(CombinedFieldQuery.class)); }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index abc2de5b0d929..25211cdb9cb87 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -230,7 +230,7 @@ private Query assertOuterBooleanQuery(Query query) { assertThat(outerMustClauses.size(), equalTo(1)); assertThat(outerFilterClauses.size(), equalTo(1)); - return outerMustClauses.get(0).getQuery(); + return outerMustClauses.get(0).query(); } @Override From 31b78fee4a9fef5743570ad11d33370a8fb30a1a Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 11:23:46 +0200 Subject: [PATCH 130/417] Remove offloading of TimeSeriesIndexSearcher to executor A separate executor is no longer needed, single sliced searches are executed on the caller thread, hence we can just use a null executor given that TimeSeriesIndexSearcher does not support inter-segment concurrency --- .../support/TimeSeriesIndexSearcher.java | 33 ++----------------- 1 file changed, 3 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 67bf8b79ef1b1..baf552408c6b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -21,7 +21,6 @@ import 
org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; @@ -37,9 +36,6 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.FutureTask; -import java.util.concurrent.RunnableFuture; import java.util.function.IntSupplier; import static org.elasticsearch.index.IndexSortConfig.TIME_SERIES_SORT; @@ -67,10 +63,7 @@ public TimeSeriesIndexSearcher(IndexSearcher searcher, List cancellati searcher.getSimilarity(), searcher.getQueryCache(), searcher.getQueryCachingPolicy(), - false, - searcher.getExecutor(), - 1, - -1 + false ); } catch (IOException e) { // IOException from wrapping the index searcher which should never happen. @@ -93,28 +86,8 @@ public void setMinimumScore(Float minimumScore) { public void search(Query query, BucketCollector bucketCollector) throws IOException { query = searcher.rewrite(query); Weight weight = searcher.createWeight(query, bucketCollector.scoreMode(), 1); - if (searcher.getExecutor() == null) { - search(bucketCollector, weight); - bucketCollector.postCollection(); - return; - } - // offload to the search worker thread pool whenever possible. 
It will be null only when search.worker_threads_enabled is false - RunnableFuture task = new FutureTask<>(() -> { - search(bucketCollector, weight); - bucketCollector.postCollection(); - return null; - }); - searcher.getExecutor().execute(task); - try { - task.get(); - } catch (InterruptedException e) { - throw new ThreadInterruptedException(e); - } catch (ExecutionException e) { - if (e.getCause() instanceof RuntimeException runtimeException) { - throw runtimeException; - } - throw new RuntimeException(e.getCause()); - } + search(bucketCollector, weight); + bucketCollector.postCollection(); } private void search(BucketCollector bucketCollector, Weight weight) throws IOException { From 00d5ea0e329beb60ec2937f9d227dcbcae7f619b Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 11:33:28 +0200 Subject: [PATCH 131/417] Replace NO_MORE_ORDS usage in DocumentLeafReader --- .../org/elasticsearch/index/mapper/DocumentLeafReader.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index 83c7a775d26ce..a9aa531ca62d7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -479,9 +479,7 @@ private static SortedSetDocValues sortedSetDocValues(List values) { @Override public long nextOrd() { i++; - if (i >= values.size()) { - return NO_MORE_ORDS; - } + assert i < values.size(); return i; } From 272397dc1c37d5b97985162b0499dd02852b1941 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 11:42:47 +0200 Subject: [PATCH 132/417] Remove NO_MORE_ORDS usages in KeyedFlattenedLeafFieldData --- .../flattened/KeyedFlattenedLeafFieldData.java | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index 7acf32db2a5aa..c7bd0645010d0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -204,12 +204,8 @@ public long nextOrd() throws IOException { } long ord = delegate.nextOrd(); - if (ord != NO_MORE_ORDS && ord <= maxOrd) { - assert ord >= minOrd; - return mapOrd(ord); - } else { - return NO_MORE_ORDS; - } + assert ord <= maxOrd; + return mapOrd(ord); } @Override @@ -224,7 +220,7 @@ public boolean advanceExact(int target) throws IOException { int count = 0; while (true) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } if (ord >= minOrd) { @@ -245,7 +241,7 @@ public boolean advanceExact(int target) throws IOException { while (true) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } From 2b1dd2b18a6238d0d2323eaad88d1739ea5fceed Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 11:45:34 +0200 Subject: [PATCH 133/417] Replace NO_MORE_ORDS usage in ES87TSDBDocValuesProducer --- .../index/codec/tsdb/ES87TSDBDocValuesProducer.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index ad725d79b8810..a453c065ccd52 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -658,9 +658,8 @@ public long nextOrd() throws IOException { i = 0; count = 
ords.docValueCount(); } - if (i++ == count) { - return NO_MORE_ORDS; - } + assert i < count; + i++; return ords.nextValue(); } From 11c5868b0303b8ade02b52f43c2d1d4b9b5a9c32 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 29 Aug 2024 12:20:13 +0200 Subject: [PATCH 134/417] add getSkipper to ES87TSDBDocValuesProducer --- .../index/codec/tsdb/ES87TSDBDocValuesProducer.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index a453c065ccd52..35d2924dc60ba 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -15,6 +15,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -698,6 +699,11 @@ public long cost() { }; } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return null; + } + @Override public void checkIntegrity() throws IOException { CodecUtil.checksumEntireFile(data); From 54cc38a6ce7be96e7731141c58ea445f0b6048d3 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 12:20:24 +0100 Subject: [PATCH 135/417] Fix usage of TermInSetQuery::getTermData in NestedHelper --- .../index/search/NestedHelper.java | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index 94557ee263d6b..21af1ca26c66e 100644 --- 
a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -24,6 +25,7 @@ import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; +import java.io.IOException; import java.util.function.Predicate; /** Utility class to filter parent and children clauses when building nested @@ -52,15 +54,15 @@ public boolean mightMatchNestedDocs(Query query) { // We only handle term(s) queries and range queries, which should already // cover a high majority of use-cases return mightMatchNestedDocs(((TermQuery) query).getTerm().field()); - } else if (query instanceof TermInSetQuery) { - PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData(); - if (terms.size() > 0) { - PrefixCodedTerms.TermIterator it = terms.iterator(); - it.next(); - return mightMatchNestedDocs(it.field()); - } else { - return false; + } else if (query instanceof TermInSetQuery termInSetQuery) { + try { + if (termInSetQuery.getTermsCount() > 0) { + return mightMatchNestedDocs(termInSetQuery.getField()); + } + } catch (IOException e) { + throw new AssertionError(e); // cannot happen } + return false; } else if (query instanceof PointRangeQuery) { return mightMatchNestedDocs(((PointRangeQuery) query).getField()); } else if (query instanceof IndexOrDocValuesQuery) { From 57d47cb09bb972ab050147241be9abbaeac1a192 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 12:31:06 +0100 Subject: [PATCH 136/417] Fix usage of TermInSetQuery::getTermData in NestedHelper --- .../index/search/NestedHelper.java | 18 
++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index 21af1ca26c66e..e1e8861ce6551 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.search; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -17,7 +16,6 @@ import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -119,15 +117,15 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { return false; } else if (query instanceof TermQuery) { return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); - } else if (query instanceof TermInSetQuery) { - PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData(); - if (terms.size() > 0) { - PrefixCodedTerms.TermIterator it = terms.iterator(); - it.next(); - return mightMatchNonNestedDocs(it.field(), nestedPath); - } else { - return false; + } else if (query instanceof TermInSetQuery termInSetQuery) { + try { + if (termInSetQuery.getTermsCount() > 0) { + return mightMatchNestedDocs(termInSetQuery.getField()); + } + } catch (IOException e) { + throw new AssertionError(e); // cannot happen } + return false; } else if (query instanceof PointRangeQuery) { return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); } else if (query instanceof 
IndexOrDocValuesQuery) { From 5c26bef5123c5905087dcb51919152138bdf7c26 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 29 Aug 2024 12:32:35 +0100 Subject: [PATCH 137/417] Implement abstract NumericComparator methods --- .../search/retriever/rankdoc/RankDocsSortField.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java index 3cd29d352028b..b87375c36b4fa 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.NumericComparator; +import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.hnsw.IntToIntFunction; import org.elasticsearch.search.rank.RankDoc; @@ -69,6 +70,16 @@ public void setTopValue(Integer value) { topValue = value; } + @Override + protected long missingValueAsComparableLong() { + return missingValue; + } + + @Override + protected long sortableBytesToLong(byte[] bytes) { + return NumericUtils.sortableBytesToInt(bytes, 0); + } + @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { IntToIntFunction docToRank = doc -> rankDocMap.getOrDefault(context.docBase + doc, Integer.MAX_VALUE); From abf88c5ad84ef1240f113e46360588639fa550ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 29 Aug 2024 14:16:09 +0200 Subject: [PATCH 138/417] Remove assertion in ScoreSortBuilder This assertion uses the removed docId method from Scorable, I don't see a real replacement here atm. 
--- .../java/org/elasticsearch/search/sort/ScoreSortBuilder.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 0c9b56b1855d7..dd36d6df6cd95 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -125,7 +125,6 @@ public void setScorer(Scorable scorer) { @Override protected boolean advanceExact(int doc) throws IOException { - assert doc == scorer.docID() : "expected scorer to be on [" + doc + "] but was on [" + scorer.docID() + "]"; /* We will never be called by documents that don't match the * query and they'll all have a score, thus `true`. */ score = scorer.score(); From 24300c01e520736af54d1a87213f3e9e59293d96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 29 Aug 2024 16:01:00 +0200 Subject: [PATCH 139/417] Remove settind docId on cached scorable Since there is no docID getter or any other access on Scorable any longer, I think its safe to remove setting it. 
--- .../search/aggregations/bucket/nested/NestedAggregator.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 28e010f541a74..874c05a1f517e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -196,7 +196,6 @@ void processBufferedChildBuckets() throws IOException { } for (; childDocId < currentParentDoc; childDocId = childDocs.nextDoc()) { - cachedScorer.doc = childDocId; for (var bucket : bucketBuffer) { collectBucket(sub, childDocId, bucket); } From e42ef050309e39856b99086c75d233b97895bdf1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 29 Aug 2024 16:10:44 +0200 Subject: [PATCH 140/417] Fix Scorer ctor that requires Weight Since https://github.com/apache/lucene/pull/13440 Scorers don't keep track of a Weight any longer, so the constructor doesn't exist any longer and callers need to track the Weight if needed. So far I don't think we have any cases of this. 
--- .../java/org/elasticsearch/percolator/CandidateQueryTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 41fbd0e10260f..fabe8148f9fd0 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -1311,7 +1311,7 @@ protected boolean match(int doc) { } } }; - Scorer scorer = new Scorer(this) { + Scorer scorer = new Scorer() { @Override public int docID() { From fb575f295a0e95f51127e8f72169635981bee1fc Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 29 Aug 2024 16:57:18 +0200 Subject: [PATCH 141/417] Fix compile errors related to ScoreCachingWrapperScorer. --- .../aggregations/MultiBucketCollector.java | 30 +++++++- .../search/query/QueryPhaseCollector.java | 9 ++- .../MultiBucketCollectorTests.java | 73 +++++++++++++++++++ 3 files changed, 105 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index b956658f1226d..0abf73f28b1ef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.ScoreMode; import java.io.IOException; @@ -200,6 +199,7 @@ private static class MultiLeafBucketCollector extends LeafBucketCollector { private final boolean cacheScores; private final LeafBucketCollector[] 
collectors; private int numCollectors; + private ScoreCachingScorable scorable; private MultiLeafBucketCollector(List collectors, boolean cacheScores) { this.collectors = collectors.toArray(new LeafBucketCollector[collectors.size()]); @@ -210,11 +210,11 @@ private MultiLeafBucketCollector(List collectors, boolean c @Override public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); + scorable = new ScoreCachingScorable(scorer); } for (int i = 0; i < numCollectors; ++i) { final LeafCollector c = collectors[i]; - c.setScorer(scorer); + c.setScorer(cacheScores ? scorable : scorer); } } @@ -226,6 +226,9 @@ private void removeCollector(int i) { @Override public void collect(int doc, long bucket) throws IOException { + if (scorable != null) { + scorable.curDoc = doc; + } final LeafBucketCollector[] collectors = this.collectors; int numCollectors = this.numCollectors; for (int i = 0; i < numCollectors;) { @@ -243,4 +246,25 @@ public void collect(int doc, long bucket) throws IOException { } } } + + private static class ScoreCachingScorable extends Scorable { + + private final Scorable in; + private int curDoc = -1; // current document + private int scoreDoc = -1; // document that score was computed on + private float score; + + ScoreCachingScorable(Scorable in) { + this.in = in; + } + + @Override + public float score() throws IOException { + if (curDoc != scoreDoc) { + score = in.score(); + scoreDoc = curDoc; + } + return score; + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java index b63b961ed7b6a..a195f33b9e304 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java @@ -205,7 +205,11 @@ public DocIdSetIterator competitiveIterator() throws IOException { } }; } 
- return new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + LeafCollector leafCollector = new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { + leafCollector = ScoreCachingWrappingScorer.wrap(leafCollector); + } + return leafCollector; } private class TopDocsLeafCollector implements LeafCollector { @@ -259,9 +263,6 @@ private class CompositeLeafCollector implements LeafCollector { @Override public void setScorer(Scorable scorer) throws IOException { - if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); - } scorer = new FilterScorable(scorer) { @Override public void setMinCompetitiveScore(float minScore) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index ff4ad059559fc..58d732a1706d7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -268,4 +268,77 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { assertFalse(setScorerCalled1.get()); assertFalse(setScorerCalled2.get()); } + + public void testCacheScores() throws IOException { + ScoringBucketCollector scoringBucketCollector1 = new ScoringBucketCollector(); + ScoringBucketCollector scoringBucketCollector2 = new ScoringBucketCollector(); + + DummyScorable scorable = new DummyScorable(); + + // First test the tester + LeafBucketCollector leafBucketCollector1 = scoringBucketCollector1.getLeafCollector(null); + LeafBucketCollector leafBucketCollector2 = scoringBucketCollector2.getLeafCollector(null); + leafBucketCollector1.setScorer(scorable); + 
leafBucketCollector2.setScorer(scorable); + leafBucketCollector1.collect(0, 0); + leafBucketCollector2.collect(0, 0); + assertEquals(2, scorable.numScoreCalls); + + // reset + scorable.numScoreCalls = 0; + LeafBucketCollector leafBucketCollector = MultiBucketCollector.wrap( + randomBoolean(), + Arrays.asList(scoringBucketCollector1, scoringBucketCollector2) + ).getLeafCollector(null); + leafBucketCollector.collect(0, 0); + // Even though both leaf collectors called scorable.score(), it only got called once thanks to caching + assertEquals(1, scorable.numScoreCalls); + } + + private static class ScoringBucketCollector extends BucketCollector { + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; // needs scores + } + + @Override + public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx) throws IOException { + return new ScoringLeafBucketCollector(); + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws IOException { + + } + } + + private static class ScoringLeafBucketCollector extends LeafBucketCollector { + + private Scorable scorable; + + @Override + public void setScorer(Scorable scorer) throws IOException { + this.scorable = scorer; + } + + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + scorable.score(); + } + } + + private static class DummyScorable extends Scorable { + int numScoreCalls = 0; + + @Override + public float score() throws IOException { + numScoreCalls++; + return 42f; + } + } } From 9fa151643ee481f28f89a7cf54ed1f8bd6772418 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 17:04:00 +0200 Subject: [PATCH 142/417] Omit the executor when creating TimeSeriesIndexSearcher (#112357) This is a leftover from #111099: now that we use a single thread pool, we don't require offloading, which was the only reason to carry around the executor in the time series index searcher, given that the 
number of slices is always 1 and it does not support concurrency. Furthermore, providing the executor introduces some risks of potential concurrency caused by non-slice based operations that can't be disabled. This commit removes the executor constructor argument and effectively disable any concurrency in TimeSeriesIndexSearcher. --- .../search/aggregations/support/TimeSeriesIndexSearcher.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index c3faf0e1900dc..a065ae1a88164 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -63,10 +63,7 @@ public TimeSeriesIndexSearcher(IndexSearcher searcher, List cancellati searcher.getSimilarity(), searcher.getQueryCache(), searcher.getQueryCachingPolicy(), - false, - searcher.getExecutor(), - 1, - -1 + false ); } catch (IOException e) { // IOException from wrapping the index searcher which should never happen. From 43a65ccddc84c67dc26d2af0608102ce96087d4c Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 29 Aug 2024 17:05:46 +0200 Subject: [PATCH 143/417] Fix more compile errors related to stored fields and term vectors. 
--- .../elasticsearch/gateway/PersistedClusterStateService.java | 4 +++- .../org/elasticsearch/index/engine/LuceneChangesSnapshot.java | 2 +- .../elasticsearch/index/termvectors/TermVectorsService.java | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 595b60e5a649b..17d01bc9332ef 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.DocIdSetIterator; @@ -702,10 +703,11 @@ private static void consumeFromType( final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + final StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { logger.trace("processing doc {}", docIdSetIterator.docID()); - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final BytesArray documentData = new BytesArray(document.getBinaryValue(DATA_FIELD_NAME)); if (document.getField(PAGE_FIELD_NAME) == null) { diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index e63d5ef87973b..ffdaeeae00927 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -339,7 +339,7 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { assert storedFieldsReaderOrd == leaf.ord : storedFieldsReaderOrd + " != " + leaf.ord; storedFieldsReader.document(segmentDocID, fields); } else { - leaf.reader().document(segmentDocID, fields); + leaf.reader().storedFields().document(segmentDocID, fields); } final Translog.Operation op; diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index a30249e94177e..6b377f18f7e73 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -95,7 +95,7 @@ static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequ /* or from an existing document */ else if (docIdAndVersion != null) { // fields with 
stored term vectors - termVectorsByField = docIdAndVersion.reader.getTermVectors(docIdAndVersion.docId); + termVectorsByField = docIdAndVersion.reader.termVectors().get(docIdAndVersion.docId); Set selectedFields = request.selectedFields(); // generate tvs for fields where analyzer is overridden if (selectedFields == null && request.perFieldAnalyzer() != null) { @@ -300,7 +300,7 @@ private static Fields generateTermVectors( } } /* and read vectors from it */ - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException { From d93ea4e87d2f30576c4bc74a4002b29af6fd27bd Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 29 Aug 2024 17:09:14 +0200 Subject: [PATCH 144/417] Fix error wrt TermState. --- .../java/org/elasticsearch/script/ScriptTermStats.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java index 0863e649487b9..9017d1fa8ade5 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java @@ -15,6 +15,7 @@ import org.apache.lucene.index.TermStates; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.util.IOSupplier; import org.elasticsearch.common.util.CachedSupplier; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.search.internal.ContextIndexSearcher; @@ -214,7 +215,12 @@ private PostingsEnum[] loadPostings() { continue; } - TermState state = termStates.get(leafReaderContext); + IOSupplier stateSupplier = termStates.get(leafReaderContext); + if (stateSupplier == null) { + postings[i] = null; + continue; + } + TermState state = 
stateSupplier.get(); if (state == null) { postings[i] = null; continue; From 1f715b6d3017769ce08b44893d17e6709d479a6f Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 29 Aug 2024 17:10:31 +0200 Subject: [PATCH 145/417] Add missing getSkipper impl. --- .../index/engine/RecoverySourcePruneMergePolicy.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java index 239e85a50df7c..4197623867cb9 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -12,6 +12,7 @@ import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CodecReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterNumericDocValues; @@ -187,6 +188,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { return in.getSortedSet(field); } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return in.getSkipper(field); + } + @Override public void checkIntegrity() throws IOException { in.checkIntegrity(); From c41caa2894a03bbed0372f18cba936e397abb21e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 29 Aug 2024 17:13:46 +0200 Subject: [PATCH 146/417] Remove reference to old version, use a string representation of the version instead. 
--- .../java/org/elasticsearch/index/store/StoreFileMetadata.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java index 9a2655cd6e56f..0d76ee69910da 100644 --- a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java +++ b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java @@ -194,7 +194,7 @@ public BytesRef hash() { * * This ID may be {@link StoreFileMetadata#UNAVAILABLE_WRITER_UUID} (i.e. zero-length) if unavailable, e.g.: * - * - The file was written by a version of Lucene prior to {@link org.apache.lucene.util.Version#LUCENE_8_6_0}. + * - The file was written by a version of Lucene prior to 8.6.0. * - The metadata came from a version of Elasticsearch prior to {@link StoreFileMetadata#WRITER_UUID_MIN_VERSION}). * - The file is not one of the files listed above. * From a371a6bf0a6eaea50f3b15808abf62c3ddf56d16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 29 Aug 2024 17:49:19 +0200 Subject: [PATCH 147/417] Lucene RegExp.COMPLEMENT was removed --- .../main/java/org/elasticsearch/index/query/RegexpFlag.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 6bac92bedd4a6..b8334abec3d95 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -36,8 +36,11 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} + * NOCOMMIT: Lucenes RegExp class has removed the COMPLEMENT flag in https://issues.apache.org/jira/browse/LUCENE-10010 + * I'm currently not sure if it still supports the "~" operator but we need an enum constant for + * 
parsing our own flag syntax, so leaving a tombstone here for now */ - COMPLEMENT(RegExp.COMPLEMENT), + COMPLEMENT(0x0002), /** * Enables empty language expression: {@code #} From 7f2216e5bb89bab1a4c5302714f5e5b6324c3206 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 17:51:58 +0200 Subject: [PATCH 148/417] Fix collector creation relataed compile error in LuceneTopNSourceOperator --- .../compute/lucene/LuceneTopNSourceOperator.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2e32d20a2365e..c9990bd5f9994 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; @@ -230,8 +231,9 @@ static final class PerShardCollector { if (sortAndFormats.isEmpty()) { throw new IllegalStateException("sorts must not be disabled in TopN"); } + // We don't use CollectorManager here as we don't retrieve the total hits and sort by score. 
- this.topFieldCollector = TopFieldCollector.create(sortAndFormats.get().sort, limit, 0); + this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, 0).newCollector(); } LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException { From a17f194ea8bbeef98b9dd0967bf75ef4d5dd06cc Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 17:55:04 +0200 Subject: [PATCH 149/417] Fix compile error in RewriteCachingDirectoryReader --- .../frozen/RewriteCachingDirectoryReader.java | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java index e66d41d089437..12864dd66a857 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java @@ -9,9 +9,9 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; @@ -23,7 +23,6 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; @@ -214,6 +213,11 @@ public NumericDocValues 
getNormValues(String field) { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -257,11 +261,6 @@ public LeafMetaData getMetaData() { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docId) { - throw new UnsupportedOperationException(); - } - @Override public TermVectors termVectors() throws IOException { throw new UnsupportedOperationException(); @@ -282,11 +281,6 @@ public int maxDoc() { return maxDoc; } - @Override - public void document(int docID, StoredFieldVisitor visitor) { - throw new UnsupportedOperationException(); - } - @Override protected void doClose() {} From 78bfea4d61fc67ceb9b42a7ef8c5e9f3f3a7e6d0 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 17:56:25 +0200 Subject: [PATCH 150/417] fix compile error in SourceOnlySnapshot around FieldInfo creation --- .../elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 093ec031d0b30..c9883a8337aae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -254,6 +254,7 @@ private SegmentCommitInfo syncSegment( false, IndexOptions.NONE, DocValuesType.NONE, + fieldInfo.hasDocValuesSkipIndex(), -1, fieldInfo.attributes(), 0, From 135ee92b33de65136d7f532f07795400e3b934ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 29 Aug 2024 00:12:54 +0200 
Subject: [PATCH 151/417] Work around removed TermsInSetQuery#getTermData This method was deprecated and removed in https://github.com/apache/lucene/commit/00910cd6a4de6e50b48f7abac301a4c7faa722ab without a replacement. I think this change works around the missing method without change in behaviour expect for the QueryAnalyzer change that is marked with NOCOMMIT --- .../percolator/QueryAnalyzer.java | 10 ++-- .../PercolatorFieldMapperTests.java | 2 +- .../index/mapper/MappedFieldType.java | 11 ++--- .../index/search/NestedHelper.java | 49 +++++++++++++------ 4 files changed, 42 insertions(+), 30 deletions(-) diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 4332268a23c9e..1bac5368bbae5 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.percolator; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanTermQuery; @@ -29,6 +28,7 @@ import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.query.DateRangeIncludingNowQuery; +import org.elasticsearch.index.search.NestedHelper; import org.elasticsearch.lucene.queries.BlendedTermQuery; import java.util.ArrayList; @@ -197,12 +197,10 @@ public void consumeTerms(Query query, Term... 
termsToConsume) { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - PrefixCodedTerms.TermIterator ti = q.getTermData().iterator(); - BytesRef term; + // NOCOMMIT this is a workaround that only gets one term + Term term = NestedHelper.getTermInSetTerm(q); Set qe = new HashSet<>(); - while ((term = ti.next()) != null) { - qe.add(new QueryExtraction(new Term(field, term))); - } + qe.add(new QueryExtraction(term)); this.terms.add(new Result(true, qe, 1)); } else { super.consumeTermsMatching(query, field, automaton); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 729bbc8aa7850..37fb1cac0b50b 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -445,7 +445,7 @@ public void testCreateCandidateQuery() throws Exception { assertFalse(t.v2()); assertEquals(3, t.v1().clauses().size()); TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).query(); - assertEquals(maxClauseCount - 1, terms.getTermData().size()); + assertEquals(maxClauseCount - 1, terms.getTermsCount()); assertThat(t.v1().clauses().get(1).query().toString(), containsString(fieldName + ".range_field: 0) { - return mightMatchNestedDocs(termInSetQuery.getField()); - } - } catch (IOException e) { - throw new AssertionError(e); // cannot happen + } else if (query instanceof TermInSetQuery) { + Term term = getTermInSetTerm((TermInSetQuery) query); + if (term != null) { + return mightMatchNestedDocs(term.field()); + } else { + return false; } - return false; } else if (query instanceof PointRangeQuery) { return mightMatchNestedDocs(((PointRangeQuery) query).getField()); } else if (query instanceof 
IndexOrDocValuesQuery) { @@ -117,15 +118,13 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { return false; } else if (query instanceof TermQuery) { return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); - } else if (query instanceof TermInSetQuery termInSetQuery) { - try { - if (termInSetQuery.getTermsCount() > 0) { - return mightMatchNestedDocs(termInSetQuery.getField()); - } - } catch (IOException e) { - throw new AssertionError(e); // cannot happen + } else if (query instanceof TermInSetQuery) { + Term term = getTermInSetTerm((TermInSetQuery) query); + if (term != null) { + return mightMatchNonNestedDocs(term.field(), nestedPath); + } else { + return false; } - return false; } else if (query instanceof PointRangeQuery) { return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); } else if (query instanceof IndexOrDocValuesQuery) { @@ -178,4 +177,22 @@ boolean mightMatchNonNestedDocs(String field, String nestedPath) { return true; } + public static Term getTermInSetTerm(TermInSetQuery tisQuery) { + try { + if (tisQuery.getTermsCount() == 1) { + final SetOnce collectedTerm = new SetOnce<>(); + tisQuery.visit(new QueryVisitor() { + @Override + public void consumeTerms(Query query, Term... 
terms) { + collectedTerm.set(terms[0]); + } + }); + return collectedTerm.get(); + } + return null; + } catch (IOException e) { + // TODO should never happen, remove throwing IOException from TermInSetQuery in Lucene + } + return null; + } } From dea12a01c5e0def42558d414523c25863c212e73 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 18:08:35 +0200 Subject: [PATCH 152/417] Fix FieldTypeTestCase compile error around FieldInfo creation --- .../java/org/elasticsearch/index/mapper/FieldTypeTestCase.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index d4c6f8f3df873..b23eeea6c6c1c 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -138,6 +138,7 @@ public FieldInfo getFieldInfoWithName(String name) { randomBoolean(), IndexOptions.NONE, DocValuesType.NONE, + randomBoolean(), -1, new HashMap<>(), 1, From c921d8ebb6ca55b7277d08949d66f6299a8ebbd1 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 18:09:29 +0200 Subject: [PATCH 153/417] Fix compile error in ThrowingLeafReaderWrapper caused by override of getTermVectors --- .../test/engine/ThrowingLeafReaderWrapper.java | 7 ------- 1 file changed, 7 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index 544505c16960a..2daa3ab3dceea 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -85,13 +85,6 @@ public Terms terms(String field) throws IOException { return terms; } - @Override - 
public Fields getTermVectors(int docID) throws IOException { - Fields fields = super.getTermVectors(docID); - thrower.maybeThrow(Flags.TermVectors); - return fields == null ? null : new ThrowingFields(fields, thrower); - } - /** * Wraps a Fields but with additional asserts */ From 65843ef3fc5c2850c3bf788312afe7ddbe3cebab Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 18:10:55 +0200 Subject: [PATCH 154/417] Fix compile errors in ElasticsearchAssertions due to BooleanClause.getQuery() -> query() --- .../elasticsearch/test/hamcrest/ElasticsearchAssertions.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 49c244167fe19..46c49f1c26134 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -701,8 +701,8 @@ public static T assertBooleanSubQuery(Query query, Class su assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery q = (BooleanQuery) query; assertThat(q.clauses(), hasSize(greaterThan(i))); - assertThat(q.clauses().get(i).getQuery(), instanceOf(subqueryType)); - return subqueryType.cast(q.clauses().get(i).getQuery()); + assertThat(q.clauses().get(i).query(), instanceOf(subqueryType)); + return subqueryType.cast(q.clauses().get(i).query()); } /** From 66294519613d81504c409e0be05d4441f85517e3 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 18:22:36 +0200 Subject: [PATCH 155/417] Fix compile error in CountedKeywordFieldMapper due to removal of NO_MORE_ORDS --- .../xpack/countedkeyword/CountedKeywordFieldMapper.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git 
a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index dcf4ba7a3ce25..d24f7b22ce740 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -246,11 +246,8 @@ public int docValueCount() { @Override public long nextOrd() { - if (ordsForThisDoc.hasNext()) { - return ordsForThisDoc.next(); - } else { - return NO_MORE_ORDS; - } + assert ordsForThisDoc.hasNext(); + return ordsForThisDoc.next(); } @Override From a7a30d3145b5bd6809d1bd41429b09ff0bbb71a9 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 29 Aug 2024 18:39:29 +0200 Subject: [PATCH 156/417] Some trivial compile fixes --- .../query/DistanceFeatureQueryBuilderTests.java | 4 ++-- .../index/query/MoreLikeThisQueryBuilderTests.java | 2 +- .../lucene/queries/BlendedTermQueryTests.java | 12 ++++++------ .../support/TimeSeriesIndexSearcherTests.java | 5 ++++- 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index b3bcce1dbc94d..236dd5c5d90a8 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.document.LatLonPoint; -import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.LongField; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.GeoPoint; import 
org.elasticsearch.common.geo.GeoUtils; @@ -80,7 +80,7 @@ protected void doAssertLuceneQuery(DistanceFeatureQueryBuilder queryBuilder, Que } else { // NANOSECONDS pivotLong = pivotVal.getNanos(); } - expectedQuery = LongPoint.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); + expectedQuery = LongField.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); } assertEquals(expectedQuery, query); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 4337f78dc5ec7..76a4900aee25d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -254,7 +254,7 @@ protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; for (BooleanClause booleanClause : booleanQuery) { - if (booleanClause.getQuery() instanceof MoreLikeThisQuery moreLikeThisQuery) { + if (booleanClause.query() instanceof MoreLikeThisQuery moreLikeThisQuery) { assertThat(moreLikeThisQuery.getLikeFields().length, greaterThan(0)); } } diff --git a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java index 4e0916d308f40..fd383c7726f28 100644 --- a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java @@ -119,8 +119,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - 
TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("unknown_field")) { assertThat(termStates.docFreq(), equalTo(0)); @@ -139,8 +139,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); assertThat(termStates.docFreq(), equalTo(0)); assertThat(termStates.totalTermFreq(), equalTo(0L)); @@ -154,8 +154,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("username")) { assertThat(termStates.docFreq(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java index 03913717992c9..4e461f3495612 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java @@ -200,7 
+200,10 @@ public void testCollectFromMiddle() throws IOException { BucketCollector collector = getBucketCollector(2 * DOC_COUNTS); // skip the first doc of segment 1 and 2 - indexSearcher.search(SortedSetDocValuesField.newSlowSetQuery("_tsid", new BytesRef("tsid0"), new BytesRef("tsid1")), collector); + indexSearcher.search( + SortedSetDocValuesField.newSlowSetQuery("_tsid", List.of(new BytesRef("tsid0"), new BytesRef("tsid1"))), + collector + ); collector.postCollection(); reader.close(); From 7ac442c65cbd370d64011b84ca27eaa30ea98a84 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 20:59:24 +0200 Subject: [PATCH 157/417] [TEST] Fix compile errors in SourceOnlySnapshotShardTests --- .../sourceonly/SourceOnlySnapshotShardTests.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index e39ddc170c0a9..e4a53d0a34d6a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.MatchAllDocsQuery; @@ -430,8 +431,9 @@ public void onFailure(Exception e) { assertEquals(original.exists(), restored.exists()); if (original.exists()) { - Document document = original.docIdAndVersion().reader.document(original.docIdAndVersion().docId); - Document restoredDocument = restored.docIdAndVersion().reader.document(restored.docIdAndVersion().docId); + 
StoredFields storedFields = original.docIdAndVersion().reader.storedFields(); + Document document = storedFields.document(original.docIdAndVersion().docId); + Document restoredDocument = storedFields.document(restored.docIdAndVersion().docId); for (IndexableField field : document) { assertEquals(document.get(field.name()), restoredDocument.get(field.name())); } @@ -470,7 +472,7 @@ public IndexShard reindex(DirectoryReader reader, MappingMetadata mapping) throw for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { rootFieldsVisitor.reset(); - leafReader.document(i, rootFieldsVisitor); + leafReader.storedFields().document(i, rootFieldsVisitor); rootFieldsVisitor.postProcess(targetShard.mapperService()::fieldType); String id = rootFieldsVisitor.id(); BytesReference source = rootFieldsVisitor.source(); From 92201c2921d431c9eb777079686c1464be2096c3 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:17:56 +0200 Subject: [PATCH 158/417] Fix IOContext creation in CachedBlobContainerIndexInput --- .../store/input/CachedBlobContainerIndexInput.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java index 81cf205c13dd2..4711043fff281 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.ReadAdvice; import 
org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.ByteRange; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; @@ -35,7 +36,7 @@ public class CachedBlobContainerIndexInput extends MetadataCachingIndexInput { * a complete part of the {@link #fileInfo} at once in the cache and should not be * used for anything else than what the {@link #prefetchPart(int, Supplier)} method does. */ - public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(); + public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(IOContext.Context.DEFAULT, null, null, ReadAdvice.NORMAL); private static final Logger logger = LogManager.getLogger(CachedBlobContainerIndexInput.class); From 9b1d19290a20bd1aabb19decee862875fab59917 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:20:22 +0200 Subject: [PATCH 159/417] Fix compile errors in BlobCacheBufferedIndexInput --- .../blobcache/common/BlobCacheBufferedIndexInput.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 95b2324d03b52..16645e7523c36 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -390,12 +390,11 @@ public IndexInput clone() { /** Returns default buffer sizes for the given {@link IOContext} */ public static int bufferSize(IOContext context) { - switch (context.context) { + switch (context.context()) { case MERGE: return MERGE_BUFFER_SIZE; case DEFAULT: case FLUSH: - case READ: default: return BUFFER_SIZE; } From 64c9c9aeb58b46d14f11bbba035ef03f0fa72b6e Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: 
Thu, 29 Aug 2024 21:24:47 +0200 Subject: [PATCH 160/417] [TEST] Fix compile error in CandidateQueryTests --- .../org/elasticsearch/percolator/CandidateQueryTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index fabe8148f9fd0..12a9d8560f946 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.memory.MemoryIndex; @@ -1162,12 +1163,13 @@ private void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex logger.error("topDocs.scoreDocs[{}].doc={}", i, topDocs.scoreDocs[i].doc); logger.error("topDocs.scoreDocs[{}].score={}", i, topDocs.scoreDocs[i].score); } + StoredFields storedFields = shardSearcher.storedFields(); for (int i = 0; i < controlTopDocs.scoreDocs.length; i++) { logger.error("controlTopDocs.scoreDocs[{}].doc={}", i, controlTopDocs.scoreDocs[i].doc); logger.error("controlTopDocs.scoreDocs[{}].score={}", i, controlTopDocs.scoreDocs[i].score); // Additional stored information that is useful when debugging: - String queryToString = shardSearcher.doc(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); + String queryToString = storedFields.document(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); logger.error("controlTopDocs.scoreDocs[{}].query_to_string={}", i, queryToString); TermsEnum tenum = MultiTerms.getTerms(shardSearcher.getIndexReader(), 
fieldType.queryTermsField.name()).iterator(); From 35145c5267de45495c028e9d937da6b66258213c Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:32:37 +0200 Subject: [PATCH 161/417] catch IOException when calling Expression#evaluate See https://github.com/apache/lucene/pull/12878 --- .../expression/ExpressionDoubleValuesScript.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java index a7935800ec4ba..0dc69484e83c1 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java @@ -16,6 +16,8 @@ import org.apache.lucene.search.SortField; import org.elasticsearch.script.DoubleValuesScript; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.function.Function; /** @@ -36,12 +38,20 @@ public DoubleValuesScript newInstance() { return new DoubleValuesScript() { @Override public double execute() { - return exprScript.evaluate(new DoubleValues[0]); + try { + return exprScript.evaluate(new DoubleValues[0]); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override public double evaluate(DoubleValues[] functionValues) { - return exprScript.evaluate(functionValues); + try { + return exprScript.evaluate(functionValues); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override From 3be5a1e65b79189b72ef909b12a78233ba710357 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:35:00 +0200 Subject: [PATCH 162/417] Catch one more IOException in ExpressionScriptEngine --- .../script/expression/ExpressionScriptEngine.java | 8 +++++++- 1 file changed, 7 
insertions(+), 1 deletion(-) diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 7d80a0d401013..fe781b4f013ca 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -35,6 +35,8 @@ import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.search.lookup.SearchLookup; +import java.io.IOException; +import java.io.UncheckedIOException; import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; @@ -232,7 +234,11 @@ public Double execute() { placeholder.setValue(((Number) value).doubleValue()); } }); - return expr.evaluate(functionValuesArray); + try { + return expr.evaluate(functionValuesArray); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } }; }; From 98ea807e79a7e130830aa61d2bd537b1a626f1c9 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:43:50 +0200 Subject: [PATCH 163/417] Adjust JavascriptCompiler#compile call in ExpressionScriptEngine See https://github.com/apache/lucene/pull/12873 --- .../expression/ExpressionScriptEngine.java | 42 ++++--------------- 1 file changed, 7 insertions(+), 35 deletions(-) diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index fe781b4f013ca..25def98c68813 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -13,7 +13,6 @@ import 
org.apache.lucene.expressions.js.JavascriptCompiler; import org.apache.lucene.expressions.js.VariableContext; import org.apache.lucene.search.DoubleValuesSource; -import org.elasticsearch.SpecialPermission; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; @@ -23,7 +22,6 @@ import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; -import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.DoubleValuesScript; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.FilterScript; @@ -37,9 +35,6 @@ import java.io.IOException; import java.io.UncheckedIOException; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; @@ -157,36 +152,13 @@ public String getType() { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { - // classloader created here - final SecurityManager sm = System.getSecurityManager(); - SpecialPermission.check(); - Expression expr = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Expression run() { - try { - // snapshot our context here, we check on behalf of the expression - AccessControlContext engineContext = AccessController.getContext(); - ClassLoader loader = getClass().getClassLoader(); - if (sm != null) { - loader = new ClassLoader(loader) { - @Override - protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - try { - engineContext.checkPermission(new ClassPermission(name)); - } catch (SecurityException e) { - throw new ClassNotFoundException(name, e); - } - return super.loadClass(name, resolve); - } - }; - } - // 
NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here - return JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS, loader); - } catch (ParseException e) { - throw convertToScriptException("compile error", scriptSource, scriptSource, e); - } - } - }); + Expression expr; + try { + // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here + expr = JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS); + } catch (ParseException e) { + throw convertToScriptException("compile error", scriptSource, scriptSource, e); + } if (contexts.containsKey(context) == false) { throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } From b16d1ba1d692b87f70e341a7d57e60b71891e1bb Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:54:15 +0200 Subject: [PATCH 164/417] [TEST] Fix compile errors in SimpleLuceneTests --- .../org/elasticsearch/deps/lucene/SimpleLuceneTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 24e635b2f8b76..edad379f10a5d 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -83,12 +83,12 @@ public void testSimpleNumericOps() throws Exception { try (IndexReader reader = DirectoryReader.open(indexWriter)) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - Document doc = searcher.doc(topDocs.scoreDocs[0].doc); + Document doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); IndexableField f = doc.getField("test"); assertThat(f.numericValue(), equalTo(2)); 
topDocs = searcher.search(IntPoint.newExactQuery("test", 2), 1); - doc = searcher.doc(topDocs.scoreDocs[0].doc); + doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); f = doc.getField("test"); assertThat(f.stringValue(), equalTo("2")); } @@ -114,7 +114,7 @@ public void testOrdering() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); final ArrayList fieldsOrder = new ArrayList<>(); - searcher.doc(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { + searcher.storedFields().document(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { @Override public Status needsField(FieldInfo fieldInfo) throws IOException { fieldsOrder.add(fieldInfo.name); From 29c342b2291fc75661e5e6aae05a02cc5b20cbee Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:57:01 +0200 Subject: [PATCH 165/417] [TEST] Fix compile error in BinaryRangeAggregatorTests --- .../aggregations/bucket/range/BinaryRangeAggregatorTests.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index 99502dd3372cf..e26c96177a89c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -41,9 +41,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (i == ords.length) { - return NO_MORE_ORDS; - } + assert i < ords.length; return ords[i++]; } From 59f1ec11f64389e450f2c5824d5e3819e372f839 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 21:58:21 +0200 Subject: [PATCH 166/417] [TEST] Fix compile errors in AbstractStringFieldDataTestCase --- 
.../index/fielddata/AbstractStringFieldDataTestCase.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index 0bcad16497cfb..007882290bd39 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -264,7 +264,7 @@ public void testActualMissingValue(boolean reverse) throws IOException { assertEquals(numDocs, topDocs.totalHits.value); BytesRef previousValue = reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); final BytesRef value = new BytesRef(docValue == null ? missingValue : docValue); if (reverse) { assertTrue(previousValue.compareTo(value) >= 0); @@ -323,7 +323,7 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { assertThat(topDocs.totalHits.value, lessThanOrEqualTo((long) numDocs)); BytesRef previousValue = first ? null : reverse ? 
UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); if (first && docValue == null) { assertNull(previousValue); } else if (first == false && docValue != null) { @@ -413,7 +413,7 @@ public void testNestedSorting(MultiValueMode sortMode) throws IOException { assertTrue("expected " + docID + " to be a parent", parents.get(docID)); BytesRef cmpValue = null; for (int child = parents.prevSetBit(docID - 1) + 1; child < docID; ++child) { - String[] sVals = searcher.doc(child).getValues("text"); + String[] sVals = searcher.storedFields().document(child).getValues("text"); final BytesRef[] vals; if (sVals.length == 0) { vals = new BytesRef[0]; From f315dabb253b3c9114facb10e1510769020d3c38 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 22:00:59 +0200 Subject: [PATCH 167/417] [TEST] Fix compile error in AnnotatedTextFieldMapperTests --- .../mapper/annotatedtext/AnnotatedTextFieldMapperTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index a5319387a2b68..b40b6e63c5452 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -241,7 +241,7 @@ public void testIndexedTermVectors() throws IOException { withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), reader -> { LeafReader leaf = reader.leaves().get(0).reader(); - Terms terms = leaf.getTermVector(0, 
"field"); + Terms terms = leaf.termVectors().get(0, "field"); TermsEnum iterator = terms.iterator(); BytesRef term; Set foundTerms = new HashSet<>(); From 2adb8c32fed4316608369bb2da9a7f272149f0ce Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 22:05:40 +0200 Subject: [PATCH 168/417] [TEST] Fix compile errors in InternalEngineTests --- .../org/elasticsearch/index/engine/InternalEngineTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 938731f932632..779cf8d90c883 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -265,7 +265,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("test", luceneDoc.get("value")); } @@ -278,7 +278,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("updated", luceneDoc.get("value")); } @@ -5675,7 +5675,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws 
InterruptedException, try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs search = searcher.search(new MatchAllDocsQuery(), searcher.getIndexReader().numDocs()); for (int i = 0; i < search.scoreDocs.length; i++) { - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[i].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[i].doc); assertEquals("updated", luceneDoc.get("value")); } int totalNumDocs = numDocs - numDeletes.get(); From 46b67c62b0e392d031164924770dc0772f5b54fc Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 22:07:07 +0200 Subject: [PATCH 169/417] [TEST] Fix compile errors in NestedSortingTests --- .../search/nested/NestedSortingTests.java | 62 ++++++++++--------- 1 file changed, 32 insertions(+), 30 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index 97e58a4a16f23..a68561f1355ac 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -620,15 +621,16 @@ public void testMultiLevelNestedSorting() throws IOException { QueryBuilder queryBuilder = new MatchAllQueryBuilder(); TopFieldDocs topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + 
StoredFields storedFields = searcher.storedFields(); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(234L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(976L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MAX_VALUE)); // Specific genre @@ -636,25 +638,25 @@ public void testMultiLevelNestedSorting() throws IOException { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - 
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } @@ -664,15 +666,15 @@ public void testMultiLevelNestedSorting() throws IOException { queryBuilder = new MatchAllQueryBuilder(); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(849L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); + 
assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(180L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -681,25 +683,25 @@ public void testMultiLevelNestedSorting() throws IOException { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); 
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L)); } @@ -717,9 +719,9 @@ public void testMultiLevelNestedSorting() throws IOException { searcher ); assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); sortBuilder.order(SortOrder.DESC); @@ -730,9 +732,9 @@ public void testMultiLevelNestedSorting() throws IOException { searcher ); assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(76L)); } @@ -755,9 +757,9 @@ public void testMultiLevelNestedSorting() throws IOException { searcher ); assertThat(topFields.totalHits.value, equalTo(2L)); - 
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MAX_VALUE)); sortBuilder.order(SortOrder.DESC); @@ -768,9 +770,9 @@ public void testMultiLevelNestedSorting() throws IOException { searcher ); assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -785,25 +787,25 @@ public void testMultiLevelNestedSorting() throws IOException { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + 
assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } From e6c5c5d42be6c590469011d34511fb28bd81721e Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 22:08:18 +0200 Subject: [PATCH 170/417] [TEST] Fix compile error in AbstractTermVectorsTestCase --- .../action/termvectors/AbstractTermVectorsTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index afbc98cb0754f..22bc3cd058fd5 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -399,6 +399,6 @@ protected Fields getTermVectorsFromLucene(DirectoryReader directoryReader, TestD ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(1, scoreDocs.length); - return 
directoryReader.getTermVectors(scoreDocs[0].doc); + return directoryReader.termVectors().get(scoreDocs[0].doc); } } From 495ebaaead535e7e25bcc96a406eb8a98ccb7cf7 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 22:09:13 +0200 Subject: [PATCH 171/417] [TEST] Fix compile error in MoreLikeThisQueryBuilderTests --- .../index/query/MoreLikeThisQueryBuilderTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 76a4900aee25d..34342fcedcfd3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -245,7 +245,7 @@ private static Fields generateFields(String[] fieldNames, String text) throws IO for (String fieldName : fieldNames) { index.addField(fieldName, text, new WhitespaceAnalyzer()); } - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } @Override From 84e84fb50fe658055e3fab7a418fc94f1b9fb9f2 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 22:09:56 +0200 Subject: [PATCH 172/417] [TEST] fix compile error in StoredNumericValuesTests --- .../elasticsearch/index/mapper/StoredNumericValuesTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index e8f71775a961f..ec102b07208ba 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -79,7 +79,7 @@ public void testBytesAndNumericRepresentation() throws 
Exception { "field10" ); CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); - searcher.doc(0, fieldsVisitor); + searcher.storedFields().document(0, fieldsVisitor); fieldsVisitor.postProcess(mapperService::fieldType); assertThat(fieldsVisitor.fields().size(), equalTo(10)); From 68409a168019aab86150fa620c3dcb111a1a9468 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 22:11:34 +0200 Subject: [PATCH 173/417] [TEST] Fix compile errors in ProfileScorerTests --- .../search/profile/query/ProfileScorerTests.java | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java index be0ce00145226..5c3a9aee5a6b9 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java @@ -9,14 +9,12 @@ package org.elasticsearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiReader; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Matches; import org.apache.lucene.search.MatchesIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -174,22 +172,16 @@ public Iterator iterator() { } public void testPropagateMinCompetitiveScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); 
QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); assertEquals(0.42f, fakeScorer.minCompetitiveScore, 0f); } public void testPropagateMaxScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); fakeScorer.maxScore = 42f; From 11c21a1088f194107021d5bfe84d0c9a1a0c02b0 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 20 Aug 2024 23:57:38 +0200 Subject: [PATCH 174/417] Remove search worker pool (#111099) No more need for this pool, now that Lucene can safely execute on the current pool. 
--- docs/reference/modules/threadpool.asciidoc | 8 +----- .../elasticsearch/node/NodeConstruction.java | 13 +++++++++ .../search/DefaultSearchContext.java | 3 ++- .../elasticsearch/search/SearchService.java | 20 +++++++++----- .../search/internal/ContextIndexSearcher.java | 7 +++++ .../DefaultBuiltInExecutorBuilders.java | 10 ------- .../elasticsearch/threadpool/ThreadPool.java | 2 -- .../search/SearchServiceTests.java | 8 +++--- .../search/dfs/DfsPhaseTests.java | 2 +- .../snapshots/SnapshotResiliencyTests.java | 2 -- .../threadpool/ThreadPoolTests.java | 21 --------------- .../aggregations/AggregatorTestCase.java | 2 +- .../ConcurrentSearchSingleNodeTests.java | 27 ++++++------------- .../ConcurrentSearchTestPluginTests.java | 27 ++++++------------- .../input/MetadataCachingIndexInput.java | 1 - 15 files changed, 58 insertions(+), 95 deletions(-) diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index ed4becbfbb6d0..9e6e5fb80f999 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -13,16 +13,10 @@ There are several thread pools, but the important ones include: [[search-threadpool]] `search`:: - For coordination of count/search operations at the shard level whose computation - is offloaded to the search_worker thread pool. Used also by fetch and other search + For count/search operations at the shard level. Used also by fetch and other search related operations Thread pool type is `fixed` with a size of `int((`<>`pass:[ * ]3) / 2) + 1`, and queue_size of `1000`. -`search_worker`:: - For the heavy workload of count/search operations that may be executed concurrently - across segments within the same shard when possible. Thread pool type is `fixed` - with a size of `int((`<>`pass:[ * ]3) / 2) + 1`, and unbounded queue_size . - [[search-throttled]]`search_throttled`:: For count/search/suggest/get operations on `search_throttled indices`. 
Thread pool type is `fixed` with a size of `1`, and queue_size of `100`. diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 118d68d73fa27..f1e887625e440 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -83,6 +83,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -506,6 +507,7 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, for (final ExecutorBuilder builder : threadPool.builders()) { additionalSettings.addAll(builder.getRegisteredSettings()); } + addBwcSearchWorkerSettings(additionalSettings); SettingsExtension.load().forEach(e -> additionalSettings.addAll(e.getSettings())); // this is as early as we can validate settings at this point. 
we already pass them to ThreadPool @@ -536,6 +538,17 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, return settingsModule; } + @UpdateForV9 + private static void addBwcSearchWorkerSettings(List> additionalSettings) { + // TODO remove the below settings, they are unused and only here to enable BwC for deployments that still use them + additionalSettings.add( + Setting.intSetting("thread_pool.search_worker.queue_size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning) + ); + additionalSettings.add( + Setting.intSetting("thread_pool.search_worker.size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning) + ); + } + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool, TelemetryProvider telemetryProvider) { IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList(), telemetryProvider); diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index dc92cfd11fce3..203834648eb67 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -186,7 +186,7 @@ final class DefaultSearchContext extends SearchContext { enableQueryPhaseParallelCollection, field -> getFieldCardinality(field, readerContext.indexService(), engineSearcher.getDirectoryReader()) ); - if (executor == null) { + if (maximumNumberOfSlices <= 1) { this.searcher = new ContextIndexSearcher( engineSearcher.getIndexReader(), engineSearcher.getSimilarity(), @@ -290,6 +290,7 @@ static int determineMaximumNumberOfSlices( ToLongFunction fieldCardinality ) { return executor instanceof ThreadPoolExecutor tpe + && tpe.getQueue().isEmpty() && isParallelCollectionSupportedForResults(resultsType, 
request.source(), fieldCardinality, enableQueryPhaseParallelCollection) ? tpe.getMaximumPoolSize() : 1; diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 67d5d6337d77c..26c3bf6ceeffe 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -143,7 +143,6 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -229,7 +228,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv "search.worker_threads_enabled", true, Property.NodeScope, - Property.Dynamic + Property.Dynamic, + Property.DeprecatedWarning ); public static final Setting QUERY_PHASE_PARALLEL_COLLECTION_ENABLED = Setting.boolSetting( @@ -282,7 +282,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final FetchPhase fetchPhase; private final RankFeatureShardPhase rankFeatureShardPhase; - private volatile boolean enableSearchWorkerThreads; + private volatile Executor searchExecutor; private volatile boolean enableQueryPhaseParallelCollection; private volatile long defaultKeepAlive; @@ -376,7 +376,10 @@ public SearchService( clusterService.getClusterSettings() .addSettingsUpdateConsumer(ENABLE_REWRITE_AGGS_TO_FILTER_BY_FILTER, this::setEnableRewriteAggsToFilterByFilter); - enableSearchWorkerThreads = SEARCH_WORKER_THREADS_ENABLED.get(settings); + if (SEARCH_WORKER_THREADS_ENABLED.get(settings)) { + searchExecutor = threadPool.executor(Names.SEARCH); + } + clusterService.getClusterSettings().addSettingsUpdateConsumer(SEARCH_WORKER_THREADS_ENABLED, this::setEnableSearchWorkerThreads); 
enableQueryPhaseParallelCollection = QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.get(settings); @@ -385,7 +388,11 @@ public SearchService( } private void setEnableSearchWorkerThreads(boolean enableSearchWorkerThreads) { - this.enableSearchWorkerThreads = enableSearchWorkerThreads; + if (enableSearchWorkerThreads) { + searchExecutor = threadPool.executor(Names.SEARCH); + } else { + searchExecutor = null; + } } private void setEnableQueryPhaseParallelCollection(boolean enableQueryPhaseParallelCollection) { @@ -1126,7 +1133,6 @@ private DefaultSearchContext createSearchContext( reader.indexShard().shardId(), request.getClusterAlias() ); - ExecutorService executor = this.enableSearchWorkerThreads ? threadPool.executor(Names.SEARCH_WORKER) : null; searchContext = new DefaultSearchContext( reader, request, @@ -1135,7 +1141,7 @@ private DefaultSearchContext createSearchContext( timeout, fetchPhase, lowLevelCancellation, - executor, + searchExecutor, resultsType, enableQueryPhaseParallelCollection, minimumDocsPerSlice diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 293bb1eff6f3f..4937d253209b9 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -356,6 +356,13 @@ private T search(Weight weight, CollectorManager } } + /** + * Similar to the lucene implementation, with the following changes made: + * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search threads + * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene + * does not allow consuming them from a different thread. 
+ * 2) handles the ES TimeExceededException + */ @Override public void search(List leaves, Weight weight, Collector collector) throws IOException { collector.setWeight(weight); diff --git a/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java b/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java index a4046f2f1594c..cfae0c9bce38a 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java +++ b/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java @@ -70,16 +70,6 @@ public Map getBuilders(Settings settings, int allocated new EsExecutors.TaskTrackingConfig(true, searchAutoscalingEWMA) ) ); - result.put( - ThreadPool.Names.SEARCH_WORKER, - new FixedExecutorBuilder( - settings, - ThreadPool.Names.SEARCH_WORKER, - searchOrGetThreadPoolSize, - -1, - EsExecutors.TaskTrackingConfig.DEFAULT - ) - ); result.put( ThreadPool.Names.SEARCH_COORDINATION, new FixedExecutorBuilder( diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 10b92f8c6dace..3a9f091a934b3 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -87,7 +87,6 @@ public static class Names { public static final String ANALYZE = "analyze"; public static final String WRITE = "write"; public static final String SEARCH = "search"; - public static final String SEARCH_WORKER = "search_worker"; public static final String SEARCH_COORDINATION = "search_coordination"; public static final String AUTO_COMPLETE = "auto_complete"; public static final String SEARCH_THROTTLED = "search_throttled"; @@ -157,7 +156,6 @@ public static ThreadPoolType fromType(String type) { entry(Names.ANALYZE, ThreadPoolType.FIXED), entry(Names.WRITE, ThreadPoolType.FIXED), entry(Names.SEARCH, ThreadPoolType.FIXED), - 
entry(Names.SEARCH_WORKER, ThreadPoolType.FIXED), entry(Names.SEARCH_COORDINATION, ThreadPoolType.FIXED), entry(Names.AUTO_COMPLETE, ThreadPoolType.FIXED), entry(Names.MANAGEMENT, ThreadPoolType.SCALING), diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 9236339713e5a..49701ca774c54 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -2747,7 +2747,7 @@ public void testEnableSearchWorkerThreads() throws IOException { */ public void testSlicingBehaviourForParallelCollection() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY); - ThreadPoolExecutor executor = (ThreadPoolExecutor) indexService.getThreadPool().executor(ThreadPool.Names.SEARCH_WORKER); + ThreadPoolExecutor executor = (ThreadPoolExecutor) indexService.getThreadPool().executor(ThreadPool.Names.SEARCH); final int configuredMaxPoolSize = 10; executor.setMaximumPoolSize(configuredMaxPoolSize); // We set this explicitly to be independent of CPU cores. 
int numDocs = randomIntBetween(50, 100); @@ -2837,7 +2837,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertNull(searcher.getExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( @@ -2853,7 +2853,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertNull(searcher.getExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( @@ -2876,7 +2876,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertNull(searcher.getExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( diff --git a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java index a0f37bcbb7fb1..a2b5671944405 100644 --- a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java @@ -39,7 +39,7 @@ public class DfsPhaseTests extends ESTestCase { @Before public final void init() { threadPool = new 
TestThreadPool(DfsPhaseTests.class.getName()); - threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH_WORKER); + threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH); } @After diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index f5e69a65a6d06..8629a96acc5a4 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1846,8 +1846,6 @@ private Environment createEnvironment(String nodeName) { Settings.builder() .put(NODE_NAME_SETTING.getKey(), nodeName) .put(PATH_HOME_SETTING.getKey(), tempDir.resolve(nodeName).toAbsolutePath()) - // test uses the same executor service for all thread pools, search worker would need to be a different one - .put(SearchService.SEARCH_WORKER_THREADS_ENABLED.getKey(), false) .put(Environment.PATH_REPO_SETTING.getKey(), tempDir.resolve("repo").toAbsolutePath()) .putList( ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(), diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index b19f058d2c6c6..395ae07765016 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -24,8 +24,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedTransferQueue; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig.DEFAULT; @@ -370,25 +368,6 @@ public void testWriteThreadPoolUsesTaskExecutionTimeTrackingEsThreadPoolExecutor } } - public void 
testSearchWorkedThreadPool() { - final int allocatedProcessors = randomIntBetween(1, EsExecutors.allocatedProcessors(Settings.EMPTY)); - final ThreadPool threadPool = new TestThreadPool( - "test", - Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), allocatedProcessors).build() - ); - try { - ExecutorService executor = threadPool.executor(ThreadPool.Names.SEARCH_WORKER); - assertThat(executor, instanceOf(ThreadPoolExecutor.class)); - ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) executor; - int expectedPoolSize = allocatedProcessors * 3 / 2 + 1; - assertEquals(expectedPoolSize, threadPoolExecutor.getCorePoolSize()); - assertEquals(expectedPoolSize, threadPoolExecutor.getMaximumPoolSize()); - assertThat(threadPoolExecutor.getQueue(), instanceOf(LinkedTransferQueue.class)); - } finally { - assertTrue(terminate(threadPool)); - } - } - public void testScheduledOneShotRejection() { final var name = "fixed-bounded"; final var threadPool = new TestThreadPool( diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index f3fc4479a21a4..6ca513516d90e 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -211,7 +211,7 @@ public abstract class AggregatorTestCase extends ESTestCase { @Before public final void initPlugins() { threadPool = new TestThreadPool(AggregatorTestCase.class.getName()); - threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH_WORKER); + threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH); List plugins = new ArrayList<>(getSearchPlugins()); plugins.add(new AggCardinalityUpperBoundPlugin()); SearchModule searchModule = new SearchModule(Settings.EMPTY, plugins); diff --git 
a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java index 5bb393ff70e83..00f08b1fa8eca 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java @@ -8,33 +8,22 @@ package org.elasticsearch.search.internal; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.io.IOException; - public class ConcurrentSearchSingleNodeTests extends ESSingleNodeTestCase { private final boolean concurrentSearch = randomBoolean(); - public void testConcurrentSearch() throws IOException { + public void testConcurrentSearch() { client().admin().indices().prepareCreate("index").get(); - IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.iterator().next(); - IndexShard shard = indexService.getShard(0); - SearchService searchService = getInstanceFromNode(SearchService.class); - ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); - try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { - ContextIndexSearcher searcher = searchContext.searcher(); - if (concurrentSearch) { - assertEquals(1, searcher.getMinimumDocsPerSlice()); - } else { - assertEquals(50_000, searcher.getMinimumDocsPerSlice()); - } + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + int minDocsPerSlice = 
SearchService.MINIMUM_DOCS_PER_SLICE.get(clusterService.getSettings()); + if (concurrentSearch) { + assertEquals(1, minDocsPerSlice); + } else { + assertEquals(50_000, minDocsPerSlice); } } diff --git a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java index 29da297ce292e..75d23b3baeabf 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java @@ -8,34 +8,23 @@ package org.elasticsearch.search.internal; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESIntegTestCase; -import java.io.IOException; - @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 1) public class ConcurrentSearchTestPluginTests extends ESIntegTestCase { private final boolean concurrentSearch = randomBoolean(); - public void testConcurrentSearch() throws IOException { + public void testConcurrentSearch() { client().admin().indices().prepareCreate("index").get(); - IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class); - IndexService indexService = indicesService.iterator().next(); - IndexShard shard = indexService.getShard(0); - SearchService searchService = internalCluster().getDataNodeInstance(SearchService.class); - ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); - try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { - ContextIndexSearcher 
searcher = searchContext.searcher(); - if (concurrentSearch) { - assertEquals(1, searcher.getMinimumDocsPerSlice()); - } else { - assertEquals(50_000, searcher.getMinimumDocsPerSlice()); - } + ClusterService clusterService = internalCluster().getDataNodeInstance(ClusterService.class); + int minDocsPerSlice = SearchService.MINIMUM_DOCS_PER_SLICE.get(clusterService.getSettings()); + if (concurrentSearch) { + assertEquals(1, minDocsPerSlice); + } else { + assertEquals(50_000, minDocsPerSlice); } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index 8c978c3445526..9875ab03088aa 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -221,7 +221,6 @@ public static boolean assertCurrentThreadMayAccessBlobStore() { ThreadPool.Names.SNAPSHOT, ThreadPool.Names.GENERIC, ThreadPool.Names.SEARCH, - ThreadPool.Names.SEARCH_WORKER, ThreadPool.Names.SEARCH_THROTTLED, // Cache asynchronous fetching runs on a dedicated thread pool. From 40ddde20248f85118b3476e4d224d8819c5c98f4 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 19 Aug 2024 16:28:53 +0200 Subject: [PATCH 175/417] Remove explicit loop over leaves in favour of calling super.search (#111971) The searchLeaf protected method was recently introduced in IndexSearcher upstream, we do still need to override a bunch of search methods, but we can slightly reduce the surface of the code we override by removing the loop over the leaves, and rely on super instead. 
--- .../search/internal/ContextIndexSearcher.java | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 4937d253209b9..a352b4f9189b3 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -319,12 +319,9 @@ public T search(Query query, CollectorManager col } /** - * Similar to the lucene implementation, with the following changes made: - * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads - * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene - * does not allow consuming them from a different thread. - * 2) handles the ES TimeExceededException - * */ + * Same implementation as the default one in Lucene, with an additional call to postCollection in cased there are no segments. + * The rest is a plain copy from Lucene. 
+ */ private T search(Weight weight, CollectorManager collectorManager, C firstCollector) throws IOException { LeafSlice[] leafSlices = getSlices(); if (leafSlices.length == 0) { @@ -365,12 +362,9 @@ private T search(Weight weight, CollectorManager */ @Override public void search(List leaves, Weight weight, Collector collector) throws IOException { - collector.setWeight(weight); boolean success = false; try { - for (LeafReaderContext ctx : leaves) { // search each subreader - searchLeaf(ctx, weight, collector); - } + super.search(leaves, weight, collector); success = true; } catch (@SuppressWarnings("unused") TimeExceededException e) { timeExceeded = true; From 233c17aa99b0ae5c1c34d3a53dc98c4d502c4f28 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Mon, 26 Aug 2024 15:08:49 -0400 Subject: [PATCH 176/417] Add range and regexp Intervals (#111465) Lucene/pull/13562 introduced IntervalsSource for range and regexp queries. This exposes these features in ES. This is done to achieve parity with Span queries that support regexp and range. 
Relates to #110491 --- docs/changelog/111465.yaml | 5 + .../query-dsl/intervals-query.asciidoc | 66 ++++ .../extras/MatchOnlyTextFieldMapper.java | 24 ++ .../test/search/230_interval_query.yml | 42 +++ .../index/mapper/MappedFieldType.java | 24 ++ .../index/mapper/PlaceHolderFieldMapper.java | 16 + .../index/mapper/TextFieldMapper.java | 22 ++ .../index/query/IntervalsSourceProvider.java | 305 +++++++++++++++++- .../elasticsearch/search/SearchModule.java | 10 + .../query/IntervalQueryBuilderTests.java | 132 +++++++- .../RangeIntervalsSourceProviderTests.java | 70 ++++ .../RegexpIntervalsSourceProviderTests.java | 61 ++++ 12 files changed, 774 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/111465.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java diff --git a/docs/changelog/111465.yaml b/docs/changelog/111465.yaml new file mode 100644 index 0000000000000..2a8df287427a9 --- /dev/null +++ b/docs/changelog/111465.yaml @@ -0,0 +1,5 @@ +pr: 111465 +summary: Add range and regexp Intervals +area: Search +type: enhancement +issues: [] diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc index 1e3380389d861..84869838fe1e6 100644 --- a/docs/reference/query-dsl/intervals-query.asciidoc +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -73,7 +73,9 @@ Valid rules include: * <> * <> * <> +* <> * <> +* <> * <> * <> -- @@ -178,6 +180,36 @@ The `pattern` is normalized using the search analyzer from this field, unless `analyzer` is specified separately. -- +[[intervals-regexp]] +==== `regexp` rule parameters + +The `regexp` rule matches terms using a regular expression pattern. +This pattern can expand to match at most 128 terms. +If the pattern matches more than 128 terms,{es} returns an error. 
+ +`pattern`:: +(Required, string) Regexp pattern used to find matching terms. +For a list of operators supported by the +`regexp` pattern, see <>. + +WARNING: Avoid using wildcard patterns, such as `.*` or `.*?+``. This can +increase the iterations needed to find matching terms and slow search +performance. +-- +`analyzer`:: +(Optional, string) <> used to normalize the `pattern`. +Defaults to the top-level ``'s analyzer. + +`use_field`:: ++ +-- +(Optional, string) If specified, match intervals from this field rather than the +top-level ``. + +The `pattern` is normalized using the search analyzer from this field, unless +`analyzer` is specified separately. +-- + [[intervals-fuzzy]] ==== `fuzzy` rule parameters @@ -214,6 +246,40 @@ The `term` is normalized using the search analyzer from this field, unless `analyzer` is specified separately. -- +[[intervals-range]] +==== `range` rule parameters + +The `range` rule matches terms contained within a provided range. +This range can expand to match at most 128 terms. +If the range matches more than 128 terms,{es} returns an error. + +`gt`:: +(Optional, string) Greater than: match terms greater than the provided term. + +`gte`:: +(Optional, string) Greater than or equal to: match terms greater than or +equal to the provided term. + +`lt`:: +(Optional, string) Less than: match terms less than the provided term. + +`lte`:: +(Optional, string) Less than or equal to: match terms less than or +equal to the provided term. + +NOTE: It is required to provide one of `gt` or `gte` params. +It is required to provide one of `lt` or `lte` params. + + +`analyzer`:: +(Optional, string) <> used to normalize the `pattern`. +Defaults to the top-level ``'s analyzer. + +`use_field`:: +(Optional, string) If specified, match intervals from this field rather than the +top-level ``. 
+ + [[intervals-all_of]] ==== `all_of` rule parameters diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index b3cd3586fca54..93101298f1622 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -304,6 +304,30 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex ); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + return toIntervalsSource( + Intervals.regexp(pattern), + new MatchAllDocsQuery(), // regexp queries can be expensive, what should the approximation be? + context + ); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + return toIntervalsSource( + Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper), + new MatchAllDocsQuery(), // range queries can be expensive, what should the approximation be? 
+ context + ); + } + @Override public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, SearchExecutionContext queryShardContext) throws IOException { diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml index 99bd001bd95e2..e828c9ce8d8a8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml @@ -476,3 +476,45 @@ setup: - match: { hits.hits.0._id: "6" } - match: { hits.hits.1._id: "5" } +--- +"Test regexp": + - requires: + cluster_features: "gte_v8.16.0" + reason: "Implemented in 8.16" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cold + - regexp: + pattern: ou.*ide + - match: { hits.total.value: 3 } + + +--- +"Test range": + - requires: + cluster_features: "gte_v8.16.0" + reason: "Implemented in 8.16" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cold + - range: + gte: out + lte: ouu + - match: { hits.total.value: 3 } + diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 13d258bf24012..08a5260d79d6a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -442,6 +442,30 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex ); } + /** + * Create a regexp {@link IntervalsSource} for the given pattern. 
+ */ + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + throw new IllegalArgumentException( + "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" + ); + } + + /** + * Create a range {@link IntervalsSource} for the given ranges + */ + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + throw new IllegalArgumentException( + "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" + ); + } + /** * An enum used to describe the relation between the range of terms in a * shard when compared with a query range diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java index 4a12ed77b4f26..85a8f45b9efa4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java @@ -247,6 +247,22 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex throw new QueryShardException(context, fail("wildcard intervals query")); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + throw new QueryShardException(context, fail("regexp intervals query")); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + throw new QueryShardException(context, fail("range intervals query")); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { throw new IllegalArgumentException(fail("aggregation or sorts")); diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 8da769c3543d0..89bae7f6acc98 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -848,6 +848,28 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex return Intervals.wildcard(pattern); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + if (getTextSearchInfo().hasPositions() == false) { + throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + } + return Intervals.regexp(pattern); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + if (getTextSearchInfo().hasPositions() == false) { + throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + } + return Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper); + } + private void checkForPositions() { if (getTextSearchInfo().hasPositions() == false) { throw new IllegalStateException("field:[" + name() + "] was indexed without position data; cannot run PhraseQuery"); diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java index 9a326cf927cf6..359b546ff6f3f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -76,10 +76,16 @@ public static IntervalsSourceProvider fromXContent(XContentParser parser) throws return Wildcard.fromXContent(parser); case "fuzzy": 
return Fuzzy.fromXContent(parser); + case "regexp": + return Regexp.fromXContent(parser); + case "range": + return Range.fromXContent(parser); } throw new ParsingException( parser.getTokenLocation(), - "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of, prefix, wildcard]" + "Unknown interval type [" + + parser.currentName() + + "], expecting one of [match, any_of, all_of, prefix, wildcard, regexp, range]" ); } @@ -746,6 +752,124 @@ String getUseField() { } } + public static class Regexp extends IntervalsSourceProvider { + + public static final String NAME = "regexp"; + + private final String pattern; + private final String analyzer; + private final String useField; + + public Regexp(String pattern, String analyzer, String useField) { + this.pattern = pattern; + this.analyzer = analyzer; + this.useField = useField; + } + + public Regexp(StreamInput in) throws IOException { + this.pattern = in.readString(); + this.analyzer = in.readOptionalString(); + this.useField = in.readOptionalString(); + } + + @Override + public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getIndexAnalyzers().get(this.analyzer); + } + if (useField != null) { + fieldType = context.getFieldType(useField); + assert fieldType != null; + } + if (analyzer == null) { + analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + } + BytesRef normalizedPattern = analyzer.normalize(fieldType.name(), pattern); + IntervalsSource source = fieldType.regexpIntervals(normalizedPattern, context); + if (useField != null) { + source = Intervals.fixField(useField, source); + } + return source; + } + + @Override + public void extractFields(Set fields) { + if (useField != null) { + fields.add(useField); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + Regexp regexp = (Regexp) o; + return Objects.equals(pattern, regexp.pattern) + && Objects.equals(analyzer, regexp.analyzer) + && Objects.equals(useField, regexp.useField); + } + + @Override + public int hashCode() { + return Objects.hash(pattern, analyzer, useField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(pattern); + out.writeOptionalString(analyzer); + out.writeOptionalString(useField); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field("pattern", pattern); + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (useField != null) { + builder.field("use_field", useField); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String term = (String) args[0]; + String analyzer = (String) args[1]; + String useField = (String) args[2]; + return new Regexp(term, analyzer, useField); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("pattern")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); + } + + public static Regexp fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + String getPattern() { + return pattern; + } + + String getAnalyzer() { + return analyzer; + } + + String getUseField() { + return useField; + } + } + public static class Fuzzy extends IntervalsSourceProvider { public static final String NAME = "fuzzy"; @@ -907,6 +1031,185 @@ String getUseField() { } } + public static class Range extends IntervalsSourceProvider { + + public static final String NAME = "range"; + + private final String lowerTerm; + private final 
String upperTerm; + private final boolean includeLower; + private final boolean includeUpper; + private final String analyzer; + private final String useField; + + public Range(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, String analyzer, String useField) { + this.lowerTerm = lowerTerm; + this.upperTerm = upperTerm; + this.includeLower = includeLower; + this.includeUpper = includeUpper; + this.analyzer = analyzer; + this.useField = useField; + } + + public Range(StreamInput in) throws IOException { + this.lowerTerm = in.readString(); + this.upperTerm = in.readString(); + this.includeLower = in.readBoolean(); + this.includeUpper = in.readBoolean(); + this.analyzer = in.readOptionalString(); + this.useField = in.readOptionalString(); + } + + @Override + public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getIndexAnalyzers().get(this.analyzer); + } + if (useField != null) { + fieldType = context.getFieldType(useField); + assert fieldType != null; + } + if (analyzer == null) { + analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + } + BytesRef normalizedLowerTerm = analyzer.normalize(fieldType.name(), lowerTerm); + BytesRef normalizedUpperTerm = analyzer.normalize(fieldType.name(), upperTerm); + + IntervalsSource source = fieldType.rangeIntervals( + normalizedLowerTerm, + normalizedUpperTerm, + includeLower, + includeUpper, + context + ); + if (useField != null) { + source = Intervals.fixField(useField, source); + } + return source; + } + + @Override + public void extractFields(Set fields) { + if (useField != null) { + fields.add(useField); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Range range = (Range) o; + return includeLower == range.includeLower + && includeUpper == range.includeUpper + && 
Objects.equals(lowerTerm, range.lowerTerm) + && Objects.equals(upperTerm, range.upperTerm) + && Objects.equals(analyzer, range.analyzer) + && Objects.equals(useField, range.useField); + } + + @Override + public int hashCode() { + return Objects.hash(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(lowerTerm); + out.writeString(upperTerm); + out.writeBoolean(includeLower); + out.writeBoolean(includeUpper); + out.writeOptionalString(analyzer); + out.writeOptionalString(useField); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + if (includeLower) { + builder.field("gte", lowerTerm); + } else { + builder.field("gt", lowerTerm); + } + if (includeUpper) { + builder.field("lte", upperTerm); + } else { + builder.field("lt", upperTerm); + } + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (useField != null) { + builder.field("use_field", useField); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String gte = (String) args[0]; + String gt = (String) args[1]; + String lte = (String) args[2]; + String lt = (String) args[3]; + if ((gte == null && gt == null) || (gte != null && gt != null)) { + throw new IllegalArgumentException("Either [gte] or [gt], one of them must be provided"); + } + if ((lte == null && lt == null) || (lte != null && lt != null)) { + throw new IllegalArgumentException("Either [lte] or [lt], one of them must be provided"); + } + boolean includeLower = gte != null ? true : false; + String lowerTerm = gte != null ? gte : gt; + boolean includeUpper = lte != null ? true : false; + String upperTerm = lte != null ? 
lte : lt; + String analyzer = (String) args[4]; + String useField = (String) args[5]; + return new Range(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + }); + + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("gte")); + PARSER.declareString(optionalConstructorArg(), new ParseField("gt")); + PARSER.declareString(optionalConstructorArg(), new ParseField("lte")); + PARSER.declareString(optionalConstructorArg(), new ParseField("lt")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); + } + + public static Range fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + String getLowerTerm() { + return lowerTerm; + } + + String getUpperTerm() { + return upperTerm; + } + + boolean getIncludeLower() { + return includeLower; + } + + boolean getIncludeUpper() { + return includeUpper; + } + + String getAnalyzer() { + return analyzer; + } + + String getUseField() { + return useField; + } + } + static class ScriptFilterSource extends FilteredIntervalsSource { final IntervalFilterScript script; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 33c64f3eb6350..777fe85c6ebb1 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -1260,6 +1260,16 @@ public static List getIntervalsSourceProviderNamed IntervalsSourceProvider.class, IntervalsSourceProvider.Fuzzy.NAME, IntervalsSourceProvider.Fuzzy::new + ), + new NamedWriteableRegistry.Entry( + IntervalsSourceProvider.class, + IntervalsSourceProvider.Regexp.NAME, + IntervalsSourceProvider.Regexp::new + ), + new NamedWriteableRegistry.Entry( + IntervalsSourceProvider.class, + IntervalsSourceProvider.Range.NAME, + IntervalsSourceProvider.Range::new ) 
); } diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 79cc850c4b8cc..37c7172623b54 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -725,8 +725,72 @@ public void testPrefixes() throws IOException { assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } - public void testWildcard() throws IOException { + public void testRegexp() throws IOException { + String json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m" + } + } + } + }""", TEXT_FIELD_NAME); + + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.regexp(new BytesRef("te.*m"))); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String no_positions_json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m" + } + } + } + } + """, NO_POSITIONS_FIELD); + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(no_positions_json); + builder1.toQuery(createSearchExecutionContext()); + }); + + String fixed_field_json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m", + "use_field": "masked_field" + } + } + } + }""", TEXT_FIELD_NAME); + + builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); + expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.regexp(new BytesRef("te.*m")))); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String fixed_field_json_no_positions = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m", + "use_field": "%s" + } + } + 
} + }""", TEXT_FIELD_NAME, NO_POSITIONS_FIELD); + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(fixed_field_json_no_positions); + builder1.toQuery(createSearchExecutionContext()); + }); + } + public void testWildcard() throws IOException { String json = Strings.format(""" { "intervals": { @@ -931,7 +995,71 @@ public void testFuzzy() throws IOException { Intervals.fixField(MASKED_FIELD, buildFuzzySource("term", "term", 2, true, Fuzziness.ONE.asDistance("term"))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); - } + public void testRange() throws IOException { + String json = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gte": "aaa", + "lte": "aab" + } + } + } + }""", TEXT_FIELD_NAME); + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), true, true)); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + json = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gt": "aaa", + "lt": "aab" + } + } + } + }""", TEXT_FIELD_NAME); + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), false, false)); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String incomplete_range = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gt": "aaa" + } + } + } + } + """, TEXT_FIELD_NAME); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(incomplete_range); + builder1.toQuery(createSearchExecutionContext()); + }); + assertEquals("Either [lte] or [lt], one of them must be provided", exc.getCause().getMessage()); + + String incomplete_range2 = 
Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "lt": "aaa" + } + } + } + } + """, TEXT_FIELD_NAME); + exc = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(incomplete_range2); + builder1.toQuery(createSearchExecutionContext()); + }); + assertEquals("Either [gte] or [gt], one of them must be provided", exc.getCause().getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java new file mode 100644 index 0000000000000..73b4be4ec6154 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.index.query.IntervalsSourceProvider.Range; + +public class RangeIntervalsSourceProviderTests extends AbstractXContentSerializingTestCase { + + @Override + protected Range createTestInstance() { + return createRandomRange(); + } + + static Range createRandomRange() { + return new Range( + "a" + randomAlphaOfLengthBetween(1, 10), + "z" + randomAlphaOfLengthBetween(1, 10), + randomBoolean(), + randomBoolean(), + randomBoolean() ? randomAlphaOfLength(10) : null, + randomBoolean() ? 
randomAlphaOfLength(10) : null + ); + } + + @Override + protected Range mutateInstance(Range instance) { + String lowerTerm = instance.getLowerTerm(); + String upperTerm = instance.getUpperTerm(); + boolean includeLower = instance.getIncludeLower(); + boolean includeUpper = instance.getIncludeUpper(); + String analyzer = instance.getAnalyzer(); + String useField = instance.getUseField(); + switch (between(0, 5)) { + case 0 -> lowerTerm = "a" + lowerTerm; + case 1 -> upperTerm = "z" + upperTerm; + case 2 -> includeLower = includeLower == false; + case 3 -> includeUpper = includeUpper == false; + case 4 -> analyzer = randomAlphaOfLength(5); + case 5 -> useField = useField == null ? randomAlphaOfLength(5) : null; + } + return new Range(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + } + + @Override + protected Writeable.Reader instanceReader() { + return Range::new; + } + + @Override + protected Range doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + Range range = (Range) IntervalsSourceProvider.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return range; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java new file mode 100644 index 0000000000000..b226a1394c75e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.index.query.IntervalsSourceProvider.Regexp; + +public class RegexpIntervalsSourceProviderTests extends AbstractXContentSerializingTestCase { + + @Override + protected Regexp createTestInstance() { + return createRandomRegexp(); + } + + static Regexp createRandomRegexp() { + return new Regexp( + randomAlphaOfLengthBetween(1, 10), + randomBoolean() ? randomAlphaOfLength(10) : null, + randomBoolean() ? randomAlphaOfLength(10) : null + ); + } + + @Override + protected Regexp mutateInstance(Regexp instance) { + String regexp = instance.getPattern(); + String analyzer = instance.getAnalyzer(); + String useField = instance.getUseField(); + switch (between(0, 2)) { + case 0 -> regexp += "a"; + case 1 -> analyzer = randomAlphaOfLength(5); + case 2 -> useField = useField == null ? randomAlphaOfLength(5) : null; + } + return new Regexp(regexp, analyzer, useField); + } + + @Override + protected Writeable.Reader instanceReader() { + return Regexp::new; + } + + @Override + protected Regexp doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + Regexp regexp = (Regexp) IntervalsSourceProvider.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return regexp; + } +} From c19bfeaeba0e37280acfce487169eefb2c068d4c Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 22 Aug 2024 12:25:11 +0100 Subject: [PATCH 177/417] Fix test compilation for new Lucene 912 Codec (#112093) This commit fixes a compilation issue by updating the test to use the new Lucene 912 codec. 
--- .../index/codec/zstd/StoredFieldCodecDuelTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index 93e9911746d18..92abdb460ef55 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -34,13 +34,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } From 3ce1f91ba50d67dfb0c4f7207486d4ac4f2152e1 
Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 23:17:09 +0200 Subject: [PATCH 178/417] [TEST] Fix compile errors in CountedKeywordFieldTypeTests --- .../xpack/countedkeyword/CountedKeywordFieldTypeTests.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java index c29e4513562fc..04599549cc3cc 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java @@ -77,11 +77,7 @@ private CollectionBasedSortedSetDocValues(List docValues) { @Override public long nextOrd() { - currentOrd++; - if (currentOrd >= docValues.size()) { - return NO_MORE_ORDS; - } - return currentOrd; + return ++currentOrd; } @Override From 2a3a47fb6357c32dfee7df39ebf66ee341dce096 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 23:18:08 +0200 Subject: [PATCH 179/417] [TEST] Fix compile errors in AbstractSearchableSnapshotsTestCase --- .../AbstractSearchableSnapshotsTestCase.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java index 41121453e41a4..4ee2bf7e65633 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java +++ 
b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java @@ -348,8 +348,8 @@ public static Tuple randomChecksumBytes(byte[] bytes) throws IOE * uses a different buffer size for them. */ public static IOContext randomIOContext() { - final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READ, IOContext.READONCE); - assert ioContext.context != IOContext.Context.MERGE; + final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READONCE); + assert ioContext.context() != IOContext.Context.MERGE; return ioContext; } } From fd11aac297b4d310f8133df7d96fdd1a1d24c8c5 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 29 Aug 2024 23:21:21 +0200 Subject: [PATCH 180/417] Fix compile error in SingleValueQueryTests --- .../xpack/esql/querydsl/query/SingleValueQueryTests.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index a6df0e75e2f05..95444c9b2423f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -270,7 +270,10 @@ public List> build(RandomIndexWriter iw) throws IOException { List> fieldValues = new ArrayList<>(100); for (int i = 0; i < 100; i++) { iw.addDocument( - List.of(new LongField("i", i), new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)) + List.of( + new LongField("i", i, Field.Store.NO), + new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO) + ) ); fieldValues.add(List.of()); } From 53bd6eb5feaf4e4feed33c7e08f0af7f4e25a0d0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 30 
Aug 2024 06:11:55 +0000 Subject: [PATCH 181/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-9172cc42472 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 878d2d1042eb7..2481aa54e9b2d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-e45b5ebdf88 +lucene = 10.0.0-snapshot-9172cc42472 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 990f4b60c4f77..93a01689a6f44 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2856,129 +2856,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From cb3939957396c7138d22ed531a8d88f627982b50 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 30 Aug 2024 06:11:55 +0000 Subject: [PATCH 182/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-f23711a3e36 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 279171a990da1..211f0237cad6e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2821,127 +2821,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From b49ac93a4172d25a55dad0baf60a59de588ca959 Mon Sep 17 00:00:00 2001 From: 
Ignacio Vera Date: Fri, 30 Aug 2024 08:35:40 +0200 Subject: [PATCH 183/417] Make docvalue skipper false in FieldTypeTestCase --- .../java/org/elasticsearch/index/mapper/FieldTypeTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index b23eeea6c6c1c..18034fc0c729a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -138,7 +138,7 @@ public FieldInfo getFieldInfoWithName(String name) { randomBoolean(), IndexOptions.NONE, DocValuesType.NONE, - randomBoolean(), + false, -1, new HashMap<>(), 1, From 480c045b01483003a2f9ef11e1097274ef19f385 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 10:24:21 +0200 Subject: [PATCH 184/417] Reword javadoc text for SearchServiceTests#testSlicingBehaviourForParallelCollection Offloading is no longer necessary, but we do want to provide the executor for other operations --- .../java/org/elasticsearch/search/SearchServiceTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 61d0c28de42e6..5b60356df4a28 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -2741,8 +2741,8 @@ public void testEnableSearchWorkerThreads() throws IOException { } /** - * Verify that a single slice is created for requests that don't support parallel collection, while computation - * is still offloaded to the worker threads. 
Also ensure multiple slices are created for requests that do support + * Verify that a single slice is created for requests that don't support parallel collection, while an executor is still + * provided to the searcher to parallelize other operations. Also ensure multiple slices are created for requests that do support * parallel collection. */ public void testSlicingBehaviourForParallelCollection() throws Exception { From dfa82d772d3b00ddaab2eceda458bfddddbeee53 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 10:31:00 +0200 Subject: [PATCH 185/417] Address SearchServiceTests compile errors caused by removal of IndexSearcher#getExecutor --- .../search/internal/ContextIndexSearcher.java | 16 +++++++++----- .../search/SearchServiceTests.java | 22 +++++++++---------- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index a352b4f9189b3..a70e0db249541 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -75,6 +75,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private QueryProfiler profiler; private final MutableQueryTimeout cancellable; + private final boolean hasExecutor; private final int maximumNumberOfSlices; // don't create slices with less than this number of docs private final int minimumDocsPerSlice; @@ -132,6 +133,7 @@ public ContextIndexSearcher( int minimumDocsPerSlice ) throws IOException { super(wrapWithExitableDirectoryReader ? 
new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader, executor); + this.hasExecutor = executor != null; setSimilarity(similarity); setQueryCache(queryCache); setQueryCachingPolicy(queryCachingPolicy); @@ -140,6 +142,15 @@ public ContextIndexSearcher( this.maximumNumberOfSlices = maximumNumberOfSlices; } + /** + * Whether an executor was provided at construction time or not. This indicates whether operations that support concurrency + * may be executed concurrently. It is not straightforward to deduct this from {@link #getTaskExecutor()} because {@link IndexSearcher} + * creates a {@link org.apache.lucene.search.TaskExecutor} anyways. + */ + public boolean hasExecutor() { + return hasExecutor; + } + @Override protected LeafSlice[] slices(List leaves) { // we offload to the executor unconditionally, including requests that don't support concurrency @@ -148,11 +159,6 @@ protected LeafSlice[] slices(List leaves) { return leafSlices; } - // package private for testing - int getMinimumDocsPerSlice() { - return minimumDocsPerSlice; - } - public void setProfiler(QueryProfiler profiler) { this.profiler = profiler; } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 49701ca774c54..87839561c33d5 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -2713,7 +2713,7 @@ public void testEnableSearchWorkerThreads() throws IOException { SearchShardTask task = new SearchShardTask(0, "type", "action", "description", null, emptyMap()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } try { @@ -2724,7 +2724,7 @@ public void testEnableSearchWorkerThreads() throws 
IOException { .get(); assertTrue(response.isAcknowledged()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNull(searchContext.searcher().getExecutor()); + assertFalse(searchContext.searcher().hasExecutor()); } } finally { // reset original default setting @@ -2734,15 +2734,15 @@ public void testEnableSearchWorkerThreads() throws IOException { .setPersistentSettings(Settings.builder().putNull(SEARCH_WORKER_THREADS_ENABLED.getKey()).build()) .get(); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } } } } /** - * Verify that a single slice is created for requests that don't support parallel collection, while computation - * is still offloaded to the worker threads. Also ensure multiple slices are created for requests that do support + * Verify that a single slice is created for requests that don't support parallel collection, while an executor is still + * provided to the searcher to parallelize other operations. Also ensure multiple slices are created for requests that do support * parallel collection. 
*/ public void testSlicingBehaviourForParallelCollection() throws Exception { @@ -2777,7 +2777,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2807,7 +2807,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2837,7 +2837,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( @@ -2853,7 +2853,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( @@ -2876,7 +2876,7 @@ public void testSlicingBehaviourForParallelCollection() throws 
Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); searcher.search(termQuery, new TotalHitCountCollectorManager()); assertBusy( @@ -2898,7 +2898,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( From 54ed24b623a54a6d4eb8f01c05d50ebd36e2a0d3 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 30 Aug 2024 10:17:57 +0100 Subject: [PATCH 186/417] TimeLimitingCollector has been removed. --- .../src/main/resources/forbidden/es-server-signatures.txt | 4 ---- 1 file changed, 4 deletions(-) diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 7475e77bc0805..a9095480fe294 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -58,10 +58,6 @@ org.apache.lucene.util.Version#parseLeniently(java.lang.String) org.apache.lucene.index.NoMergePolicy#INSTANCE @ explicit use of NoMergePolicy risks forgetting to configure NoMergeScheduler; use org.elasticsearch.common.lucene.Lucene#indexWriterConfigWithNoMerging() instead. 
-@defaultMessage Spawns a new thread which is solely under lucenes control use ThreadPool#relativeTimeInMillis instead -org.apache.lucene.search.TimeLimitingCollector#getGlobalTimerThread() -org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter() - @defaultMessage Don't interrupt threads use FutureUtils#cancel(Future) instead java.util.concurrent.Future#cancel(boolean) From 749020c27413a825f6ce2934a1df348d00c6eaf4 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 30 Aug 2024 10:48:20 +0100 Subject: [PATCH 187/417] Update Version in LiveVersionMapTests and SegmentTests --- .../index/engine/LiveVersionMapTests.java | 27 +++++++------------ .../index/engine/SegmentTests.java | 2 +- 2 files changed, 11 insertions(+), 18 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index 8d357413b09cd..58c342715b50b 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -12,7 +12,6 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.Constants; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; @@ -70,22 +69,16 @@ public void testRamBytesUsed() throws Exception { } actualRamBytesUsed = RamUsageTester.ramUsed(map); estimatedRamBytesUsed = map.ramBytesUsed(); - long tolerance; - if (Constants.JRE_IS_MINIMUM_JAVA9) { - // With Java 9, RamUsageTester computes the memory usage of maps as - // the memory usage of an array that would contain exactly all keys - // and values. 
This is an under-estimation of the actual memory - // usage since it ignores the impact of the load factor and of the - // linked list/tree that is used to resolve collisions. So we use a - // bigger tolerance. - // less than 50% off - tolerance = actualRamBytesUsed / 2; - } else { - // Java 8 is more accurate by doing reflection into the actual JDK classes - // so we give it a lower error bound. - // less than 25% off - tolerance = actualRamBytesUsed / 4; - } + + // Since Java 9, RamUsageTester computes the memory usage of maps as + // the memory usage of an array that would contain exactly all keys + // and values. This is an under-estimation of the actual memory + // usage since it ignores the impact of the load factor and of the + // linked list/tree that is used to resolve collisions. So we use a + // bigger tolerance. + // less than 50% off + long tolerance = actualRamBytesUsed / 2; + assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java index 7e45e5f91489b..6c11cb443f464 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java @@ -79,7 +79,7 @@ static Segment randomSegment() { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_8_0_0; + segment.version = Version.LUCENE_9_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.segmentSort = randomIndexSort(); From 26456b1d57eec6222ae5e546bb5c868a946bac79 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 11:57:05 +0200 Subject: [PATCH 188/417] Resolve compile errors in StemmerTokenFilterFactory --- 
.../common/StemmerTokenFilterFactory.java | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 69b0921a4144f..f745e827be7bd 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -37,7 +37,6 @@ import org.apache.lucene.analysis.lv.LatvianStemFilter; import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream; import org.apache.lucene.analysis.no.NorwegianLightStemFilter; -import org.apache.lucene.analysis.no.NorwegianLightStemmer; import org.apache.lucene.analysis.no.NorwegianMinimalStemFilter; import org.apache.lucene.analysis.pt.PortugueseLightStemFilter; import org.apache.lucene.analysis.pt.PortugueseMinimalStemFilter; @@ -59,14 +58,11 @@ import org.tartarus.snowball.ext.EstonianStemmer; import org.tartarus.snowball.ext.FinnishStemmer; import org.tartarus.snowball.ext.FrenchStemmer; -import org.tartarus.snowball.ext.German2Stemmer; import org.tartarus.snowball.ext.GermanStemmer; import org.tartarus.snowball.ext.HungarianStemmer; import org.tartarus.snowball.ext.IrishStemmer; import org.tartarus.snowball.ext.ItalianStemmer; -import org.tartarus.snowball.ext.KpStemmer; import org.tartarus.snowball.ext.LithuanianStemmer; -import org.tartarus.snowball.ext.LovinsStemmer; import org.tartarus.snowball.ext.NorwegianStemmer; import org.tartarus.snowball.ext.PortugueseStemmer; import org.tartarus.snowball.ext.RomanianStemmer; @@ -116,8 +112,8 @@ public TokenStream create(TokenStream tokenStream) { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || 
"dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new KpStemmer()); - + // NOCOMMIT KPStemmer has been removed, what is the migration path for users relying on it? + throw new UnsupportedOperationException(); // English stemmers } else if ("english".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -126,7 +122,8 @@ public TokenStream create(TokenStream tokenStream) { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new LovinsStemmer()); + // NOCOMMIT LovinsStemmer has been removed, what is the migration path for users relying on it? + throw new UnsupportedOperationException(); } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { @@ -166,7 +163,8 @@ public TokenStream create(TokenStream tokenStream) { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new German2Stemmer()); + // NOCOMMIT how about bw comp for users relying on german2 stemmer that is now folded into german stemmer? 
+ return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); } else if ("minimal_german".equalsIgnoreCase(language) || "minimalGerman".equalsIgnoreCase(language)) { @@ -212,9 +210,9 @@ public TokenStream create(TokenStream tokenStream) { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { - return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); + return new NorwegianLightStemFilter(tokenStream, 2); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { - return new NorwegianMinimalStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); + return new NorwegianMinimalStemFilter(tokenStream, 2); // Persian stemmers } else if ("persian".equalsIgnoreCase(language)) { From 70175882e78a61fdd65cf0f48bdd894c47fc659b Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 30 Aug 2024 11:13:22 +0100 Subject: [PATCH 189/417] Add Lucene 10.0.0 IndevVersion --- .../src/main/java/org/elasticsearch/index/IndexVersions.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 4c7a1dfeaefb6..b94321b8aa886 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -124,6 +124,9 @@ private static Version parseUnchecked(String version) { public static final IndexVersion INDEX_SORTING_ON_NESTED = def(8_512_00_0, Version.LUCENE_9_11_1); public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_00_0, Version.LUCENE_9_11_1); public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_600_00_0, Version.LUCENE_9_12_0); + + public static final 
IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ From 69c1658cebc02f3f0eee529dc2d9dcc4d08702ea Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 30 Aug 2024 11:54:23 +0100 Subject: [PATCH 190/417] Determinize the system index indexPatternAutomaton --- .../java/org/elasticsearch/indices/SystemIndexDescriptor.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index f200abb7c1b66..01ce14b41cac6 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -40,6 +40,8 @@ import java.util.Objects; import java.util.Set; +import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; + /** * Uses a pattern string to define a protected space for indices belonging to a system feature, and, if needed, provides metadata for * managing indices that match the pattern. 
@@ -356,7 +358,7 @@ protected SystemIndexDescriptor( this.primaryIndex = primaryIndex; this.aliasName = aliasName; - final Automaton automaton = buildAutomaton(indexPattern, aliasName); + final Automaton automaton = Operations.determinize(buildAutomaton(indexPattern, aliasName), DEFAULT_DETERMINIZE_WORK_LIMIT); this.indexPatternAutomaton = new CharacterRunAutomaton(automaton); if (primaryIndex != null && indexPatternAutomaton.run(primaryIndex) == false) { throw new IllegalArgumentException("primary index does not match the index pattern!"); From 6e2f8ef471ff7969208d1980907848c09af9b025 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 14:38:19 +0200 Subject: [PATCH 191/417] Fix compile error in AutomataMatch around ByteRunAutomaton creation --- .../esql/expression/function/scalar/string/AutomataMatch.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java index 09166f0cff7a8..0af22a357aeca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java @@ -37,7 +37,7 @@ public static EvalOperator.ExpressionEvaluator.Factory toEvaluator( * we couldn't get a nice toDot - so we call UTF32ToUTF8 ourselves. 
*/ Automaton automaton = Operations.determinize(new UTF32ToUTF8().convert(utf32Automaton), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - ByteRunAutomaton run = new ByteRunAutomaton(automaton, true, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + ByteRunAutomaton run = new ByteRunAutomaton(automaton, true); return new AutomataMatchEvaluator.Factory(source, field, run, toDot(automaton)); } From 4222481a5a42e6cfed7a29cfd668fcf1221606d9 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 14:43:26 +0200 Subject: [PATCH 192/417] Add missing determinize call in RLikePattern --- .../esql/core/expression/predicate/regex/RLikePattern.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java index 4257285ba8bd7..b306bb9838a03 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,7 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize(new RegExp(regexpPattern).toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override From d8cbc15da1445688c0a1299b3a0c90adcaa49b0d Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 14:45:40 +0200 Subject: [PATCH 193/417] Add missing determinize call in 
ExpressionBuilder#visitQualifiedNamePattern --- .../org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 0352afdee4622..edb728f7aeca5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -383,7 +383,7 @@ public NamedExpression visitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePat // use the fast run variant result = new UnresolvedNamePattern( src, - new CharacterRunAutomaton(Operations.concatenate(list)), + new CharacterRunAutomaton(Operations.determinize(Operations.concatenate(list), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)), patternString.toString(), nameString.toString() ); From 6cd8dce87fbbe19c2af063611c1db70e76dc282f Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 15:21:23 +0200 Subject: [PATCH 194/417] Replace NOCOMMIT comments with //TODO Lucene 10 upgrade to be able to run tests in CI --- .../analysis/common/StemmerTokenFilterFactory.java | 8 +++++--- .../java/org/elasticsearch/percolator/QueryAnalyzer.java | 2 +- .../java/org/elasticsearch/index/query/RegexpFlag.java | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index f745e827be7bd..12815d93cee41 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -112,7 +112,7 @@ public 
TokenStream create(TokenStream tokenStream) { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - // NOCOMMIT KPStemmer has been removed, what is the migration path for users relying on it? + //TODO Lucene 10 upgrade: KPStemmer has been removed, what is the migration path for users relying on it? throw new UnsupportedOperationException(); // English stemmers } else if ("english".equalsIgnoreCase(language)) { @@ -122,7 +122,7 @@ public TokenStream create(TokenStream tokenStream) { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - // NOCOMMIT LovinsStemmer has been removed, what is the migration path for users relying on it? + //TODO Lucene 10 upgrade: LovinsStemmer has been removed, what is the migration path for users relying on it? throw new UnsupportedOperationException(); } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -163,7 +163,7 @@ public TokenStream create(TokenStream tokenStream) { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - // NOCOMMIT how about bw comp for users relying on german2 stemmer that is now folded into german stemmer? + //TODO Lucene 10 upgrade: how about bw comp for users relying on german2 stemmer that is now folded into german stemmer? 
return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); @@ -210,8 +210,10 @@ public TokenStream create(TokenStream tokenStream) { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { + //TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants return new NorwegianLightStemFilter(tokenStream, 2); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { + //TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants return new NorwegianMinimalStemFilter(tokenStream, 2); // Persian stemmers diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 1bac5368bbae5..1eae4d84f3533 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -197,7 +197,7 @@ public void consumeTerms(Query query, Term... 
termsToConsume) { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - // NOCOMMIT this is a workaround that only gets one term + //TODO Lucene 10 upgrade: this is a workaround that only gets one term Term term = NestedHelper.getTermInSetTerm(q); Set qe = new HashSet<>(); qe.add(new QueryExtraction(term)); diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index b8334abec3d95..f32871dc899cf 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -36,7 +36,7 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} - * NOCOMMIT: Lucenes RegExp class has removed the COMPLEMENT flag in https://issues.apache.org/jira/browse/LUCENE-10010 + * TODO Lucene 10 upgrade: Lucenes RegExp class has removed the COMPLEMENT flag in https://issues.apache.org/jira/browse/LUCENE-10010 * I'm currently not sure if it still supports the "~" operator but we need an enum constant for * parsing our own flag syntax, so leaving a tombstone here for now */ From 94c21b65bc4fd01fa7403cf77e55b8909529a16d Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 30 Aug 2024 22:16:48 +0200 Subject: [PATCH 195/417] spotless --- .../analysis/common/StemmerTokenFilterFactory.java | 10 +++++----- .../org/elasticsearch/percolator/QueryAnalyzer.java | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 12815d93cee41..c87c9dde45050 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -112,7 +112,7 @@ public TokenStream create(TokenStream tokenStream) { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - //TODO Lucene 10 upgrade: KPStemmer has been removed, what is the migration path for users relying on it? + // TODO Lucene 10 upgrade: KPStemmer has been removed, what is the migration path for users relying on it? throw new UnsupportedOperationException(); // English stemmers } else if ("english".equalsIgnoreCase(language)) { @@ -122,7 +122,7 @@ public TokenStream create(TokenStream tokenStream) { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - //TODO Lucene 10 upgrade: LovinsStemmer has been removed, what is the migration path for users relying on it? + // TODO Lucene 10 upgrade: LovinsStemmer has been removed, what is the migration path for users relying on it? throw new UnsupportedOperationException(); } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -163,7 +163,7 @@ public TokenStream create(TokenStream tokenStream) { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - //TODO Lucene 10 upgrade: how about bw comp for users relying on german2 stemmer that is now folded into german stemmer? + // TODO Lucene 10 upgrade: how about bw comp for users relying on german2 stemmer that is now folded into german stemmer? 
return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); @@ -210,10 +210,10 @@ public TokenStream create(TokenStream tokenStream) { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { - //TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants + // TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants return new NorwegianLightStemFilter(tokenStream, 2); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { - //TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants + // TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants return new NorwegianMinimalStemFilter(tokenStream, 2); // Persian stemmers diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 1eae4d84f3533..64a7ca1cbd911 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -197,7 +197,7 @@ public void consumeTerms(Query query, Term... 
termsToConsume) { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - //TODO Lucene 10 upgrade: this is a workaround that only gets one term + // TODO Lucene 10 upgrade: this is a workaround that only gets one term Term term = NestedHelper.getTermInSetTerm(q); Set qe = new HashSet<>(); qe.add(new QueryExtraction(term)); From 4e6c3545319d0464e2820de90a17e2240143098e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 31 Aug 2024 06:11:22 +0000 Subject: [PATCH 196/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-ce4f56e74ad --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 2481aa54e9b2d..29c5f37c024ee 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-9172cc42472 +lucene = 10.0.0-snapshot-ce4f56e74ad bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 520730b9e906f..ddba97fcc27e6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2819,129 +2819,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3be4e6536b9312b641a8ae167f29845947868e93 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 31 Aug 2024 06:12:09 +0000 Subject: [PATCH 197/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-f23711a3e36 --- 
gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 211f0237cad6e..fdf8c10c574a2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2821,127 +2821,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 205b95e091c50d9d22ddf44ef04751f4f6ae7fcc Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sat, 31 Aug 2024 20:51:20 +0100 Subject: [PATCH 198/417] Fix NestedHelper --- .../index/search/NestedHelper.java | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index 22bcd5b9afd39..34252f0d22bf4 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -178,20 +178,15 @@ boolean mightMatchNonNestedDocs(String field, String nestedPath) { } public static Term getTermInSetTerm(TermInSetQuery tisQuery) { - try { - if (tisQuery.getTermsCount() == 1) { - final SetOnce collectedTerm = new SetOnce<>(); - tisQuery.visit(new QueryVisitor() { - @Override - public void consumeTerms(Query query, Term... terms) { - collectedTerm.set(terms[0]); - } - }); - return collectedTerm.get(); - } - return null; - } catch (IOException e) { - // TODO should never happen, remove throwing IOException from TermInSetQuery in Lucene + if (tisQuery.getTermsCount() == 1) { + final SetOnce collectedTerm = new SetOnce<>(); + tisQuery.visit(new QueryVisitor() { + @Override + public void consumeTerms(Query query, Term... 
terms) { + collectedTerm.set(terms[0]); + } + }); + return collectedTerm.get(); } return null; } From 10a85aae0a932b23e314c67214e75a0331724b39 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 1 Sep 2024 06:12:14 +0000 Subject: [PATCH 199/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-ce4f56e74ad --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 41458a6664600..2cab450f0580f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From c6bf30f8629dc1fb821ab847a4e506d1537a0b8c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 1 Sep 2024 06:12:14 +0000 Subject: [PATCH 200/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-f23711a3e36 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4618acb778735..87d3188970581 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From feb12981d80c2bdf22ce0d3b7648cad948dafe32 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 1 Sep 2024 09:38:45 +0100 Subject: [PATCH 201/417] Unused import --- .../main/java/org/elasticsearch/index/search/NestedHelper.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index 34252f0d22bf4..7fb7481c2297d 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ 
b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -26,7 +26,6 @@ import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; -import java.io.IOException; import java.util.function.Predicate; /** Utility class to filter parent and children clauses when building nested From a27a97ee0d64b9a09372d3c090b9bfa1d8eac9ce Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 1 Sep 2024 12:16:45 +0100 Subject: [PATCH 202/417] Fix term vector access in IT --- .../org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 15afd6897a40e..2f758d686023a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -138,7 +138,7 @@ private void hitExecute(FetchContext context, HitContext hitContext) throws IOEx hitField = new DocumentField(NAME, new ArrayList<>(1)); hitContext.hit().setDocumentField(NAME, hitField); } - Terms terms = hitContext.reader().getTermVector(hitContext.docId(), field); + Terms terms = hitContext.reader().termVectors().get(hitContext.docId(), field); if (terms != null) { TermsEnum te = terms.iterator(); Map tv = new HashMap<>(); From 2fe4b6001c72dec91f6120130bc3d44d6ac631e7 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 1 Sep 2024 14:44:32 +0100 Subject: [PATCH 203/417] Fix SearchQueryIT javadoc failure --- .../java/org/elasticsearch/search/query/SearchQueryIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 384395bcb78e7..200b228e86b1c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.search.query; import org.apache.lucene.analysis.pattern.PatternReplaceCharFilter; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.tests.analysis.MockTokenizer; @@ -1922,7 +1922,7 @@ public Map> getTokenizers() { } /** - * Test correct handling {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexReader, MultiTermQuery)}. That rewrite method is e.g. + * Test correct handling {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexSearcher, MultiTermQuery)}. That rewrite method is e.g. * set for fuzzy queries with "constant_score" rewrite nested inside a `span_multi` query and would cause NPEs due to an unset * {@link AttributeSource}. 
*/ From faf42c5abe6e7e857f48dbed3cdf28bb8c74bbfe Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 1 Sep 2024 20:46:57 +0100 Subject: [PATCH 204/417] spotless --- .../java/org/elasticsearch/search/query/SearchQueryIT.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 200b228e86b1c..20250d210c201 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1922,7 +1922,9 @@ public Map> getTokenizers() { } /** - * Test correct handling {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexSearcher, MultiTermQuery)}. That rewrite method is e.g. + * Test correct handling + * {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexSearcher, MultiTermQuery)}. + * That rewrite method is e.g. * set for fuzzy queries with "constant_score" rewrite nested inside a `span_multi` query and would cause NPEs due to an unset * {@link AttributeSource}. 
*/ From 1820f0f34b4aa886e81bca8cbb3af012ea9a8abb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 2 Sep 2024 06:11:22 +0000 Subject: [PATCH 205/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-ce4f56e74ad --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2cab450f0580f..32407b2940706 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 7a4f30ba2bbc38115901ac0ed76d61ad1fbf2269 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 2 Sep 2024 06:11:22 +0000 Subject: [PATCH 206/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-f23711a3e36 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 87d3188970581..c3d586a6e3a73 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 9c123809bd756fd42f73776444b4148ef07e4886 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Mon, 2 Sep 2024 12:03:54 +0100 Subject: [PATCH 207/417] Add Awaits fix for ESQL LogicalPlanOptimizerTest --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 5268eb4349af8..39f81b6c4cfba 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1926,6 +1926,7 @@ public void testSimplifyRLikeNoWildcard() { assertTrue(filter.child() instanceof EsRelation); } + @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testSimplifyRLikeMatchAll() { LogicalPlan plan = optimizedPlan(""" from test From 35f980af2209f505c54ff06a6ad8653d9ad68812 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Mon, 2 Sep 2024 12:13:31 +0100 Subject: [PATCH 208/417] More ESQL Regex awaits fixes --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 1 + .../xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 39f81b6c4cfba..16d8861f4621e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1926,6 +1926,7 @@ public void testSimplifyRLikeNoWildcard() { assertTrue(filter.child() instanceof EsRelation); } + //TODO Lucene 10 upgrade @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testSimplifyRLikeMatchAll() { LogicalPlan plan = optimizedPlan(""" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java index 62b13e6c9cc03..aebd9807541a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java 
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java @@ -26,6 +26,8 @@ public class ReplaceRegexMatchTests extends ESTestCase { + //TODO Lucene 10 upgrade + @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllLikeToExist() { for (String s : asList("%", "%%", "%%%")) { LikePattern pattern = new LikePattern(s, (char) 0); @@ -38,6 +40,8 @@ public void testMatchAllLikeToExist() { } } + //TODO Lucene 10 upgrade + @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllWildcardLikeToExist() { for (String s : asList("*", "**", "***")) { WildcardPattern pattern = new WildcardPattern(s); @@ -50,6 +54,8 @@ public void testMatchAllWildcardLikeToExist() { } } + //TODO Lucene 10 upgrade + @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllRLikeToExist() { RLikePattern pattern = new RLikePattern(".*"); FieldAttribute fa = getFieldAttribute(); From c5cc7407b7bde07303718c652e0867f7e8ecc8b3 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Mon, 2 Sep 2024 12:56:28 +0100 Subject: [PATCH 209/417] Fix CompositeValuesCollectorQueueTests mock and spotless --- .../CompositeValuesCollectorQueueTests.java | 148 +++++++++++++++++- .../optimizer/LogicalPlanOptimizerTests.java | 2 +- .../rules/ReplaceRegexMatchTests.java | 6 +- 3 files changed, 144 insertions(+), 12 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 364355ac81054..ecdf3a79bd9f2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -14,14 
+14,29 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.TextField; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafMetaData; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.StoredFields; +import org.apache.lucene.index.TermVectors; +import org.apache.lucene.index.Terms; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.KnnCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -53,8 +68,6 @@ import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.LONG; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { static class ClassAndName { @@ -70,11 +83,8 @@ static class ClassAndName { private IndexReader indexReader; @Before - public void setUpMocks() { - indexReader = mock(IndexReader.class); 
- IndexReaderContext indexReaderContext = mock(IndexReaderContext.class); - when(indexReaderContext.leaves()).thenReturn(List.of()); - when(indexReader.getContext()).thenReturn(indexReaderContext); + public void set() { + indexReader = new DummyReader(); } public void testRandomLong() throws IOException { @@ -424,4 +434,126 @@ private static void createListCombinations( } } } + + static class DummyReader extends LeafReader { + @Override + public CacheHelper getCoreCacheHelper() { + return null; + } + + @Override + public Terms terms(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public BinaryDocValues getBinaryDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedDocValues getSortedDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedSetDocValues getSortedSetDocValues(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNormValues(String field) throws IOException { + return null; + } + + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return null; + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + return null; + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return null; + } + + @Override + public void searchNearestVectors(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public void searchNearestVectors(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public FieldInfos getFieldInfos() { + return null; + } + 
+ @Override + public Bits getLiveDocs() { + return null; + } + + @Override + public PointValues getPointValues(String field) throws IOException { + return null; + } + + @Override + public void checkIntegrity() throws IOException { + + } + + @Override + public LeafMetaData getMetaData() { + return null; + } + + @Override + public TermVectors termVectors() throws IOException { + return null; + } + + @Override + public int numDocs() { + return 0; + } + + @Override + public int maxDoc() { + return 0; + } + + @Override + public StoredFields storedFields() throws IOException { + return null; + } + + @Override + protected void doClose() throws IOException { + + } + + @Override + public CacheHelper getReaderCacheHelper() { + return null; + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 16d8861f4621e..c160ba6168957 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1926,7 +1926,7 @@ public void testSimplifyRLikeNoWildcard() { assertTrue(filter.child() instanceof EsRelation); } - //TODO Lucene 10 upgrade + // TODO Lucene 10 upgrade @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testSimplifyRLikeMatchAll() { LogicalPlan plan = optimizedPlan(""" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java index aebd9807541a6..b673d3c912277 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatchTests.java @@ -26,7 +26,7 @@ public class ReplaceRegexMatchTests extends ESTestCase { - //TODO Lucene 10 upgrade + // TODO Lucene 10 upgrade @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllLikeToExist() { for (String s : asList("%", "%%", "%%%")) { @@ -40,7 +40,7 @@ public void testMatchAllLikeToExist() { } } - //TODO Lucene 10 upgrade + // TODO Lucene 10 upgrade @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllWildcardLikeToExist() { for (String s : asList("*", "**", "***")) { @@ -54,7 +54,7 @@ public void testMatchAllWildcardLikeToExist() { } } - //TODO Lucene 10 upgrade + // TODO Lucene 10 upgrade @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllRLikeToExist() { RLikePattern pattern = new RLikePattern(".*"); From 6eb5b29eb8aca9db423efd11b88658bb3ab9a9e7 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 2 Sep 2024 14:42:22 +0200 Subject: [PATCH 210/417] Add DocValueSkipper to ES87TSDBDocValuesFormat --- .../codec/tsdb/ES87TSDBDocValuesConsumer.java | 209 ++++++++++++++++-- .../codec/tsdb/ES87TSDBDocValuesFormat.java | 46 +++- .../codec/tsdb/ES87TSDBDocValuesProducer.java | 127 ++++++++++- ...ValuesFormatVariableSkipIntervalTests.java | 195 ++++++++++++++++ 4 files changed, 561 insertions(+), 16 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 7adb1e8ae2c3b..ae91e97a3f333 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -40,9 +40,13 @@ import org.elasticsearch.core.IOUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_LEVEL_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SORTED_SET; final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { @@ -50,9 +54,16 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { IndexOutput data, meta; final int maxDoc; private byte[] termsDictBuffer; - - ES87TSDBDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) - throws IOException { + private final int skipIndexIntervalSize; + + ES87TSDBDocValuesConsumer( + SegmentWriteState state, + int skipIndexIntervalSize, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { this.termsDictBuffer = new byte[1 << 14]; boolean success = false; try { @@ -75,6 +86,7 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { state.segmentSuffix ); maxDoc = state.segmentInfo.maxDoc(); + this.skipIndexIntervalSize = skipIndexIntervalSize; success = true; } finally { if (success == false) { @@ -87,12 +99,17 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.NUMERIC); - writeField(field, new EmptyDocValuesProducer() { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues 
getSortedNumeric(FieldInfo field) throws IOException { return DocValues.singleton(valuesProducer.getNumeric(field)); } - }, -1); + }; + if (field.hasDocValuesSkipIndex()) { + writeSkipIndex(field, producer); + } + + writeField(field, producer, -1); } private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { @@ -262,13 +279,11 @@ public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) th public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.SORTED); - doAddSortedField(field, valuesProducer); + doAddSortedField(field, valuesProducer, false); } - private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - SortedDocValues sorted = valuesProducer.getSorted(field); - int maxOrd = sorted.getValueCount(); - writeField(field, new EmptyDocValuesProducer() { + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer, boolean addTypeByte) throws IOException { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { SortedDocValues sorted = valuesProducer.getSorted(field); @@ -305,7 +320,16 @@ public long cost() { }; return DocValues.singleton(sortedOrds); } - }, maxOrd); + }; + if (field.hasDocValuesSkipIndex()) { + writeSkipIndex(field, producer); + } + if (addTypeByte) { + meta.writeByte((byte) 0); // multiValued (0 = singleValued) + } + SortedDocValues sorted = valuesProducer.getSorted(field); + int maxOrd = sorted.getValueCount(); + writeField(field, producer, maxOrd); addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); } @@ -458,6 +482,12 @@ public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProdu } private void writeSortedNumericField(FieldInfo field, DocValuesProducer 
valuesProducer, long maxOrd) throws IOException { + if (field.hasDocValuesSkipIndex()) { + writeSkipIndex(field, valuesProducer); + } + if (maxOrd > -1) { + meta.writeByte((byte) 1); // multiValued (1 = multiValued) + } long[] stats = writeField(field, valuesProducer, maxOrd); int numDocsWithField = Math.toIntExact(stats[0]); long numValues = stats[1]; @@ -509,16 +539,14 @@ public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) meta.writeByte(SORTED_SET); if (isSingleValued(valuesProducer.getSortedSet(field))) { - meta.writeByte((byte) 0); // multiValued (0 = singleValued) doAddSortedField(field, new EmptyDocValuesProducer() { @Override public SortedDocValues getSorted(FieldInfo field) throws IOException { return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); } - }); + }, true); return; } - meta.writeByte((byte) 1); // multiValued (1 = multiValued) SortedSetDocValues values = valuesProducer.getSortedSet(field); long maxOrd = values.getValueCount(); @@ -602,4 +630,157 @@ public void close() throws IOException { meta = data = null; } } + + private static class SkipAccumulator { + int minDocID; + int maxDocID; + int docCount; + long minValue; + long maxValue; + + SkipAccumulator(int docID) { + minDocID = docID; + minValue = Long.MAX_VALUE; + maxValue = Long.MIN_VALUE; + docCount = 0; + } + + boolean isDone(int skipIndexIntervalSize, int valueCount, long nextValue, int nextDoc) { + if (docCount < skipIndexIntervalSize) { + return false; + } + // Once we reach the interval size, we will keep accepting documents if + // - next doc value is not a multi-value + // - current accumulator only contains a single value and next value is the same value + // - the accumulator is dense and the next doc keeps the density (no gaps) + return valueCount > 1 || minValue != maxValue || minValue != nextValue || docCount != nextDoc - minDocID; + } + + void accumulate(long value) { + minValue = Math.min(minValue, value); + 
maxValue = Math.max(maxValue, value); + } + + void accumulate(SkipAccumulator other) { + assert minDocID <= other.minDocID && maxDocID < other.maxDocID; + maxDocID = other.maxDocID; + minValue = Math.min(minValue, other.minValue); + maxValue = Math.max(maxValue, other.maxValue); + docCount += other.docCount; + } + + void nextDoc(int docID) { + maxDocID = docID; + ++docCount; + } + + public static SkipAccumulator merge(List list, int index, int length) { + SkipAccumulator acc = new SkipAccumulator(list.get(index).minDocID); + for (int i = 0; i < length; i++) { + acc.accumulate(list.get(index + i)); + } + return acc; + } + } + + private void writeSkipIndex(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + assert field.hasDocValuesSkipIndex(); + final long start = data.getFilePointer(); + final SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + long globalMaxValue = Long.MIN_VALUE; + long globalMinValue = Long.MAX_VALUE; + int globalDocCount = 0; + int maxDocId = -1; + final List accumulators = new ArrayList<>(); + SkipAccumulator accumulator = null; + final int maxAccumulators = 1 << (SKIP_INDEX_LEVEL_SHIFT * (SKIP_INDEX_MAX_LEVEL - 1)); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + final long firstValue = values.nextValue(); + if (accumulator != null && accumulator.isDone(skipIndexIntervalSize, values.docValueCount(), firstValue, doc)) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + accumulator = null; + if (accumulators.size() == maxAccumulators) { + writeLevels(accumulators); + accumulators.clear(); + } + } + if (accumulator == null) { + accumulator = new SkipAccumulator(doc); + accumulators.add(accumulator); + } + accumulator.nextDoc(doc); + accumulator.accumulate(firstValue); + for (int i = 1, end 
= values.docValueCount(); i < end; ++i) { + accumulator.accumulate(values.nextValue()); + } + } + + if (accumulators.isEmpty() == false) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + writeLevels(accumulators); + } + meta.writeLong(start); // record the start in meta + meta.writeLong(data.getFilePointer() - start); // record the length + assert globalDocCount == 0 || globalMaxValue >= globalMinValue; + meta.writeLong(globalMaxValue); + meta.writeLong(globalMinValue); + assert globalDocCount <= maxDocId + 1; + meta.writeInt(globalDocCount); + meta.writeInt(maxDocId); + } + + private void writeLevels(List accumulators) throws IOException { + final List> accumulatorsLevels = new ArrayList<>(SKIP_INDEX_MAX_LEVEL); + accumulatorsLevels.add(accumulators); + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL - 1; i++) { + accumulatorsLevels.add(buildLevel(accumulatorsLevels.get(i))); + } + int totalAccumulators = accumulators.size(); + for (int index = 0; index < totalAccumulators; index++) { + // compute how many levels we need to write for the current accumulator + final int levels = getLevels(index, totalAccumulators); + // write the number of levels + data.writeByte((byte) levels); + // write intervals in reverse order. 
This is done so we don't + // need to read all of them in case of slipping + for (int level = levels - 1; level >= 0; level--) { + final SkipAccumulator accumulator = accumulatorsLevels.get(level).get(index >> (SKIP_INDEX_LEVEL_SHIFT * level)); + data.writeInt(accumulator.maxDocID); + data.writeInt(accumulator.minDocID); + data.writeLong(accumulator.maxValue); + data.writeLong(accumulator.minValue); + data.writeInt(accumulator.docCount); + } + } + } + + private static List buildLevel(List accumulators) { + final int levelSize = 1 << SKIP_INDEX_LEVEL_SHIFT; + final List collector = new ArrayList<>(); + for (int i = 0; i < accumulators.size() - levelSize + 1; i += levelSize) { + collector.add(SkipAccumulator.merge(accumulators, i, levelSize)); + } + return collector; + } + + private static int getLevels(int index, int size) { + if (Integer.numberOfTrailingZeros(index) >= SKIP_INDEX_LEVEL_SHIFT) { + // TODO: can we do it in constant time rather than linearly with SKIP_INDEX_MAX_LEVEL? + final int left = size - index; + for (int level = SKIP_INDEX_MAX_LEVEL - 1; level > 0; level--) { + final int numberIntervals = 1 << (SKIP_INDEX_LEVEL_SHIFT * level); + if (left >= numberIntervals && index % numberIntervals == 0) { + return level + 1; + } + } + } + return 1; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java index c5f597f27eb98..52d64c555b07b 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java @@ -42,13 +42,57 @@ public class ES87TSDBDocValuesFormat extends org.apache.lucene.codecs.DocValuesF static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; + // number of documents in an 
interval + private static final int DEFAULT_SKIP_INDEX_INTERVAL_SIZE = 4096; + // bytes on an interval: + // * 1 byte : number of levels + // * 16 bytes: min / max value, + // * 8 bytes: min / max docID + // * 4 bytes: number of documents + private static final long SKIP_INDEX_INTERVAL_BYTES = 29L; + // number of intervals represented as a shift to create a new level, this is 1 << 3 == 8 + // intervals. + static final int SKIP_INDEX_LEVEL_SHIFT = 3; + // max number of levels + // Increasing this number, it increases how much heap we need at index time. + // we currently need (1 * 8 * 8 * 8) = 512 accumulators on heap + static final int SKIP_INDEX_MAX_LEVEL = 4; + // number of bytes to skip when skipping a level. It does not take into account the + // current interval that is being read. + static final long[] SKIP_INDEX_JUMP_LENGTH_PER_LEVEL = new long[SKIP_INDEX_MAX_LEVEL]; + + static { + // Size of the interval minus read bytes (1 byte for level and 4 bytes for maxDocID) + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[0] = SKIP_INDEX_INTERVAL_BYTES - 5L; + for (int level = 1; level < SKIP_INDEX_MAX_LEVEL; level++) { + // jump from previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] = SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level - 1]; + // nodes added by new level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] += (1 << (level * SKIP_INDEX_LEVEL_SHIFT)) * SKIP_INDEX_INTERVAL_BYTES; + // remove the byte levels added in the previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] -= (1 << ((level - 1) * SKIP_INDEX_LEVEL_SHIFT)); + } + } + + private final int skipIndexIntervalSize; + + /** Default constructor. */ public ES87TSDBDocValuesFormat() { + this(DEFAULT_SKIP_INDEX_INTERVAL_SIZE); + } + + /** Doc values fields format with specified skipIndexIntervalSize. 
*/ + public ES87TSDBDocValuesFormat(int skipIndexIntervalSize) { super(CODEC_NAME); + if (skipIndexIntervalSize < 2) { + throw new IllegalArgumentException("skipIndexIntervalSize must be > 1, got [" + skipIndexIntervalSize + "]"); + } + this.skipIndexIntervalSize = skipIndexIntervalSize; } @Override public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - return new ES87TSDBDocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + return new ES87TSDBDocValuesConsumer(state, skipIndexIntervalSize, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index 35d2924dc60ba..b1fa1beb53c6c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.DataInput; @@ -43,6 +44,8 @@ import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_JUMP_LENGTH_PER_LEVEL; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT; public class ES87TSDBDocValuesProducer extends DocValuesProducer { @@ -51,6 +54,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { private final Map sorted = new HashMap<>(); private final Map 
sortedSets = new HashMap<>(); private final Map sortedNumerics = new HashMap<>(); + private final Map skippers = new HashMap<>(); private final IndexInput data; private final int maxDoc; @@ -701,7 +705,112 @@ public long cost() { @Override public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { - return null; + final DocValuesSkipperEntry entry = skippers.get(field.name); + + final IndexInput input = data.slice("doc value skipper", entry.offset, entry.length); + // Prefetch the first page of data. Following pages are expected to get prefetched through + // read-ahead. + if (input.length() > 0) { + input.prefetch(0, 1); + } + // TODO: should we write to disk the actual max level for this segment? + return new DocValuesSkipper() { + final int[] minDocID = new int[SKIP_INDEX_MAX_LEVEL]; + final int[] maxDocID = new int[SKIP_INDEX_MAX_LEVEL]; + + { + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = -1; + } + } + + final long[] minValue = new long[SKIP_INDEX_MAX_LEVEL]; + final long[] maxValue = new long[SKIP_INDEX_MAX_LEVEL]; + final int[] docCount = new int[SKIP_INDEX_MAX_LEVEL]; + int levels = 1; + + @Override + public void advance(int target) throws IOException { + if (target > entry.maxDocId) { + // skipper is exhausted + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = DocIdSetIterator.NO_MORE_DOCS; + } + } else { + // find next interval + assert target > maxDocID[0] : "target must be bigger that current interval"; + while (true) { + levels = input.readByte(); + assert levels <= SKIP_INDEX_MAX_LEVEL && levels > 0 : "level out of range [" + levels + "]"; + boolean valid = true; + // check if current interval is competitive or we can jump to the next position + for (int level = levels - 1; level >= 0; level--) { + if ((maxDocID[level] = input.readInt()) < target) { + input.skipBytes(SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level]); // the jump for the level + valid = false; + break; + } + 
minDocID[level] = input.readInt(); + maxValue[level] = input.readLong(); + minValue[level] = input.readLong(); + docCount[level] = input.readInt(); + } + if (valid) { + // adjust levels + while (levels < SKIP_INDEX_MAX_LEVEL && maxDocID[levels] >= target) { + levels++; + } + break; + } + } + } + } + + @Override + public int numLevels() { + return levels; + } + + @Override + public int minDocID(int level) { + return minDocID[level]; + } + + @Override + public int maxDocID(int level) { + return maxDocID[level]; + } + + @Override + public long minValue(int level) { + return minValue[level]; + } + + @Override + public long maxValue(int level) { + return maxValue[level]; + } + + @Override + public int docCount(int level) { + return docCount[level]; + } + + @Override + public long minValue() { + return entry.minValue; + } + + @Override + public long maxValue() { + return entry.maxValue; + } + + @Override + public int docCount() { + return entry.docCount; + } + }; } @Override @@ -721,6 +830,9 @@ private void readFields(IndexInput meta, FieldInfos infos) throws IOException { throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); } byte type = meta.readByte(); + if (info.hasDocValuesSkipIndex()) { + skippers.put(info.name, readDocValueSkipperMeta(meta)); + } if (type == ES87TSDBDocValuesFormat.NUMERIC) { numerics.put(info.name, readNumeric(meta)); } else if (type == ES87TSDBDocValuesFormat.BINARY) { @@ -743,6 +855,17 @@ private static NumericEntry readNumeric(IndexInput meta) throws IOException { return entry; } + private static DocValuesSkipperEntry readDocValueSkipperMeta(IndexInput meta) throws IOException { + long offset = meta.readLong(); + long length = meta.readLong(); + long maxValue = meta.readLong(); + long minValue = meta.readLong(); + int docCount = meta.readInt(); + int maxDocID = meta.readInt(); + + return new DocValuesSkipperEntry(offset, length, minValue, maxValue, docCount, maxDocID); + } + private static void readNumeric(IndexInput 
meta, NumericEntry entry) throws IOException { entry.docsWithFieldOffset = meta.readLong(); entry.docsWithFieldLength = meta.readLong(); @@ -1253,6 +1376,8 @@ private void set() { } } + private record DocValuesSkipperEntry(long offset, long length, long minValue, long maxValue, int docCount, int maxDocId) {} + private static class NumericEntry { long docsWithFieldOffset; long docsWithFieldLength; diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java new file mode 100644 index 0000000000000..b8257db1b7a1a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.index.codec.tsdb; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; + +/** Tests ES87TSDBDocValuesFormat with custom skipper interval size. */ +public class ES87TSDBDocValuesFormatVariableSkipIntervalTests extends BaseDocValuesFormatTestCase { + + @Override + protected Codec getCodec() { + // small interval size to test with many intervals + return TestUtil.alwaysDocValuesFormat(new ES87TSDBDocValuesFormat(random().nextInt(4, 16))); + } + + public void testSkipIndexIntervalSize() { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new ES87TSDBDocValuesFormat(random().nextInt(Integer.MIN_VALUE, 2)) + ); + assertTrue(ex.getMessage().contains("skipIndexIntervalSize must be > 1")); + } + + public void testSkipperAllEqualValue() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int numDocs = atLeast(100); + for (int i = 0; i < numDocs; i++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = 
writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + skipper.advance(0); + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs, skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on different value + public void testSkipperFewValuesSorted() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + boolean reverse = random().nextBoolean(); + config.setIndexSort(new Sort(new SortField("dv", SortField.Type.LONG, reverse))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int intervals = random().nextInt(2, 10); + final int[] numDocs = new int[intervals]; + for (int i = 0; i < intervals; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", i)); + writer.addDocument(doc); + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + if (reverse) { + for (int i = intervals - 1; i >= 0; i--) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } else { + for (int i = 0; i < intervals; i++) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + 
skipper.advance(skipper.maxDocID(0) + 1); + } + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on empty doc values + public void testSkipperAllEqualValueWithGaps() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + // add doc with empty "dv" + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on multi-values + public void testSkipperAllEqualValueWithMultiValues() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try 
(Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + int docs = random().nextInt(10) + 16; + numDocs[i] += docs; + for (int j = 0; j < docs; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + if (i != gaps - 1) { + // add doc with mutivalues + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + numDocs[i + 1] = 1; + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } +} From 40e35ad3a0d616c40384a650f9be8c9e9ca375f2 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 2 Sep 2024 14:56:41 +0200 Subject: [PATCH 211/417] Change IndexReader with LeafReader for mocking --- .../bucket/composite/SingleDimensionValuesSourceTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java index 4f5700cff91d5..b20d8d95e1ac6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -257,7 +258,7 @@ public void testNumericSorted() { } private static IndexReader mockIndexReader(int maxDoc, int numDocs) { - IndexReader reader = mock(IndexReader.class); + IndexReader reader = mock(LeafReader.class); when(reader.hasDeletions()).thenReturn(maxDoc - numDocs > 0); when(reader.maxDoc()).thenReturn(maxDoc); when(reader.numDocs()).thenReturn(numDocs); From cdac9a55ef1cd50f58b80e2d4b6aed6905b435ae Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 2 Sep 2024 15:07:15 +0200 Subject: [PATCH 212/417] Fix KeyedFlattenedLeafFieldData iteration --- .../index/mapper/flattened/KeyedFlattenedLeafFieldData.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index c7bd0645010d0..3b6a35738085b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -218,7 +218,7 @@ public boolean advanceExact(int target) throws IOException { if (delegate.advanceExact(target)) { int count = 0; - while (true) { + for (int i = 0; i < 
delegate.docValueCount(); i++) { long ord = delegate.nextOrd(); if (ord > maxOrd) { break; From 36e153c349e2301c4684a4f1743b00000291abc7 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 2 Sep 2024 15:22:05 +0200 Subject: [PATCH 213/417] Fix KeyedFlattenedLeafFieldData iteration second try --- .../index/mapper/flattened/KeyedFlattenedLeafFieldData.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index 3b6a35738085b..15bdc9a0f9011 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -218,7 +218,7 @@ public boolean advanceExact(int target) throws IOException { if (delegate.advanceExact(target)) { int count = 0; - for (int i = 0; i < delegate.docValueCount(); i++) { + for (int i = 0; i < delegate.getValueCount(); i++) { long ord = delegate.nextOrd(); if (ord > maxOrd) { break; From 34ec3a93fab3ae05d490dc3bc44870c8175fff60 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 2 Sep 2024 15:29:25 +0200 Subject: [PATCH 214/417] Fix KeyedFlattenedLeafFieldDataTests --- .../mapper/flattened/KeyedFlattenedLeafFieldDataTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java index 0181b68b5b18a..353d9f524afec 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java @@ -192,7 +192,7 @@ public boolean advanceExact(int docID) { 
@Override public long nextOrd() { - assert index == documentOrds.length; + assertTrue(index < documentOrds.length); return documentOrds[index++]; } From db740b6b79a1b8366377231193a5c91b46c969ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 Sep 2024 15:54:36 +0200 Subject: [PATCH 215/417] Fix QueryStringQueryBuilderTests Exception message changed slightly. --- .../index/query/QueryStringQueryBuilderTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index f3839a08f7995..e48df80a603d9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -748,7 +748,7 @@ public void testToQueryRegExpQueryTooComplex() throws Exception { TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10000 effort.")); } @@ -774,7 +774,7 @@ public void testToQueryRegExpQueryMaxDeterminizedStatesParsing() throws Exceptio TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10 effort.")); } From eff329fa0c19ecd5a7a8c9a17cbe73220c8e784b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 Sep 2024 16:04:23 +0200 Subject: [PATCH 216/417] Fix SystemIndexDescriptor RegEx automata are no longer 
determinized by default. --- .../java/org/elasticsearch/indices/SystemIndexDescriptor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 01ce14b41cac6..aa724af1c2e74 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -889,7 +889,7 @@ static Automaton buildAutomaton(String pattern, String alias) { final Automaton aliasAutomaton = new RegExp(aliasAsRegex).toAutomaton(); - return Operations.union(patternAutomaton, aliasAutomaton); + return Operations.determinize(Operations.union(patternAutomaton, aliasAutomaton), DEFAULT_DETERMINIZE_WORK_LIMIT); } /** From adc0f336b415ab1249b104e2f9a5f49e93e18241 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 Sep 2024 16:13:38 +0200 Subject: [PATCH 217/417] Fix Regex class automaton determinization --- .../src/main/java/org/elasticsearch/common/regex/Regex.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/regex/Regex.java b/server/src/main/java/org/elasticsearch/common/regex/Regex.java index f292ac5032f7e..5b29d56f60616 100644 --- a/server/src/main/java/org/elasticsearch/common/regex/Regex.java +++ b/server/src/main/java/org/elasticsearch/common/regex/Regex.java @@ -68,7 +68,7 @@ public static Automaton simpleMatchToAutomaton(String pattern) { previous = i + 1; } automata.add(Automata.makeString(pattern.substring(previous))); - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** @@ -112,7 +112,7 @@ public static Automaton simpleMatchToAutomaton(String... 
patterns) { prefixAutomaton.add(Automata.makeAnyString()); automata.add(Operations.concatenate(prefixAutomaton)); } - return Operations.union(automata); + return Operations.determinize(Operations.union(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** From d070e8da6c7eddcd617cbc74faadd88fde9fa0d6 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 2 Sep 2024 16:40:38 +0200 Subject: [PATCH 218/417] Fix docValueCount in KeyedFlattenedLeafFieldData --- .../index/mapper/flattened/KeyedFlattenedLeafFieldData.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index 15bdc9a0f9011..3b6a35738085b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -218,7 +218,7 @@ public boolean advanceExact(int target) throws IOException { if (delegate.advanceExact(target)) { int count = 0; - for (int i = 0; i < delegate.getValueCount(); i++) { + for (int i = 0; i < delegate.docValueCount(); i++) { long ord = delegate.nextOrd(); if (ord > maxOrd) { break; From 3352358d56f35ca2eacacb4abc52e6162437cca0 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 2 Sep 2024 18:28:12 +0200 Subject: [PATCH 219/417] Fix TimeSeriesRateAggregatorTests --- .../rate/TimeSeriesRateAggregatorTests.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index 885e02a8b5e6a..f517c03468bc2 100644 --- 
a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -69,8 +69,12 @@ public void testSimple() throws IOException { }; AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")); testCase(iw -> { - iw.addDocuments(docs(1000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(1000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } @@ -109,8 +113,12 @@ public void testNestedWithinDateHistogram() throws IOException { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")) .withSplitLeavesIntoSeperateAggregators(false); testCase(iw -> { - iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } From e638acfff5fef84ef35cb378b13bdd32d3b31833 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 Sep 2024 21:21:03 +0200 Subject: [PATCH 220/417] Fix undeterminized automaton in UnmappedFieldFetcher --- .../search/fetch/subphase/UnmappedFieldFetcher.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java index 2627177fcf541..4740af9e4c5d2 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java @@ -75,7 +75,7 @@ private static CharacterRunAutomaton nestedChildrenAutomaton(List nested for (String child : nestedChildren) { automata.add(Operations.concatenate(Automata.makeString(child + "."), Automata.makeAnyString())); } - return new CharacterRunAutomaton(Operations.union(automata)); + return new CharacterRunAutomaton(Operations.determinize(Operations.union(automata), AUTOMATON_MAX_DETERMINIZED_STATES)); } // Builds an automaton that will match any field that conforms to one of the input patterns From 5a831982d24472bb8557e974f107223ac414b7df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 Sep 2024 21:42:46 +0200 Subject: [PATCH 221/417] Fix automaton determinization in AutomatonQueries --- .../elasticsearch/common/lucene/search/AutomatonQueries.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java index 670b9d53a68ea..4e615d797262e 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java @@ -96,7 +96,7 @@ public static Automaton toCaseInsensitiveWildcardAutomaton(Term wildcardquery) { i += length; } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } protected static Automaton toCaseInsensitiveString(BytesRef br) { From eb276e63e4fda2ea49e22add31962cab8ada4058 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 Sep 2024 22:08:56 +0200 Subject: [PATCH 222/417] Fix IOContext in Store --- server/src/main/java/org/elasticsearch/index/store/Store.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index f0a645b3cf110..a2e1fd6cf11f9 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -147,7 +147,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. */ - public static final IOContext READONCE_CHECKSUM = new IOContext(IOContext.Context.DEFAULT, null, null, ReadAdvice.SEQUENTIAL); + public static final IOContext READONCE_CHECKSUM = IOContext.READONCE; private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; From 73bbe0a9505da60785f3080354fa418805da6862 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 3 Sep 2024 06:11:33 +0000 Subject: [PATCH 223/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-09dd985bef9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 90659d580a9fe..63fc510e9a20b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-f23711a3e36 +lucene = 9.12.0-snapshot-09dd985bef9 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git 
a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c3d586a6e3a73..f1a1a97ebfc6c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 4bfdcf90c5e3cd76401885f6a6c95eb2cbaf9733 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 3 Sep 2024 06:12:03 +0000 Subject: [PATCH 224/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-68cc8734ca2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 29c5f37c024ee..87f57b4478cef 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-ce4f56e74ad +lucene = 10.0.0-snapshot-68cc8734ca2 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 32407b2940706..441fb93241789 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From c12da0a6f7b082418eaa6b5286eb3f7438497223 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 3 Sep 2024 
13:51:37 +0200 Subject: [PATCH 225/417] Mute MetadataCreateIndexServiceTests.testValidateDotIndex because of Lucene RegExp tilde operator changes --- .../metadata/MetadataCreateIndexServiceTests.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index f7d343b43b29c..0f2217fcb6936 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -8,6 +8,10 @@ package org.elasticsearch.cluster.metadata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.TransportVersion; @@ -612,9 +616,20 @@ public void testCalculateNumRoutingShards() { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/112453") public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); + //TODO Lucene 10 upgrade + // The "~" operator in Rexeg Automata doesn't seem to work as expected any more without minimization + Automaton patternAutomaton = new RegExp("\\.test-~(one.*)").toAutomaton(); + assertTrue( + new CharacterRunAutomaton(Operations.determinize(patternAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)).run( + ".test-~(one.*)" + ) + ); + // TODO remove this smoke test ^^^ once the issue is fixed + systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-~(one*)", "test")); 
systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".pattern-test*", "test-1")); From 7e409ba47914aaa141af8eae3460b61346d15d8a Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 3 Sep 2024 14:05:01 +0100 Subject: [PATCH 226/417] Add Lucene 10 todo to Store --- server/src/main/java/org/elasticsearch/index/store/Store.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index a2e1fd6cf11f9..988fecfcf73eb 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -147,6 +147,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. */ + // TODO Lucene 10 upgrade public static final IOContext READONCE_CHECKSUM = IOContext.READONCE; private final AtomicBoolean isClosed = new AtomicBoolean(false); From 432df13e98e2161402456ba33c5dd9051cce942b Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 3 Sep 2024 14:06:16 +0100 Subject: [PATCH 227/417] spotless --- server/src/main/java/org/elasticsearch/index/store/Store.java | 1 - .../cluster/metadata/MetadataCreateIndexServiceTests.java | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 988fecfcf73eb..84166c3103dc5 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -32,7 +32,6 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NIOFSDirectory; -import org.apache.lucene.store.ReadAdvice; import 
org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 0f2217fcb6936..493d15dffa44b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -620,7 +620,7 @@ public void testCalculateNumRoutingShards() { public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); - //TODO Lucene 10 upgrade + // TODO Lucene 10 upgrade // The "~" operator in Rexeg Automata doesn't seem to work as expected any more without minimization Automaton patternAutomaton = new RegExp("\\.test-~(one.*)").toAutomaton(); assertTrue( From 52429c46a2338059fff97f4682f2f54e8b453d2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 3 Sep 2024 15:42:19 +0200 Subject: [PATCH 228/417] Add romaniannormalization to list of known token filters --- .../elasticsearch/indices/analysis/AnalysisFactoryTestCase.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index e0c03d831bcd2..ccba1d2d43110 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -118,6 +118,7 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { entry("portugueselightstem", MovedToAnalysisCommon.class), entry("portugueseminimalstem", 
MovedToAnalysisCommon.class), entry("reversestring", MovedToAnalysisCommon.class), + entry("romaniannormalization", MovedToAnalysisCommon.class), entry("russianlightstem", MovedToAnalysisCommon.class), entry("scandinavianfolding", MovedToAnalysisCommon.class), entry("scandinaviannormalization", MovedToAnalysisCommon.class), From 14b8525d3452b7c97c7b0588d2eaf20d6d17c89f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 3 Sep 2024 16:35:45 +0200 Subject: [PATCH 229/417] Fix CategoryContextMappingTests --- .../suggest/completion/CategoryContextMappingTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 76b6097d7012a..ab761f8fcbf41 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -796,7 +796,9 @@ public void testParsingContextFromDocument() { LuceneDocument document = new LuceneDocument(); KeywordFieldMapper.KeywordFieldType keyword = new KeywordFieldMapper.KeywordFieldType("category"); - document.add(new KeywordFieldMapper.KeywordField(keyword.name(), new BytesRef("category1"), new FieldType())); + document.add( + new KeywordFieldMapper.KeywordField(keyword.name(), new BytesRef("category1"), KeywordFieldMapper.Defaults.FIELD_TYPE) + ); // Ignore doc values document.add(new SortedSetDocValuesField(keyword.name(), new BytesRef("category1"))); Set context = mapping.parseContext(document); From bf1f92db6cb025497294c90deb1cfa656692a674 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 4 Sep 2024 06:12:35 +0000 Subject: [PATCH 230/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-09dd985bef9 --- gradle/verification-metadata.xml | 
50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f1a1a97ebfc6c..f9b711acc1c20 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 35f1b244b91675270ebd1c3cf2b7d0897d98395c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 4 Sep 2024 06:12:44 +0000 Subject: [PATCH 231/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-c21bc5405be --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 87f57b4478cef..543d37c29f754 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-68cc8734ca2 +lucene = 10.0.0-snapshot-c21bc5405be bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 441fb93241789..cf9b37808b71b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 22609ed4cc9358ce5d4148cd3172a4f72e50b548 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 4 Sep 2024 09:07:55 +0200 Subject: [PATCH 232/417] fix MultiValueMode --- .../java/org/elasticsearch/search/MultiValueMode.java | 8 
++++---- .../suggest/completion/CategoryContextMappingTests.java | 1 - 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index 079900e1aefc0..e213f4cdd1b60 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -476,11 +476,11 @@ protected BytesRef pick( @Override protected int pick(SortedSetDocValues values) throws IOException { - long maxOrd = -1; - for (int i = 0; i < values.docValueCount(); i++) { - maxOrd = values.nextOrd(); + int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextOrd(); } - return Math.toIntExact(maxOrd); + return Math.toIntExact(values.nextOrd()); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index ab761f8fcbf41..e9df5f5fc7bfa 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; From 7779d35ecaadd1cd8f7739d729c0da93aa40f243 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 3 Sep 2024 22:59:04 +0200 Subject: [PATCH 233/417] Fix MatchBoolPrefixQueryBuilderTests --- .../index/query/MatchBoolPrefixQueryBuilderTests.java | 6 ++++-- 1 file 
changed, 4 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java index a0bac9340ae72..b622dfc5a41c5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -265,10 +265,12 @@ private static void assertBooleanQuery(Query actual, List expectedClauseQ assertThat(actual, instanceOf(BooleanQuery.class)); final BooleanQuery actualBooleanQuery = (BooleanQuery) actual; assertThat(actualBooleanQuery.clauses(), hasSize(expectedClauseQueries.size())); - assertThat(actualBooleanQuery.clauses(), everyItem(hasProperty("occur", equalTo(BooleanClause.Occur.SHOULD)))); for (int i = 0; i < actualBooleanQuery.clauses().size(); i++) { - final Query clauseQuery = actualBooleanQuery.clauses().get(i).query(); + BooleanClause clause = actualBooleanQuery.clauses().get(i); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + final Query clauseQuery = clause.query(); + assertThat(clauseQuery, equalTo(expectedClauseQueries.get(i))); } } From d29974303e10e7b742603a18e47c40bf2ccc9e41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 4 Sep 2024 10:35:36 +0200 Subject: [PATCH 234/417] Don't mark romaniannormalization token filter as exposed --- .../indices/analysis/AnalysisFactoryTestCase.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index ccba1d2d43110..b2ac058c9e4ef 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -118,7 +118,6 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { entry("portugueselightstem", MovedToAnalysisCommon.class), entry("portugueseminimalstem", MovedToAnalysisCommon.class), entry("reversestring", MovedToAnalysisCommon.class), - entry("romaniannormalization", MovedToAnalysisCommon.class), entry("russianlightstem", MovedToAnalysisCommon.class), entry("scandinavianfolding", MovedToAnalysisCommon.class), entry("scandinaviannormalization", MovedToAnalysisCommon.class), @@ -198,7 +197,9 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { entry("daitchmokotoffsoundex", Void.class), entry("persianstem", Void.class), // not exposed - entry("word2vecsynonym", Void.class) + entry("word2vecsynonym", Void.class), + // not exposed + entry("romaniannormalization", Void.class) ); static final Map> KNOWN_CHARFILTERS = Map.of( From 992e3bc6f0b9e479442cb3903b483e1ecf1370c1 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 4 Sep 2024 11:41:04 +0200 Subject: [PATCH 235/417] fix MultiOrdinals #docValueCount --- .../elasticsearch/index/fielddata/ordinals/MultiOrdinals.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index dfc17defd6612..cff98fa6d84a0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -152,6 +152,7 @@ private static class MultiDocs extends AbstractSortedSetDocValues { private long currentOffset; private long currentEndOffset; + private int count; MultiDocs(MultiOrdinals ordinals, ValuesHolder values) { this.valueCount = ordinals.valueCount; @@ -169,6 +170,7 @@ public long getValueCount() { public 
boolean advanceExact(int docId) { currentOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; currentEndOffset = endOffsets.get(docId); + count = Math.toIntExact(currentEndOffset - currentOffset); return currentOffset != currentEndOffset; } @@ -180,7 +182,7 @@ public long nextOrd() { @Override public int docValueCount() { - return Math.toIntExact(currentEndOffset - currentOffset); + return count; } @Override From 4e9bd3f1e9da30bba43097bd0c3f837970f15493 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 4 Sep 2024 12:05:20 +0200 Subject: [PATCH 236/417] Fix AbstractStringFieldDataTestCase#testGlobalOrdinals --- .../index/fielddata/AbstractStringFieldDataTestCase.java | 6 ------ 1 file changed, 6 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index 007882290bd39..2399ee62bf251 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -497,13 +497,11 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); - ord = values.nextOrd(); assertFalse(values.advanceExact(1)); assertTrue(values.advanceExact(2)); ord = values.nextOrd(); assertThat(ord, equalTo(4L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03")); - ord = values.nextOrd(); // Second segment leaf = topLevelReader.leaves().get(1); @@ -519,7 +517,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); - ord = values.nextOrd(); assertTrue(values.advanceExact(1)); ord = values.nextOrd(); assertThat(ord, equalTo(7L)); @@ -530,7 
+527,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); - ord = values.nextOrd(); assertFalse(values.advanceExact(2)); assertTrue(values.advanceExact(3)); ord = values.nextOrd(); @@ -542,7 +538,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(11L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10")); - ord = values.nextOrd(); // Third segment leaf = topLevelReader.leaves().get(2); @@ -558,7 +553,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(2L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10")); - ord = values.nextOrd(); } public void testTermsEnum() throws Exception { From d440438c7afe58d1d0e180f491aabd9ebf078181 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 4 Sep 2024 12:33:23 +0200 Subject: [PATCH 237/417] Fix compile issue due to missing Scorable#docID method --- .../common/lucene/search/function/ScriptScoreFunction.java | 2 +- .../src/main/java/org/elasticsearch/script/ScoreScript.java | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 2729106dccbbf..641b276cc709e 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -63,7 +63,7 @@ public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOEx if (script.needs_termStats()) { assert termStatsFactory != null; - leafScript._setTermStats(termStatsFactory.apply(ctx, scorer::docID)); + leafScript._setTermStats(termStatsFactory.apply(ctx, 
leafScript::docId)); } return new LeafScoreFunction() { diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index 61d225c069c68..9e870fca6aef1 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -115,6 +115,11 @@ public void setDocument(int docid) { this.docId = docid; } + /** Get the current document. */ + public int docId() { + return docId; + } + public void setScorer(Scorable scorer) { this.scoreSupplier = () -> { try { From 5ef57b267aec55d9018a48b146537ced9a5e508c Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 4 Sep 2024 12:29:55 +0100 Subject: [PATCH 238/417] spotless --- .../index/query/MatchBoolPrefixQueryBuilderTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java index b622dfc5a41c5..18980e739fea0 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -36,7 +36,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalToIgnoringCase; -import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.hasSize; public class MatchBoolPrefixQueryBuilderTests extends AbstractQueryTestCase { From 8e396592d8243d6fd5a40d3972319451567283fa Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 4 Sep 2024 12:30:15 +0100 Subject: [PATCH 239/417] Fix MultiBucketColletorTests --- .../search/aggregations/MultiBucketCollectorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index 58d732a1706d7..830cb0b98e90e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -290,6 +290,7 @@ public void testCacheScores() throws IOException { randomBoolean(), Arrays.asList(scoringBucketCollector1, scoringBucketCollector2) ).getLeafCollector(null); + leafBucketCollector.setScorer(scorable); leafBucketCollector.collect(0, 0); // Even though both leaf collectors called scorable.score(), it only got called once thanks to caching assertEquals(1, scorable.numScoreCalls); From 5841c974f8c805c86889ab4b9b0e461b6c380234 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 4 Sep 2024 14:44:47 +0200 Subject: [PATCH 240/417] Fix or mute remaining failing :server test --- .../elasticsearch/index/analysis/PreBuiltAnalyzerTests.java | 3 ++- .../org/elasticsearch/index/engine/InternalEngineTests.java | 2 ++ .../elasticsearch/index/query/IntervalQueryBuilderTests.java | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index fa55d24bdbc48..c5ae2734a16a2 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -80,7 +80,8 @@ public void testThatAnalyzersAreUsedInMapping() throws IOException { PreBuiltAnalyzers randomPreBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt]; String analyzerName = randomPreBuiltAnalyzer.name().toLowerCase(Locale.ROOT); - IndexVersion randomVersion = 
IndexVersionUtils.randomVersion(random()); + // TODO Lucene 10 upgrade, after removal of old IndexVersions, return to "IndexVersionUtils.randomVersion(random())" + IndexVersion randomVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, randomVersion).build(); NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider( diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 779cf8d90c883..af7b3fb3d24bf 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -6748,6 +6748,8 @@ void assertLuceneOperations(InternalEngine engine, long expectedAppends, long ex assertThat(message, engine.getNumDocDeletes(), equalTo(expectedDeletes)); } + // TODO Lucene 10 upgrade, we need to remove old IndexVersions for this to work + @AwaitsFix(bugUrl = "") public void testStoreHonorsLuceneVersion() throws IOException { // this expects a big IndexVersion bump when the lucene major version is bumped IndexVersion lowestCompatiblePreviousVersion = IndexVersion.fromId((IndexVersion.current().id() / 1_000_000) * 1_000_000); diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 37c7172623b54..47a5b86fb9793 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -725,6 +725,9 @@ public void testPrefixes() throws IOException { assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } + // TODO Lucene 10 upgrade, this fails because automata are not 
evaluated as "equal" any more, needs more investigation + // and potentially an issue / fix in Lucene + @AwaitsFix(bugUrl = "") public void testRegexp() throws IOException { String json = Strings.format(""" { From 481f78e1df22e7b8d6768eceac5d5a52ec94d333 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 4 Sep 2024 15:00:50 +0200 Subject: [PATCH 241/417] Fix APMTracer by determinizing automata --- .../elasticsearch/telemetry/apm/internal/tracing/APMTracer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 0b020a24eeffb..201d967dacf0c 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -438,7 +438,7 @@ private static CharacterRunAutomaton buildAutomaton(List includePatterns ? 
includeAutomaton : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CharacterRunAutomaton(finalAutomaton); + return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton patternsToAutomaton(List patterns) { From 693df5b3edfe7ceae9479a46723fe9009d9258dc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 5 Sep 2024 06:11:19 +0000 Subject: [PATCH 242/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-b91b4136aff --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 543d37c29f754..8ba5dcd92e1c8 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-c21bc5405be +lucene = 10.0.0-snapshot-b91b4136aff bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cf9b37808b71b..228f4f0962eb8 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 97fe5767ad36f28a65bb924df523dda0e24656b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 5 Sep 2024 06:11:19 +0000 Subject: [PATCH 243/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-56468ea3bb8 --- build-tools-internal/version.properties | 2 +- 
gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 63fc510e9a20b..ce7b9e2e77e57 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-09dd985bef9 +lucene = 9.12.0-snapshot-56468ea3bb8 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f9b711acc1c20..89529592bf8d5 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e02cb0f8bc9e8725a384d33df8d23ec80d739e98 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 5 Sep 2024 09:24:04 +0100 Subject: [PATCH 244/417] Add awaits fix bugURL --- .../elasticsearch/index/query/IntervalQueryBuilderTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 47a5b86fb9793..0eb18ecca40bc 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -727,7 +727,7 @@ public void testPrefixes() throws IOException { // TODO Lucene 10 upgrade, this fails because automata are not evaluated as "equal" any more, needs more investigation // and potentially an issue / fix in Lucene - 
@AwaitsFix(bugUrl = "") + @AwaitsFix(bugUrl = "https://github.com/apache/lucene/pull/13718") public void testRegexp() throws IOException { String json = Strings.format(""" { From 4bc8e009adb66dab5a13d16412bfb26eef592c80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 5 Sep 2024 17:44:32 +0200 Subject: [PATCH 245/417] Removing IndexVersions.V_7_0_0 and IndexVersions.V_7_1_0 --- .../common/CommonAnalysisPluginTests.java | 74 +-------------- .../common/EdgeNGramTokenizerTests.java | 31 +----- .../common/SynonymsAnalysisTests.java | 6 +- ...DelimiterGraphTokenFilterFactoryTests.java | 71 ++++---------- .../mapper/LegacyGeoShapeFieldMapper.java | 5 +- .../migration/MultiFeatureMigrationIT.java | 2 +- .../AnalysisPhoneticFactoryTests.java | 2 +- ...ransportGetFeatureUpgradeStatusAction.java | 4 +- .../metadata/MetadataCreateIndexService.java | 22 ++--- .../settings/AbstractScopedSettings.java | 3 + .../common/settings/IndexScopedSettings.java | 4 +- .../elasticsearch/index/IndexVersions.java | 4 +- .../analysis/ShingleTokenFilterFactory.java | 47 +++------- .../index/mapper/MapperRegistry.java | 2 +- .../index/similarity/SimilarityProviders.java | 52 ++-------- .../indices/analysis/AnalysisModule.java | 2 +- .../completion/context/GeoContextMapping.java | 46 +++------ .../cluster/stats/VersionStatsTests.java | 8 +- .../cluster/metadata/MetadataTests.java | 58 ++---------- .../elasticsearch/env/NodeMetadataTests.java | 2 +- .../index/engine/InternalEngineTests.java | 4 +- .../index/mapper/DateFieldMapperTests.java | 51 ---------- .../index/mapper/DynamicTemplatesTests.java | 94 +++---------------- .../similarity/SimilarityServiceTests.java | 6 +- .../indices/IndicesModuleTests.java | 30 ++---- .../indices/analysis/AnalysisModuleTests.java | 31 ++---- .../index/mapper/MetadataMapperTestCase.java | 2 +- .../AbstractSnapshotIntegTestCase.java | 2 +- .../deprecation/IndexDeprecationChecks.java | 7 +- .../IndexDeprecationChecksTests.java | 32 
------- .../SnapshotsRecoveryPlannerServiceTests.java | 17 +--- ...GeoShapeWithDocValuesFieldMapperTests.java | 10 -- .../index/mapper/ShapeFieldMapperTests.java | 2 +- 33 files changed, 137 insertions(+), 596 deletions(-) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java index c18cb3dddf0ae..3a3ba6ae276de 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -57,7 +57,7 @@ public void testNGramFilterInCustomAnalyzerDeprecationError() throws IOException .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.V_8_10_0) ) .put("index.analysis.analyzer.custom_analyzer.type", "custom") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") @@ -106,7 +106,7 @@ public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOExcep .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.V_8_10_0) ) .put("index.analysis.analyzer.custom_analyzer.type", "custom") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") @@ -124,43 +124,9 @@ public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOExcep } /** - * Check that we log a deprecation warning for "nGram" and "edgeNGram" tokenizer names with 7.6 and - * disallow 
usages for indices created after 8.0 + * Check that we for disallow usages for indices created after 8.0 for "nGram" and "edgeNGram" tokenizer names */ public void testNGramTokenizerDeprecation() throws IOException { - // tests for prebuilt tokenizer - doTestPrebuiltTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestPrebuiltTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestPrebuiltTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); - doTestPrebuiltTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); expectThrows( IllegalArgumentException.class, () -> doTestPrebuiltTokenizerDeprecation( @@ -179,40 +145,6 @@ public void testNGramTokenizerDeprecation() throws IOException { true ) ); - - // same batch of tests for custom tokenizer definition in the settings - doTestCustomTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestCustomTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestCustomTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); - 
doTestCustomTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); expectThrows( IllegalArgumentException.class, () -> doTestCustomTokenizerDeprecation( diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java index 412e3ba3e380a..9f291ffc80de0 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java @@ -47,36 +47,7 @@ private IndexAnalyzers buildAnalyzers(IndexVersion version, String tokenizer) th } public void testPreConfiguredTokenizer() throws IOException { - - // Before 7.3 we return ngrams of length 1 only - { - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) - ); - try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edge_ngram")) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "test", new String[] { "t" }); - } - } - - // Check deprecated name as well - { - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) - ); - try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edgeNGram")) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "test", new String[] { "t" }); - } - } - - // Afterwards, we return ngrams of length 1 and 2, to match the default factory settings + // we 
return ngrams of length 1 and 2, to match the default factory settings { try (IndexAnalyzers indexAnalyzers = buildAnalyzers(IndexVersion.current(), "edge_ngram")) { NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java index 7a2bd2a822988..6249189ce538f 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java @@ -337,7 +337,7 @@ public void testShingleFilters() { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonyms.type", "synonym") @@ -391,7 +391,7 @@ public void testPreconfiguredTokenFilters() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .build(); @@ -423,7 +423,7 @@ public void testDisallowedTokenFilters() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .putList("common_words", "a", "b") 
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java index 68e6d6661f944..39fda06363033 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.index.IndexService.IndexCreationContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -25,7 +24,6 @@ import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.index.IndexVersionUtils; import java.io.IOException; import java.io.StringReader; @@ -180,61 +178,26 @@ public void testIgnoreKeywords() throws IOException { } public void testPreconfiguredFilter() throws IOException { - // Before 7.3 we don't adjust offsets - { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - Settings indexSettings = Settings.builder() - .put( - IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) - ) - ) - .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") - .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph") - .build(); - IndexSettings idxSettings = 
IndexSettingsModule.newIndexSettings("index", indexSettings); - - try ( - IndexAnalyzers indexAnalyzers = new AnalysisModule( - TestEnvironment.newEnvironment(settings), - Collections.singletonList(new CommonAnalysisPlugin()), - new StablePluginsRegistry() - ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings) - ) { - - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 0 }, new int[] { 4, 4 }); - - } - } - - // Afger 7.3 we do adjust offsets - { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - Settings indexSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") - .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph") - .build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); + Settings indexSettings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph") + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - try ( - IndexAnalyzers indexAnalyzers = new AnalysisModule( - TestEnvironment.newEnvironment(settings), - Collections.singletonList(new CommonAnalysisPlugin()), - new StablePluginsRegistry() - ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings) - ) { + try ( + IndexAnalyzers indexAnalyzers = new AnalysisModule( + TestEnvironment.newEnvironment(settings), + Collections.singletonList(new 
CommonAnalysisPlugin()), + new StablePluginsRegistry() + ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings) + ) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 1 }, new int[] { 1, 4 }); + NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 1 }, new int[] { 1, 4 }); - } } } } diff --git a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java index 2808dae31239c..d4a45113e6845 100644 --- a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java +++ b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java @@ -29,7 +29,6 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; import org.elasticsearch.index.mapper.DocumentParserContext; @@ -232,9 +231,7 @@ public Builder(String name, IndexVersion version, boolean ignoreMalformedByDefau }); // Set up serialization - if (version.onOrAfter(IndexVersions.V_7_0_0)) { - this.strategy.alwaysSerialize(); - } + this.strategy.alwaysSerialize(); // serialize treeLevels if treeLevels is configured, OR if defaults are requested and precision is not configured treeLevels.setSerializerCheck((id, ic, v) -> ic || (id && precision.get() == null)); // serialize precision if precision is configured, OR if defaults are requested and treeLevels is not configured diff --git 
a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 8f9c2b7f34105..ab9b857c43bb2 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -264,7 +264,7 @@ public void testMultipleFeatureMigration() throws Exception { .setAliasName(".second-internal-managed-alias") .setPrimaryIndex(".second-int-man-old") .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) - .setSettings(createSettings(IndexVersions.V_7_0_0, 0)) + .setSettings(createSettings(IndexVersions.V_8_0_0, 0)) .setMappings(createMapping(true, true)) .setOrigin(ORIGIN) .setVersionMetaKey(VERSION_META_KEY) diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java index 348e9f5fae7c8..0c6615237b36c 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java @@ -43,7 +43,7 @@ public void testDisallowedWithSynonyms() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .build(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java index ae538e7c72334..3e5b6c255c5fa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java @@ -57,7 +57,9 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA */ @UpdateForV9 public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_7_0_0; - public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_7_0_0; + @UpdateForV9 + // TODO lucene 10 upgrade, we increased this to IndexVersions.V_8_0_0, not sure if that was premature. Check with ie. core/infra + public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_8_0_0; private final SystemIndices systemIndices; PersistentTasksService persistentTasksService; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 02b7312b4a99d..2b299fcf1ec11 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1620,21 +1620,13 @@ static void prepareResizeIndexSettings( * the less default split operations are supported */ public static int calculateNumRoutingShards(int numShards, IndexVersion indexVersionCreated) { - if (indexVersionCreated.onOrAfter(IndexVersions.V_7_0_0)) { - // only select this automatically for indices that are created on or after 7.0 this will prevent this new behaviour - // until we have a fully upgraded cluster. 
Additionally it will make integrating testing easier since mixed clusters - // will always have the behavior of the min node in the cluster. - // - // We use as a default number of routing shards the higher number that can be expressed - // as {@code numShards * 2^x`} that is less than or equal to the maximum number of shards: 1024. - int log2MaxNumShards = 10; // logBase2(1024) - int log2NumShards = 32 - Integer.numberOfLeadingZeros(numShards - 1); // ceil(logBase2(numShards)) - int numSplits = log2MaxNumShards - log2NumShards; - numSplits = Math.max(1, numSplits); // Ensure the index can be split at least once - return numShards * 1 << numSplits; - } else { - return numShards; - } + // We use as a default number of routing shards the higher number that can be expressed + // as {@code numShards * 2^x`} that is less than or equal to the maximum number of shards: 1024. + int log2MaxNumShards = 10; // logBase2(1024) + int log2NumShards = 32 - Integer.numberOfLeadingZeros(numShards - 1); // ceil(logBase2(numShards)) + int numSplits = log2MaxNumShards - log2NumShards; + numSplits = Math.max(1, numSplits); // Ensure the index can be split at least once + return numShards * 1 << numSplits; } public static void validateTranslogRetentionSettings(Settings indexSettings) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index c3ad2d4f9a9d9..bebec0979d99a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import java.util.ArrayList; import java.util.Collections; @@ -436,6 +437,8 @@ public synchronized void 
initializeAndWatch(Setting setting, Consumer addSettingsUpdateConsumer(setting, consumer); } + @UpdateForV9 + // do we need to rename / rework this method for v9? protected void validateDeprecatedAndRemovedSettingV7(Settings settings, Setting setting) {} /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index fe6616cb4fb8e..f0ed11401f013 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -263,7 +263,9 @@ protected void validateDeprecatedAndRemovedSettingV7(Settings settings, Setting< // error out on those validations, we will check with the creation version present at index // creation time, as well as on index update settings. if (indexVersion.equals(IndexVersions.ZERO) == false - && (indexVersion.before(IndexVersions.V_7_0_0) || indexVersion.onOrAfter(IndexVersions.V_8_0_0))) { + // TODO lucene 10 upgrade, check if we need to change anything for pre8 indices + // old: && (indexVersion.before(IndexVersions.V_7_0_0) || indexVersion.onOrAfter(IndexVersions.V_8_0_0))) { + && indexVersion.onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException("unknown setting [" + setting.getKey() + "]"); } } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index b94321b8aa886..679f9291fedb2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -59,9 +59,7 @@ private static Version parseUnchecked(String version) { @UpdateForV9 // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_0_0 = def(7_00_00_99, 
parseUnchecked("8.0.0")); - public static final IndexVersion V_7_1_0 = def(7_01_00_99, parseUnchecked("8.0.0")); public static final IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); @@ -180,7 +178,7 @@ private static Version parseUnchecked(String version) { * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions. */ - public static final IndexVersion MINIMUM_COMPATIBLE = V_7_0_0; + public static final IndexVersion MINIMUM_COMPATIBLE = V_7_2_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java index 174253252416c..4d9f53632bcf6 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java @@ -10,12 +10,10 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.shingle.ShingleFilter; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.lucene.analysis.miscellaneous.DisableGraphAttribute; public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory { @@ -36,28 +34,17 @@ public ShingleTokenFilterFactory(IndexSettings indexSettings, Environment enviro int shingleDiff = maxShingleSize - minShingleSize + (outputUnigrams ? 
1 : 0); if (shingleDiff > maxAllowedShingleDiff) { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_0_0)) { - throw new IllegalArgumentException( - "In Shingle TokenFilter the difference between max_shingle_size and min_shingle_size (and +1 if outputting unigrams)" - + " must be less than or equal to: [" - + maxAllowedShingleDiff - + "] but was [" - + shingleDiff - + "]. This limit" - + " can be set by changing the [" - + IndexSettings.MAX_SHINGLE_DIFF_SETTING.getKey() - + "] index level setting." - ); - } else { - DEPRECATION_LOGGER.warn( - DeprecationCategory.ANALYSIS, - "excessive_shingle_diff", - "Deprecated big difference between maxShingleSize and minShingleSize" - + " in Shingle TokenFilter, expected difference must be less than or equal to: [" - + maxAllowedShingleDiff - + "]" - ); - } + throw new IllegalArgumentException( + "In Shingle TokenFilter the difference between max_shingle_size and min_shingle_size (and +1 if outputting unigrams)" + + " must be less than or equal to: [" + + maxAllowedShingleDiff + + "] but was [" + + shingleDiff + + "]. This limit" + + " can be set by changing the [" + + IndexSettings.MAX_SHINGLE_DIFF_SETTING.getKey() + + "] index level setting." 
+ ); } Boolean outputUnigramsIfNoShingles = settings.getAsBoolean("output_unigrams_if_no_shingles", false); @@ -81,17 +68,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } else { - DEPRECATION_LOGGER.warn( - DeprecationCategory.ANALYSIS, - "synonym_tokenfilters", - "Token filter " + name() + "] will not be usable to parse synonym after v7.0" - ); - } - return this; - + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } public Factory getInnerFactory() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java index aa2a7ce2f3996..05a6f243dda39 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java @@ -74,7 +74,7 @@ public Map getRuntimeFieldParsers() { public Map getMetadataMapperParsers(IndexVersion indexCreatedVersion) { if (indexCreatedVersion.onOrAfter(IndexVersions.V_8_0_0)) { return metadataMapperParsers; - } else if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { + } else if (indexCreatedVersion.onOrAfter(IndexVersion.fromId(7000099))) { return metadataMapperParsers7x; } else if (indexCreatedVersion.onOrAfter(IndexVersion.fromId(6000099))) { return metadataMapperParsers6x; diff --git a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java index 76764f589c09b..6e6926ab667f5 100644 --- a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java +++ 
b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java @@ -37,11 +37,9 @@ import org.apache.lucene.search.similarities.NormalizationH2; import org.apache.lucene.search.similarities.NormalizationH3; import org.apache.lucene.search.similarities.NormalizationZ; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import java.util.Arrays; @@ -101,23 +99,9 @@ private static BasicModel parseBasicModel(IndexVersion indexCreatedVersion, Sett if (model == null) { String replacement = LEGACY_BASIC_MODELS.get(basicModel); if (replacement != null) { - if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { - throw new IllegalArgumentException( - "Basic model [" + basicModel + "] isn't supported anymore, " + "please use another model." - ); - } else { - deprecationLogger.warn( - DeprecationCategory.INDICES, - basicModel + "_similarity_model_replaced", - "Basic model [" - + basicModel - + "] isn't supported anymore and has arbitrarily been replaced with [" - + replacement - + "]." - ); - model = BASIC_MODELS.get(replacement); - assert model != null; - } + throw new IllegalArgumentException( + "Basic model [" + basicModel + "] isn't supported anymore, " + "please use another model." + ); } } @@ -140,23 +124,9 @@ private static AfterEffect parseAfterEffect(IndexVersion indexCreatedVersion, Se if (effect == null) { String replacement = LEGACY_AFTER_EFFECTS.get(afterEffect); if (replacement != null) { - if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { - throw new IllegalArgumentException( - "After effect [" + afterEffect + "] isn't supported anymore, please use another effect." 
- ); - } else { - deprecationLogger.warn( - DeprecationCategory.INDICES, - afterEffect + "_after_effect_replaced", - "After effect [" - + afterEffect - + "] isn't supported anymore and has arbitrarily been replaced with [" - + replacement - + "]." - ); - effect = AFTER_EFFECTS.get(replacement); - assert effect != null; - } + throw new IllegalArgumentException( + "After effect [" + afterEffect + "] isn't supported anymore, please use another effect." + ); } } @@ -240,15 +210,7 @@ static void assertSettingsIsSubsetOf(String type, IndexVersion version, Settings unknownSettings.removeAll(Arrays.asList(supportedSettings)); unknownSettings.remove("type"); // used to figure out which sim this is if (unknownSettings.isEmpty() == false) { - if (version.onOrAfter(IndexVersions.V_7_0_0)) { - throw new IllegalArgumentException("Unknown settings for similarity of type [" + type + "]: " + unknownSettings); - } else { - deprecationLogger.warn( - DeprecationCategory.INDICES, - "unknown_similarity_setting", - "Unknown settings for similarity of type [" + type + "]: " + unknownSettings - ); - } + throw new IllegalArgumentException("Unknown settings for similarity of type [" + type + "]: " + unknownSettings); } } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 902e080c42328..ecb75f8b4f506 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -137,7 +137,7 @@ private static NamedRegistry> setupTokenFil tokenFilters.register("standard", new AnalysisProvider() { @Override public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - if (indexSettings.getIndexVersionCreated().before(IndexVersions.V_7_0_0)) { + if (indexSettings.getIndexVersionCreated().before(IndexVersions.V_8_0_0)) { 
deprecationLogger.warn( DeprecationCategory.ANALYSIS, "standard_deprecation", diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index 2cd7a751264bd..512368110877c 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -17,11 +17,9 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; @@ -272,39 +270,19 @@ public void validateReferences(IndexVersion indexVersionCreated, Function versions = List.of(IndexVersion.current(), IndexVersions.V_7_0_0, IndexVersions.V_7_1_0, IndexVersions.V_7_2_0); + List versions = List.of(IndexVersion.current(), IndexVersions.V_8_0_0, IndexVersions.V_8_1_0, IndexVersions.V_8_2_0); List stats = new ArrayList<>(); for (IndexVersion v : versions) { VersionStats.SingleVersionStats s = new VersionStats.SingleVersionStats( diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 955d7d2de6882..8124666594fad 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -866,19 +866,19 @@ public 
void testFindMappingsWithFilters() throws IOException { public void testOldestIndexComputation() { Metadata metadata = buildIndicesWithVersions( - IndexVersions.V_7_0_0, + IndexVersions.V_8_0_0, IndexVersion.current(), IndexVersion.fromId(IndexVersion.current().id() + 1) ).build(); - assertEquals(IndexVersions.V_7_0_0, metadata.oldestIndexVersion()); + assertEquals(IndexVersions.V_8_0_0, metadata.oldestIndexVersion()); Metadata.Builder b = Metadata.builder(); assertEquals(IndexVersion.current(), b.build().oldestIndexVersion()); Throwable ex = expectThrows( IllegalArgumentException.class, - () -> buildIndicesWithVersions(IndexVersions.V_7_0_0, IndexVersions.ZERO, IndexVersion.fromId(IndexVersion.current().id() + 1)) + () -> buildIndicesWithVersions(IndexVersions.V_8_0_0, IndexVersions.ZERO, IndexVersion.fromId(IndexVersion.current().id() + 1)) .build() ); @@ -1963,18 +1963,19 @@ public void testHiddenAliasValidation() { } public void testSystemAliasValidationMixedVersionSystemAndRegularFails() { - final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( + final IndexVersion random8xVersion = IndexVersionUtils.randomVersionBetween( random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) + IndexVersions.V_8_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersion.current()) ); final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); - final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); + // TODO lucene 10 upgrade, check that the following removal isn't making this test unuseful + // final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random8xVersion, true); final IndexMetadata regularIndex = buildIndexWithAlias("regular1", SYSTEM_ALIAS_NAME, false, IndexVersion.current(), false); IllegalStateException 
exception = expectThrows( IllegalStateException.class, - () -> metadataWithIndices(currentVersionSystem, oldVersionSystem, regularIndex) + () -> metadataWithIndices(currentVersionSystem, regularIndex) ); assertThat( exception.getMessage(), @@ -2012,47 +2013,6 @@ public void testSystemAliasValidationNewSystemAndRegularFails() { ); } - public void testSystemAliasOldSystemAndNewRegular() { - final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ); - final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); - final IndexMetadata regularIndex = buildIndexWithAlias("regular1", SYSTEM_ALIAS_NAME, false, IndexVersion.current(), false); - - // Should be ok: - metadataWithIndices(oldVersionSystem, regularIndex); - } - - public void testSystemIndexValidationAllRegular() { - final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ); - final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); - final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); - final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); - - // Should be ok - metadataWithIndices(currentVersionSystem, currentVersionSystem2, oldVersionSystem); - } - - public void testSystemAliasValidationAllSystemSomeOld() { - final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ); - final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, 
null, IndexVersion.current(), true); - final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); - final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); - - // Should be ok: - metadataWithIndices(currentVersionSystem, currentVersionSystem2, oldVersionSystem); - } - public void testSystemAliasValidationAll8x() { final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index f60812977d578..466585b6aadeb 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -80,7 +80,7 @@ public void testEqualsHashcodeSerialization() { public void testReadsFormatWithoutVersion() throws IOException { // the behaviour tested here is only appropriate if the current version is compatible with versions 7 and earlier - assertTrue(IndexVersions.MINIMUM_COMPATIBLE.onOrBefore(IndexVersions.V_7_0_0)); + assertTrue(IndexVersions.MINIMUM_COMPATIBLE.onOrBefore(IndexVersions.V_8_0_0)); // when the current version is incompatible with version 7, the behaviour should change to reject files like the given resource // which do not have the version field diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index af7b3fb3d24bf..cfb25804dce51 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -7501,14 +7501,14 @@ public void testTrimUnsafeCommitHasESVersionInUserData() throws IOException { .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); try (IndexWriter indexWriter = new IndexWriter(store.directory(), indexWriterConfig)) { Map commitUserDataWithOlderVersion = new HashMap<>(committedSegmentsInfo.userData); - commitUserDataWithOlderVersion.put(ES_VERSION, IndexVersions.V_7_0_0.toString()); + commitUserDataWithOlderVersion.put(ES_VERSION, IndexVersions.V_8_0_0.toString()); indexWriter.setLiveCommitData(commitUserDataWithOlderVersion.entrySet()); indexWriter.commit(); } Map userDataBeforeTrimUnsafeCommits = store.readLastCommittedSegmentsInfo().getUserData(); assertThat(userDataBeforeTrimUnsafeCommits, hasKey(ES_VERSION)); - assertThat(userDataBeforeTrimUnsafeCommits.get(ES_VERSION), is(equalTo(IndexVersions.V_7_0_0.toString()))); + assertThat(userDataBeforeTrimUnsafeCommits.get(ES_VERSION), is(equalTo(IndexVersions.V_8_0_0.toString()))); store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 9cfdb2c46a291..27327663b5956 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -19,9 +19,7 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import org.elasticsearch.script.DateFieldScript; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -46,7 +44,6 @@ import static org.hamcrest.Matchers.lessThan; import static 
org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Mockito.mock; public class DateFieldMapperTests extends MapperTestCase { @@ -766,52 +763,4 @@ public void testLegacyField() throws Exception { assertThat(service.fieldType("mydate"), instanceOf(DateFieldType.class)); assertNotEquals(DEFAULT_DATE_TIME_FORMATTER, ((DateFieldType) service.fieldType("mydate")).dateTimeFormatter); } - - public void testLegacyDateFormatName() { - DateFieldMapper.Builder builder = new DateFieldMapper.Builder( - "format", - DateFieldMapper.Resolution.MILLISECONDS, - null, - mock(ScriptService.class), - true, - // BWC compatible index, e.g 7.x - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ) - ); - - // Check that we allow the use of camel case date formats on 7.x indices - @SuppressWarnings("unchecked") - FieldMapper.Parameter formatParam = (FieldMapper.Parameter) builder.getParameters()[3]; - formatParam.parse("date_time_format", mock(MappingParserContext.class), "strictDateOptionalTime"); - builder.buildFormatter(); // shouldn't throw exception - - formatParam.parse("date_time_format", mock(MappingParserContext.class), "strictDateOptionalTime||strictDateOptionalTimeNanos"); - builder.buildFormatter(); // shouldn't throw exception - - DateFieldMapper.Builder newFieldBuilder = new DateFieldMapper.Builder( - "format", - DateFieldMapper.Resolution.MILLISECONDS, - null, - mock(ScriptService.class), - true, - IndexVersion.current() - ); - - @SuppressWarnings("unchecked") - final FieldMapper.Parameter newFormatParam = (FieldMapper.Parameter) newFieldBuilder.getParameters()[3]; - - // Check that we don't allow the use of camel case date formats on 8.x indices - assertEquals( - "Error parsing [format] on field [format]: Invalid format: [strictDateOptionalTime]: Unknown pattern letter: t", - expectThrows(IllegalArgumentException.class, 
() -> { - newFormatParam.parse("date_time_format", mock(MappingParserContext.class), "strictDateOptionalTime"); - assertEquals("strictDateOptionalTime", newFormatParam.getValue()); - newFieldBuilder.buildFormatter(); - }).getMessage() - ); - - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index a5a5d9726f233..9e683ca9fb839 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -20,10 +20,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.XContentTestUtils; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -221,61 +219,24 @@ public void testSimpleWithXContentTraverse() throws Exception { } public void testDynamicMapperWithBadMapping() throws IOException { - { - // in 7.x versions this will issue a deprecation warning - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ); - DocumentMapper mapper = createDocumentMapper(version, topMapping(b -> { - b.startArray("dynamic_templates"); - { - b.startObject(); - { - b.startObject("test"); - { - b.field("match_mapping_type", "string"); - b.startObject("mapping").field("badparam", false).endObject(); - } - b.endObject(); - } - b.endObject(); - } - b.endArray(); - })); - assertWarnings( - "Parameter [badparam] is used in a dynamic template mapping and 
has no effect on type [null]. " - + "Usage will result in an error in future major versions and should be removed." - ); - mapper.parse(source(b -> b.field("field", "foo"))); - assertWarnings( - "Parameter [badparam] is used in a dynamic template mapping and has no effect on type [null]. " - + "Usage will result in an error in future major versions and should be removed." - ); - } - - { - // in 8.x it will error out - Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(topMapping(b -> { - b.startArray("dynamic_templates"); + Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(topMapping(b -> { + b.startArray("dynamic_templates"); + { + b.startObject(); { - b.startObject(); + b.startObject("test"); { - b.startObject("test"); - { - b.field("match_mapping_type", "string"); - b.startObject("mapping").field("badparam", false).endObject(); - } - b.endObject(); + b.field("match_mapping_type", "string"); + b.startObject("mapping").field("badparam", false).endObject(); } b.endObject(); } - b.endArray(); - }))); - assertThat(e.getMessage(), containsString("dynamic template [test] has invalid content")); - assertThat(e.getCause().getMessage(), containsString("badparam")); - } + b.endObject(); + } + b.endArray(); + }))); + assertThat(e.getMessage(), containsString("dynamic template [test] has invalid content")); + assertThat(e.getCause().getMessage(), containsString("badparam")); } public void testDynamicRuntimeWithBadMapping() { @@ -677,35 +638,6 @@ public void testIllegalDynamicTemplateNoMappingTypeRuntime() throws Exception { assertEquals("unknown parameter [foo] on runtime field [__dynamic__my_template] of type [date]", e.getRootCause().getMessage()); } - public void testIllegalDynamicTemplate7DotXIndex() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder(); - mapping.startObject(); - { - mapping.startObject(MapperService.SINGLE_MAPPING_NAME); - mapping.startArray("dynamic_templates"); - 
{ - mapping.startObject(); - mapping.startObject("my_template"); - mapping.field("match_mapping_type", "string"); - mapping.startObject("mapping"); - mapping.field("type", "string"); - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - } - mapping.endArray(); - mapping.endObject(); - } - mapping.endObject(); - IndexVersion createdVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_7_0); - MapperService mapperService = createMapperService(createdVersion, mapping); - assertThat(mapperService.documentMapper().mappingSource().toString(), containsString("\"type\":\"string\"")); - assertWarnings(""" - dynamic template [my_template] has invalid content \ - [{"match_mapping_type":"string","mapping":{"type":"string"}}], attempted to validate it \ - with the following match_mapping_type: [string], last error: [No mapper found for type [string]]"""); - } - public void testTemplateWithoutMatchPredicates() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder(); mapping.startObject(); diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java index a003436fc0523..bfc6ab41f627c 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java @@ -73,7 +73,7 @@ public float score(float freq, long norm) { }; IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, negativeScoresSim) + () -> SimilarityService.validateSimilarity(IndexVersions.V_8_0_0, negativeScoresSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarities should not return negative scores")); @@ -98,7 +98,7 @@ public float score(float freq, long norm) { }; e = expectThrows( 
IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, decreasingScoresWithFreqSim) + () -> SimilarityService.validateSimilarity(IndexVersions.V_8_0_0, decreasingScoresWithFreqSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not decrease when term frequency increases")); @@ -123,7 +123,7 @@ public float score(float freq, long norm) { }; e = expectThrows( IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, increasingScoresWithNormSim) + () -> SimilarityService.validateSimilarity(IndexVersions.V_8_0_0, increasingScoresWithNormSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not increase when norm increases")); } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index 628ff4b99b133..082489ab2630b 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -98,28 +98,14 @@ public Map getMetadataMappers() { public void testBuiltinMappers() { IndicesModule module = new IndicesModule(Collections.emptyList()); - { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); - assertThat( - module.getMapperRegistry().getMapperParser("object", IndexVersion.current()), - instanceOf(ObjectMapper.TypeParser.class) - ); - assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty()); - Map metadataMapperParsers = module.getMapperRegistry() - .getMetadataMapperParsers(version); - assertEquals(EXPECTED_METADATA_FIELDS.length, metadataMapperParsers.size()); - int i = 0; - for (String field : metadataMapperParsers.keySet()) { - assertEquals(EXPECTED_METADATA_FIELDS[i++], field); - } - } - { - IndexVersion version = 
IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ); - assertEquals(EXPECTED_METADATA_FIELDS.length - 1, module.getMapperRegistry().getMetadataMapperParsers(version).size()); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); + assertThat(module.getMapperRegistry().getMapperParser("object", IndexVersion.current()), instanceOf(ObjectMapper.TypeParser.class)); + assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty()); + Map metadataMapperParsers = module.getMapperRegistry().getMetadataMapperParsers(version); + assertEquals(EXPECTED_METADATA_FIELDS.length, metadataMapperParsers.size()); + int i = 0; + for (String field : metadataMapperParsers.keySet()) { + assertEquals(EXPECTED_METADATA_FIELDS[i++], field); } } diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index c4d6cb6be502d..e4549fe876a9c 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -191,28 +191,15 @@ public void testStandardFilterBWC() throws IOException { // standard tokenfilter should have been removed entirely in the 7x line. 
However, a // cacheing bug meant that it was still possible to create indexes using a standard // filter until 7.6 - { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_6_0, IndexVersion.current()); - final Settings settings = Settings.builder() - .put("index.analysis.analyzer.my_standard.tokenizer", "standard") - .put("index.analysis.analyzer.my_standard.filter", "standard") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetadata.SETTING_VERSION_CREATED, version) - .build(); - IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> getIndexAnalyzers(settings)); - assertThat(exc.getMessage(), equalTo("The [standard] token filter has been removed.")); - } - { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2); - final Settings settings = Settings.builder() - .put("index.analysis.analyzer.my_standard.tokenizer", "standard") - .put("index.analysis.analyzer.my_standard.filter", "standard") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetadata.SETTING_VERSION_CREATED, version) - .build(); - getIndexAnalyzers(settings); - assertWarnings("The [standard] token filter is deprecated and will be removed in a future version."); - } + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); + final Settings settings = Settings.builder() + .put("index.analysis.analyzer.my_standard.tokenizer", "standard") + .put("index.analysis.analyzer.my_standard.filter", "standard") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, version) + .build(); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> getIndexAnalyzers(settings)); + assertThat(exc.getMessage(), equalTo("The [standard] token filter has been 
removed.")); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index 1b00ba3e9fd09..9115ec8f7c976 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -145,7 +145,7 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException { public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_6_0); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, previousVersion); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, previousVersion); assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); MapperService mapperService = createMapperService(version, mapping(b -> {})); // these parameters were previously silently ignored, they will still be ignored in existing indices diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 1656a09daa123..e04f7339a97bd 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -369,7 +369,7 @@ protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) t initWithSnapshotVersion( repoName, repoPath, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_8_9_0) + 
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.V_8_9_0) ); } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 3da32c7f5a4c2..9428da1844e9b 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.LegacyFormatNames; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -30,13 +31,15 @@ */ public class IndexDeprecationChecks { + @UpdateForV9 static DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata) { // TODO: this check needs to be revised. It's trivially true right now. 
IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); - if (currentCompatibilityVersion.before(IndexVersions.V_7_0_0)) { + if (currentCompatibilityVersion.before(IndexVersions.V_8_0_0)) { return new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 7.0", + "Old index with a compatibility version < 8.0", + // we probably have to link this to "breaking-changes-9.0.html", "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + "breaking-changes-8.0.html", "This index has version: " + currentCompatibilityVersion.toReleaseVersion(), false, diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index 62f89f650dec2..bf75ff76c66bd 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -12,18 +12,15 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; -import java.io.IOException; import java.util.List; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; public class IndexDeprecationChecksTests extends ESTestCase { @@ -149,33 +146,4 @@ public void testFrozenIndex() { ); } - public void 
testCamelCaseDeprecation() throws IOException { - String simpleMapping = "{\n\"_doc\": {" - + "\"properties\" : {\n" - + " \"date_time_field\" : {\n" - + " \"type\" : \"date\",\n" - + " \"format\" : \"strictDateOptionalTime\"\n" - + " }\n" - + " }" - + "} }"; - - IndexMetadata simpleIndex = IndexMetadata.builder(randomAlphaOfLengthBetween(5, 10)) - .settings(settings(IndexVersions.V_7_0_0)) - .numberOfShards(1) - .numberOfReplicas(1) - .putMapping(simpleMapping) - .build(); - - DeprecationIssue expected = new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - "Date fields use deprecated camel case formats", - "https://ela.st/es-deprecation-7-camel-case-format", - "Convert [date_time_field] format [strictDateOptionalTime] " - + "which contains deprecated camel case to snake case. [strictDateOptionalTime] to [strict_date_optional_time].", - false, - null - ); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(simpleIndex)); - assertThat(issues, hasItem(expected)); - } } diff --git a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java index 851d5f8f02b2a..d3f443e05a74f 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetadata; -import org.elasticsearch.indices.recovery.RecoverySettings; import 
org.elasticsearch.indices.recovery.plan.ShardRecoveryPlan; import org.elasticsearch.indices.recovery.plan.ShardSnapshot; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; @@ -62,7 +61,6 @@ import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.engine.Engine.ES_VERSION; import static org.elasticsearch.index.engine.Engine.HISTORY_UUID_KEY; -import static org.elasticsearch.test.index.IndexVersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -214,18 +212,9 @@ public void testLogicallyEquivalentSnapshotIsUsedEvenIfFilesAreDifferent() throw final IndexVersion snapshotVersion; final Version luceneVersion; if (compatibleVersion) { - snapshotVersion = randomBoolean() ? null : IndexVersionUtils.randomCompatibleVersion(random()); - // If snapshotVersion is not present, - // then lucene version must be < RecoverySettings.SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION - if (snapshotVersion == null) { - luceneVersion = randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - RecoverySettings.SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION - ).luceneVersion(); - } else { - luceneVersion = IndexVersionUtils.randomCompatibleVersion(random()).luceneVersion(); - } + // TODO lucene 10 upgrade, double check the removal of pre V_8_0_0 IndexVersions here + snapshotVersion = IndexVersionUtils.randomCompatibleVersion(random()); + luceneVersion = IndexVersionUtils.randomCompatibleVersion(random()).luceneVersion(); } else { snapshotVersion = IndexVersion.fromId(Integer.MAX_VALUE); luceneVersion = org.apache.lucene.util.Version.parse("255.255.255"); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 5999a3ff1e151..58fde288cfc60 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -91,16 +91,6 @@ public void testDefaultConfiguration() throws IOException { assertTrue(fieldType.hasDocValues()); } - public void testDefaultDocValueConfigurationOnPre7_8() throws IOException { - IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_7_0); - DocumentMapper defaultMapper = createDocumentMapper(oldVersion, fieldMapping(this::minimalMapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME); - assertThat(fieldMapper, instanceOf(fieldMapperClass())); - - GeoShapeWithDocValuesFieldMapper geoShapeFieldMapper = (GeoShapeWithDocValuesFieldMapper) fieldMapper; - assertFalse(geoShapeFieldMapper.fieldType().hasDocValues()); - } - /** * Test that orientation parameter correctly parses */ diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java index 35ccfe8deb5fe..1e363414dbcfe 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java @@ -114,7 +114,7 @@ public void testDefaultConfiguration() throws IOException { public void testDefaultDocValueConfigurationOnPre8_4() throws IOException { // TODO verify which version this test is actually valid for (when PR is actually merged) - IndexVersion oldVersion = 
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_8_3_0); + IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.V_8_3_0); DocumentMapper defaultMapper = createDocumentMapper(oldVersion, fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME); assertThat(fieldMapper, instanceOf(fieldMapperClass())); From 5146b78a42ed2c3c6f263e5332c277248fd0aad1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 5 Sep 2024 22:58:40 +0200 Subject: [PATCH 246/417] Delete IndexVersions.V_7_2_0 - V_7_5_2 --- .../analysis/common/CommonAnalysisPlugin.java | 20 +-- .../common/EdgeNGramTokenizerTests.java | 22 --- .../s3/S3BlobStoreRepositoryTests.java | 5 +- .../elasticsearch/upgrades/RecoveryIT.java | 38 ++--- .../repositories/IndexSnapshotsServiceIT.java | 2 +- .../snapshots/CloneSnapshotIT.java | 16 +- .../snapshots/ConcurrentSnapshotsIT.java | 38 ----- .../CorruptedBlobStoreRepositoryIT.java | 155 +++++++++--------- .../cluster/metadata/IndexMetadata.java | 2 - .../allocator/DesiredBalanceReconciler.java | 5 +- .../elasticsearch/index/IndexVersions.java | 9 +- .../index/engine/ReadOnlyEngine.java | 6 +- .../index/mapper/TextFieldMapper.java | 5 +- .../vectors/DenseVectorFieldMapper.java | 15 +- .../mapper/vectors/VectorEncoderDecoder.java | 12 +- .../index/seqno/ReplicationTracker.java | 4 +- .../elasticsearch/index/shard/IndexShard.java | 3 +- .../recovery/RecoverySourceHandler.java | 4 +- .../snapshots/SnapshotsService.java | 6 +- .../cluster/stats/VersionStatsTests.java | 4 +- .../index/IndexVersionTests.java | 40 ++--- ...BinaryDenseVectorScriptDocValuesTests.java | 14 +- .../vectors/DenseVectorFieldMapperTests.java | 18 -- .../vectors/VectorEncoderDecoderTests.java | 3 +- .../script/VectorScoreScriptUtilsTests.java | 15 -- .../field/vectors/DenseVectorTests.java | 25 ++- .../AbstractSnapshotIntegTestCase.java | 
4 +- .../querydsl/query/SpatialRelatesQuery.java | 5 - .../SearchableSnapshotsIntegTests.java | 6 +- .../GeoShapeWithDocValuesFieldMapper.java | 8 - .../index/mapper/ShapeFieldMapper.java | 7 - .../GeoShapeQueryBuilderGeoShapeTests.java | 24 +-- .../ShapeQueryBuilderOverShapeTests.java | 15 +- 33 files changed, 175 insertions(+), 380 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 35face57b8294..7a3041619c14a 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -563,10 +563,9 @@ public List getPreConfiguredTokenFilters() { ) ); filters.add(PreConfiguredTokenFilter.indexVersion("word_delimiter_graph", false, false, (input, version) -> { - boolean adjustOffsets = version.onOrAfter(IndexVersions.V_7_3_0); return new WordDelimiterGraphFilter( input, - adjustOffsets, + true, WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE, WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS | WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS @@ -587,12 +586,12 @@ public List getPreConfiguredTokenizers() { tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new)); - tokenizers.add(PreConfiguredTokenizer.indexVersion("edge_ngram", (version) -> { - if (version.onOrAfter(IndexVersions.V_7_3_0)) { - return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - } - return new 
EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); - })); + tokenizers.add( + PreConfiguredTokenizer.indexVersion( + "edge_ngram", + (version) -> new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE) + ) + ); tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1))); tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new)); // TODO deprecate and remove in API @@ -630,10 +629,7 @@ public List getPreConfiguredTokenizers() { + "Please change the tokenizer name to [edge_ngram] instead." ); } - if (version.onOrAfter(IndexVersions.V_7_3_0)) { - return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - } - return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); + return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); })); tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new)); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java index 9f291ffc80de0..2b097a421f918 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java @@ -17,14 +17,12 @@ import org.elasticsearch.index.IndexService.IndexCreationContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import 
org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.index.IndexVersionUtils; import java.io.IOException; import java.io.StringReader; @@ -55,26 +53,6 @@ public void testPreConfiguredTokenizer() throws IOException { assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" }); } } - - // Check deprecated name as well, needs version before 8.0 because throws IAE after that - { - try ( - IndexAnalyzers indexAnalyzers = buildAnalyzers( - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_3_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ), - "edgeNGram" - ) - ) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" }); - - } - } - } public void testCustomTokenChars() throws IOException { diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 1ab370ad203fc..b6f11baaa93d2 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; @@ -55,7 +56,6 @@ import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotState; -import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.snapshots.mockstore.BlobStoreWrapper; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; @@ -424,7 +424,8 @@ public void testEnforcedCooldownPeriod() throws IOException { ) ); final BytesReference serialized = BytesReference.bytes( - modifiedRepositoryData.snapshotsToXContent(XContentFactory.jsonBuilder(), SnapshotsService.OLD_SNAPSHOT_FORMAT) + // TODO lucene 10 upgrade, we can probably remove the IndexVersions here once we delete all V7 versions + modifiedRepositoryData.snapshotsToXContent(XContentFactory.jsonBuilder(), IndexVersions.V_8_0_0) ); repository.blobStore() .blobContainer(repository.basePath()) diff --git a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index fe2236adc4904..b74dcd07265ac 100644 --- a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -419,14 +419,10 @@ public void testRecoveryClosedIndex() throws Exception { } final IndexVersion indexVersionCreated = indexVersionCreated(indexName); - if (indexVersionCreated.onOrAfter(IndexVersions.V_7_2_0)) { - // index was created on a version that supports the replication of closed indices, - // so we expect the index to be closed and replicated - ensureGreen(indexName); - assertClosedIndex(indexName, true); - } else { - assertClosedIndex(indexName, false); - } + // index was created on a version that supports the replication of closed indices, + // so we expect the index to be closed and replicated + ensureGreen(indexName); + assertClosedIndex(indexName, true); } /** @@ -447,15 +443,8 @@ public void testCloseIndexDuringRollingUpgrade() 
throws Exception { ensureGreen(indexName); closeIndex(indexName); } - - if (minimumIndexVersion().onOrAfter(IndexVersions.V_7_2_0)) { - // index is created on a version that supports the replication of closed indices, - // so we expect the index to be closed and replicated - ensureGreen(indexName); - assertClosedIndex(indexName, true); - } else { - assertClosedIndex(indexName, false); - } + ensureGreen(indexName); + assertClosedIndex(indexName, true); } /** @@ -483,18 +472,11 @@ public void testClosedIndexNoopRecovery() throws Exception { closeIndex(indexName); } - if (indexVersionCreated(indexName).onOrAfter(IndexVersions.V_7_2_0)) { - // index was created on a version that supports the replication of closed indices, so we expect it to be closed and replicated - assertTrue(minimumIndexVersion().onOrAfter(IndexVersions.V_7_2_0)); - ensureGreen(indexName); - assertClosedIndex(indexName, true); - if (CLUSTER_TYPE != ClusterType.OLD) { - assertNoopRecoveries(indexName, s -> CLUSTER_TYPE == ClusterType.UPGRADED || s.startsWith(CLUSTER_NAME + "-0")); - } - } else { - assertClosedIndex(indexName, false); + ensureGreen(indexName); + assertClosedIndex(indexName, true); + if (CLUSTER_TYPE != ClusterType.OLD) { + assertNoopRecoveries(indexName, s -> CLUSTER_TYPE == ClusterType.UPGRADED || s.startsWith(CLUSTER_NAME + "-0")); } - } /** diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java index 6c7bcd17af1f0..d91adc604d8d5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java @@ -114,7 +114,7 @@ public void testGetShardSnapshotReturnsTheLatestSuccessfulSnapshot() throws Exce final boolean useBwCFormat = randomBoolean(); if (useBwCFormat) { - final IndexVersion version = 
randomVersionBetween(random(), IndexVersions.V_7_5_0, IndexVersion.current()); + final IndexVersion version = randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); initWithSnapshotVersion(repoName, repoPath, version); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java index d7c7acf9737a1..8710d52f5b83c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java @@ -53,11 +53,6 @@ public void testShardClone() throws Exception { final Path repoPath = randomRepoPath(); createRepository(repoName, "fs", repoPath); - final boolean useBwCFormat = randomBoolean(); - if (useBwCFormat) { - initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - } - final String indexName = "test-index"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String sourceSnapshot = "source-snapshot"; @@ -72,11 +67,8 @@ public void testShardClone() throws Exception { final SnapshotId targetSnapshotId = new SnapshotId("target-snapshot", UUIDs.randomBase64UUID(random())); final ShardGeneration currentShardGen; - if (useBwCFormat) { - currentShardGen = null; - } else { - currentShardGen = repositoryData.shardGenerations().getShardGen(indexId, shardId); - } + + currentShardGen = repositoryData.shardGenerations().getShardGen(indexId, shardId); final ShardSnapshotResult shardSnapshotResult = safeAwait( listener -> repository.cloneShardSnapshot( sourceSnapshotInfo.snapshotId(), @@ -88,10 +80,6 @@ public void testShardClone() throws Exception { ); final ShardGeneration newShardGeneration = shardSnapshotResult.getGeneration(); - if (useBwCFormat) { - assertEquals(newShardGeneration, new ShardGeneration(1L)); // Initial snapshot brought it to 0, clone increments it to 1 - } - 
final BlobStoreIndexShardSnapshot targetShardSnapshot = readShardSnapshot(repository, repositoryShardId, targetSnapshotId); final BlobStoreIndexShardSnapshot sourceShardSnapshot = readShardSnapshot( repository, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 71616abf0dcfa..983533eaf7e26 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -40,7 +40,6 @@ import org.elasticsearch.repositories.RepositoryConflictException; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ClusterServiceUtils; @@ -1370,43 +1369,6 @@ public void testConcurrentOperationsLimit() throws Exception { } } - public void testConcurrentSnapshotWorksWithOldVersionRepo() throws Exception { - internalCluster().startMasterOnlyNode(); - final String dataNode = internalCluster().startDataOnlyNode(); - final String repoName = "test-repo"; - final Path repoPath = randomRepoPath(); - createRepository( - repoName, - "mock", - Settings.builder().put(BlobStoreRepository.CACHE_REPOSITORY_DATA.getKey(), false).put("location", repoPath) - ); - initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - - createIndexWithContent("index-slow"); - - final ActionFuture createSlowFuture = startFullSnapshotBlockedOnDataNode( - "slow-snapshot", - repoName, - dataNode - ); - - final String dataNode2 = internalCluster().startDataOnlyNode(); - ensureStableCluster(3); - final String indexFast = "index-fast"; - 
createIndexWithContent(indexFast, dataNode2, dataNode); - - final ActionFuture createFastSnapshot = startFullSnapshot(repoName, "fast-snapshot"); - - assertThat(createSlowFuture.isDone(), is(false)); - unblockNode(repoName, dataNode); - - assertSuccessful(createFastSnapshot); - assertSuccessful(createSlowFuture); - - final RepositoryData repositoryData = getRepositoryData(repoName); - assertThat(repositoryData.shardGenerations(), is(ShardGenerations.EMPTY)); - } - public void testQueuedDeleteAfterFinalizationFailure() throws Exception { final String masterNode = internalCluster().startMasterOnlyNode(); final String repoName = "test-repo"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java index abcac0cade456..a3004e9972063 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java @@ -23,12 +23,12 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.IndexMetaDataGenerations; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.ShardGeneration; import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.repositories.fs.FsRepository; @@ -46,7 +46,6 @@ import java.util.stream.Stream; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -308,7 +307,7 @@ public void testHandlingMissingRootLevelSnapshotMetadata() throws Exception { logger.info("--> verify that repo is assumed in old metadata format"); assertThat( SnapshotsService.minCompatibleVersion(IndexVersion.current(), getRepositoryData(repoName), null), - is(SnapshotsService.OLD_SNAPSHOT_FORMAT) + is(IndexVersions.V_8_0_0) ); logger.info("--> verify that snapshot with missing root level metadata can be deleted"); @@ -382,79 +381,83 @@ public void testMountCorruptedRepositoryData() throws Exception { ); } - public void testHandleSnapshotErrorWithBwCFormat() throws Exception { - final String repoName = "test-repo"; - final Path repoPath = randomRepoPath(); - createRepository(repoName, "fs", repoPath); - final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - - final String indexName = "test-index"; - createIndex(indexName); - - createFullSnapshot(repoName, "snapshot-1"); - - // In the old metadata version the shard level metadata could be moved to the next generation for all sorts of reasons, this should - // not break subsequent repository operations - logger.info("--> move shard level metadata to new generation"); - final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); - final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); - final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); - assertFileExists(initialShardMetaPath); - Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "1")); - - startDeleteSnapshot(repoName, oldVersionSnapshot).get(); - - createFullSnapshot(repoName, "snapshot-2"); - } - - public void 
testRepairBrokenShardGenerations() throws Exception { - final String repoName = "test-repo"; - final Path repoPath = randomRepoPath(); - createRepository(repoName, "fs", repoPath); - final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - - final String indexName = "test-index"; - createIndex(indexName); - - createFullSnapshot(repoName, "snapshot-1"); - - startDeleteSnapshot(repoName, oldVersionSnapshot).get(); - - logger.info("--> move shard level metadata to new generation and make RepositoryData point at an older generation"); - final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); - final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); - final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); - assertFileExists(initialShardMetaPath); - Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + randomIntBetween(1, 1000))); - - final RepositoryData repositoryData = getRepositoryData(repoName); - final Map snapshotIds = repositoryData.getSnapshotIds() - .stream() - .collect(Collectors.toMap(SnapshotId::getUUID, Function.identity())); - final RepositoryData brokenRepoData = new RepositoryData( - repositoryData.getUuid(), - repositoryData.getGenId(), - snapshotIds, - snapshotIds.values().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getSnapshotDetails)), - repositoryData.getIndices().values().stream().collect(Collectors.toMap(Function.identity(), repositoryData::getSnapshots)), - ShardGenerations.builder().putAll(repositoryData.shardGenerations()).put(indexId, 0, new ShardGeneration(0L)).build(), - repositoryData.indexMetaDataGenerations(), - repositoryData.getClusterUUID() - ); - Files.write( - repoPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + repositoryData.getGenId()), - BytesReference.toBytes( - 
BytesReference.bytes(brokenRepoData.snapshotsToXContent(XContentFactory.jsonBuilder(), IndexVersion.current())) - ), - StandardOpenOption.TRUNCATE_EXISTING - ); - - logger.info("--> recreating repository to clear caches"); - clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName).get(); - createRepository(repoName, "fs", repoPath); - - createFullSnapshot(repoName, "snapshot-2"); - } + // TODO lucene 10 upgrade, deleting old IndexVersions and with this SnapshotsService.OLD_SNAPSHOT_FORMAT + // can the following test be deleted? + // public void testHandleSnapshotErrorWithBwCFormat() throws Exception { + // final String repoName = "test-repo"; + // final Path repoPath = randomRepoPath(); + // createRepository(repoName, "fs", repoPath); + // final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); + // + // final String indexName = "test-index"; + // createIndex(indexName); + // + // createFullSnapshot(repoName, "snapshot-1"); + // + // // In the old metadata version the shard level metadata could be moved to the next generation for all sorts of reasons, this should + // // not break subsequent repository operations + // logger.info("--> move shard level metadata to new generation"); + // final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); + // final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); + // final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); + // assertFileExists(initialShardMetaPath); + // Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "1")); + // + // startDeleteSnapshot(repoName, oldVersionSnapshot).get(); + // + // createFullSnapshot(repoName, "snapshot-2"); + // } + + // TODO lucene 10 upgrade, deleting old IndexVersions and with this SnapshotsService.OLD_SNAPSHOT_FORMAT + // can the following test be 
deleted? + // public void testRepairBrokenShardGenerations() throws Exception { + // final String repoName = "test-repo"; + // final Path repoPath = randomRepoPath(); + // createRepository(repoName, "fs", repoPath); + // final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); + // + // final String indexName = "test-index"; + // createIndex(indexName); + // + // createFullSnapshot(repoName, "snapshot-1"); + // + // startDeleteSnapshot(repoName, oldVersionSnapshot).get(); + // + // logger.info("--> move shard level metadata to new generation and make RepositoryData point at an older generation"); + // final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); + // final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); + // final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); + // assertFileExists(initialShardMetaPath); + // Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + randomIntBetween(1, 1000))); + // + // final RepositoryData repositoryData = getRepositoryData(repoName); + // final Map snapshotIds = repositoryData.getSnapshotIds() + // .stream() + // .collect(Collectors.toMap(SnapshotId::getUUID, Function.identity())); + // final RepositoryData brokenRepoData = new RepositoryData( + // repositoryData.getUuid(), + // repositoryData.getGenId(), + // snapshotIds, + // snapshotIds.values().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getSnapshotDetails)), + // repositoryData.getIndices().values().stream().collect(Collectors.toMap(Function.identity(), repositoryData::getSnapshots)), + // ShardGenerations.builder().putAll(repositoryData.shardGenerations()).put(indexId, 0, new ShardGeneration(0L)).build(), + // repositoryData.indexMetaDataGenerations(), + // repositoryData.getClusterUUID() + // ); + // Files.write( + // 
repoPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + repositoryData.getGenId()), + // BytesReference.toBytes( + // BytesReference.bytes(brokenRepoData.snapshotsToXContent(XContentFactory.jsonBuilder(), IndexVersion.current())) + // ), + // StandardOpenOption.TRUNCATE_EXISTING + // ); + // + // logger.info("--> recreating repository to clear caches"); + // clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName).get(); + // createRepository(repoName, "fs", repoPath); + // + // createFullSnapshot(repoName, "snapshot-2"); + // } /** * Tests that a shard snapshot with a corrupted shard index file can still be used for restore and incremental snapshots. diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 742439c9a2484..c0fb2270d891e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -2736,8 +2736,6 @@ public static IndexMetadata fromXContent(XContentParser parser, Map // skip any pre-7.2 closed indices which have no routing table entries at all - indexMetadata.getCreationVersion().onOrAfter(IndexVersions.V_7_2_0) - || indexMetadata.getState() == IndexMetadata.State.OPEN - || MetadataIndexStateService.isIndexVerifiedBeforeClosed(indexMetadata)) + indexMetadata.getState() == IndexMetadata.State.OPEN || MetadataIndexStateService.isIndexVerifiedBeforeClosed(indexMetadata)) .flatMap( indexMetadata -> IntStream.range(0, indexMetadata.getNumberOfShards()) .mapToObj( diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 679f9291fedb2..8e0ca1de3ec2f 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -59,13 +59,6 
@@ private static Version parseUnchecked(String version) { @UpdateForV9 // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - - public static final IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); - public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); - public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); - public static final IndexVersion V_7_4_0 = def(7_04_00_99, parseUnchecked("8.2.0")); - public static final IndexVersion V_7_5_0 = def(7_05_00_99, parseUnchecked("8.3.0")); - public static final IndexVersion V_7_5_2 = def(7_05_02_99, parseUnchecked("8.3.0")); public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); @@ -178,7 +171,7 @@ private static Version parseUnchecked(String version) { * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions. 
*/ - public static final IndexVersion MINIMUM_COMPATIBLE = V_7_2_0; + public static final IndexVersion MINIMUM_COMPATIBLE = V_7_6_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index c9474b58ef447..51a301eea1707 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -25,8 +25,6 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentParser; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.seqno.SeqNoStats; @@ -175,9 +173,7 @@ protected void ensureMaxSeqNoEqualsToGlobalCheckpoint(final SeqNoStats seqNoStat // In addition to that we only execute the check if the index the engine belongs to has been // created after the refactoring of the Close Index API and its TransportVerifyShardBeforeCloseAction // that guarantee that all operations have been flushed to Lucene. 
- IndexVersion indexVersionCreated = engineConfig.getIndexSettings().getIndexVersionCreated(); - if (indexVersionCreated.onOrAfter(IndexVersions.V_7_2_0) - || (seqNoStats.getGlobalCheckpoint() != SequenceNumbers.UNASSIGNED_SEQ_NO)) { + if (seqNoStats.getGlobalCheckpoint() != SequenceNumbers.UNASSIGNED_SEQ_NO) { assert assertMaxSeqNoEqualsToGlobalCheckpoint(seqNoStats.getMaxSeqNo(), seqNoStats.getGlobalCheckpoint()); if (seqNoStats.getMaxSeqNo() != seqNoStats.getGlobalCheckpoint()) { throw new IllegalStateException( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 89bae7f6acc98..2318b34c5568a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -54,7 +54,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -416,7 +415,9 @@ private SubFieldInfo buildPrefixInfo(MapperBuilderContext context, FieldType fie * or a multi-field). This way search will continue to work on old indices and new indices * will use the expected full name. */ - String fullName = indexCreatedVersion.before(IndexVersions.V_7_2_1) ? leafName() : context.buildFullName(leafName()); + // TODO lucene 10 upgrade, we remove pre 8.x index versions but may need these for legacy archive support here? + // String fullName = indexCreatedVersion.before(IndexVersions.V_7_2_1) ? leafName() : context.buildFullName(leafName()); + String fullName = context.buildFullName(leafName()); // Copy the index options of the main field to allow phrase queries on // the prefix field. 
FieldType pft = new FieldType(fieldType); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 6a231d15f7be8..8ed8fde109748 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -105,7 +105,6 @@ public static boolean isNotUnitVector(float magnitude) { public static final NodeFeature INT4_QUANTIZATION = new NodeFeature("mapper.vectors.int4_quantization"); public static final NodeFeature BIT_VECTORS = new NodeFeature("mapper.vectors.bit_vectors"); - public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; public static final IndexVersion NORMALIZE_COSINE = IndexVersions.NORMALIZED_VECTOR_COSINE; public static final IndexVersion DEFAULT_TO_INT8 = DEFAULT_DENSE_VECTOR_TO_INT8_HNSW; @@ -2039,19 +2038,15 @@ private void parseBinaryDocValuesVectorAndIndex(DocumentParserContext context) t // this code is here and not in the VectorEncoderDecoder so not to create extra arrays int dims = fieldType().dims; ElementType elementType = fieldType().elementType; - int numBytes = indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION) - ? 
elementType.getNumBytes(dims) + MAGNITUDE_BYTES - : elementType.getNumBytes(dims); + int numBytes = elementType.getNumBytes(dims) + MAGNITUDE_BYTES; ByteBuffer byteBuffer = elementType.createByteBuffer(indexCreatedVersion, numBytes); VectorData vectorData = elementType.parseKnnVector(context, this); vectorData.addToBuffer(byteBuffer); - if (indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION)) { - // encode vector magnitude at the end - double dotProduct = elementType.computeSquaredMagnitude(vectorData); - float vectorMagnitude = (float) Math.sqrt(dotProduct); - byteBuffer.putFloat(vectorMagnitude); - } + // encode vector magnitude at the end + double dotProduct = elementType.computeSquaredMagnitude(vectorData); + float vectorMagnitude = (float) Math.sqrt(dotProduct); + byteBuffer.putFloat(vectorMagnitude); Field field = new BinaryDocValuesField(fieldType().name(), new BytesRef(byteBuffer.array())); context.doc().addWithKey(fieldType().name(), field); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java index e3285c4dc8644..decfce3e965cb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java @@ -17,7 +17,6 @@ import java.nio.FloatBuffer; import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION; -import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION; public final class VectorEncoderDecoder { public static final byte INT_BYTES = 4; @@ -25,9 +24,7 @@ public final class VectorEncoderDecoder { private VectorEncoderDecoder() {} public static int denseVectorLength(IndexVersion indexVersion, BytesRef vectorBR) { - return indexVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION) - ? 
(vectorBR.length - INT_BYTES) / INT_BYTES - : vectorBR.length / INT_BYTES; + return (vectorBR.length - INT_BYTES) / INT_BYTES; } /** @@ -36,7 +33,6 @@ public static int denseVectorLength(IndexVersion indexVersion, BytesRef vectorBR * equal to 7.5.0, since vectors created prior to that do not store the magnitude. */ public static float decodeMagnitude(IndexVersion indexVersion, BytesRef vectorBR) { - assert indexVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION); ByteBuffer byteBuffer = indexVersion.onOrAfter(LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION) ? ByteBuffer.wrap(vectorBR.bytes, vectorBR.offset, vectorBR.length).order(ByteOrder.LITTLE_ENDIAN) : ByteBuffer.wrap(vectorBR.bytes, vectorBR.offset, vectorBR.length); @@ -54,11 +50,7 @@ public static float getMagnitude(IndexVersion indexVersion, BytesRef vectorBR, f if (vectorBR == null) { throw new IllegalArgumentException(DenseVectorScriptDocValues.MISSING_VECTOR_FIELD_MESSAGE); } - if (indexVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION)) { - return decodeMagnitude(indexVersion, vectorBR); - } else { - return calculateMagnitude(decodedVector); - } + return decodeMagnitude(indexVersion, vectorBR); } /** diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 247c2fd70761e..138bd2c6d8840 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -976,9 +976,7 @@ public ReplicationTracker( this.routingTable = null; this.replicationGroup = null; this.hasAllPeerRecoveryRetentionLeases = indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0) - || (indexSettings.isSoftDeleteEnabled() - && indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_4_0) - && indexSettings.getIndexMetadata().getState() == IndexMetadata.State.OPEN); + || (indexSettings.isSoftDeleteEnabled() && 
indexSettings.getIndexMetadata().getState() == IndexMetadata.State.OPEN); this.fileBasedRecoveryThreshold = IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.get(indexSettings.getSettings()); this.safeCommitInfoSupplier = safeCommitInfoSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index b7d1beb4d1e06..cb959b4931d2a 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -73,7 +73,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.bulk.stats.BulkOperationListener; import org.elasticsearch.index.bulk.stats.BulkStats; @@ -3370,7 +3369,7 @@ public RetentionLease addPeerRecoveryRetentionLease( ) { assert assertPrimaryMode(); // only needed for BWC reasons involving rolling upgrades from versions that do not support PRRLs: - assert indexSettings.getIndexVersionCreated().before(IndexVersions.V_7_4_0) || indexSettings.isSoftDeleteEnabled() == false; + assert indexSettings.isSoftDeleteEnabled() == false; return replicationTracker.addPeerRecoveryRetentionLease(nodeId, globalCheckpoint, listener); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index de8833ac60275..3515b9e9d2a36 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -38,7 +38,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; -import 
org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.seqno.ReplicationTracker; @@ -969,8 +968,7 @@ void createRetentionLease(final long startingSeqNo, ActionListener { return switch (randomIntBetween(1, 4)) { case 1 -> new VersionStats.SingleVersionStats( - IndexVersions.V_7_3_0, + IndexVersions.V_8_0_0, svs.indexCount, svs.primaryShardCount, svs.totalPrimaryByteCount @@ -143,7 +143,7 @@ private static IndexMetadata indexMeta(String name, IndexVersion version, int pr } public static VersionStats randomInstance() { - List versions = List.of(IndexVersion.current(), IndexVersions.V_8_0_0, IndexVersions.V_8_1_0, IndexVersions.V_8_2_0); + List versions = List.of(IndexVersion.current(), IndexVersions.V_8_1_0, IndexVersions.V_8_2_0, IndexVersions.V_8_3_0); List stats = new ArrayList<>(); for (IndexVersion v : versions) { VersionStats.SingleVersionStats s = new VersionStats.SingleVersionStats( diff --git a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java index dcf73ec617e60..0f5a1e479d041 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java @@ -32,27 +32,27 @@ public class IndexVersionTests extends ESTestCase { public void testVersionComparison() { - IndexVersion V_7_2_0 = IndexVersions.V_7_2_0; IndexVersion V_8_0_0 = IndexVersions.V_8_0_0; - assertThat(V_7_2_0.before(V_8_0_0), is(true)); - assertThat(V_7_2_0.before(V_7_2_0), is(false)); - assertThat(V_8_0_0.before(V_7_2_0), is(false)); - - assertThat(V_7_2_0.onOrBefore(V_8_0_0), is(true)); - assertThat(V_7_2_0.onOrBefore(V_7_2_0), is(true)); - assertThat(V_8_0_0.onOrBefore(V_7_2_0), is(false)); - - assertThat(V_7_2_0.after(V_8_0_0), is(false)); - assertThat(V_7_2_0.after(V_7_2_0), is(false)); - 
assertThat(V_8_0_0.after(V_7_2_0), is(true)); - - assertThat(V_7_2_0.onOrAfter(V_8_0_0), is(false)); - assertThat(V_7_2_0.onOrAfter(V_7_2_0), is(true)); - assertThat(V_8_0_0.onOrAfter(V_7_2_0), is(true)); - - assertThat(V_7_2_0, is(lessThan(V_8_0_0))); - assertThat(V_7_2_0.compareTo(V_7_2_0), is(0)); - assertThat(V_8_0_0, is(greaterThan(V_7_2_0))); + IndexVersion V_8_10_0 = IndexVersions.V_8_10_0; + assertThat(V_8_0_0.before(V_8_10_0), is(true)); + assertThat(V_8_0_0.before(V_8_0_0), is(false)); + assertThat(V_8_10_0.before(V_8_0_0), is(false)); + + assertThat(V_8_0_0.onOrBefore(V_8_10_0), is(true)); + assertThat(V_8_0_0.onOrBefore(V_8_0_0), is(true)); + assertThat(V_8_10_0.onOrBefore(V_8_0_0), is(false)); + + assertThat(V_8_0_0.after(V_8_10_0), is(false)); + assertThat(V_8_0_0.after(V_8_0_0), is(false)); + assertThat(V_8_10_0.after(V_8_0_0), is(true)); + + assertThat(V_8_0_0.onOrAfter(V_8_10_0), is(false)); + assertThat(V_8_0_0.onOrAfter(V_8_0_0), is(true)); + assertThat(V_8_10_0.onOrAfter(V_8_0_0), is(true)); + + assertThat(V_8_0_0, is(lessThan(V_8_10_0))); + assertThat(V_8_0_0.compareTo(V_8_0_0), is(0)); + assertThat(V_8_10_0, is(greaterThan(V_8_0_0))); } public static class CorrectFakeVersion { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java index 1df42368041ac..3c3bff2ffe023 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java @@ -32,7 +32,7 @@ public void testFloatGetVectorValueAndGetMagnitude() throws IOException { float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; float[] expectedMagnitudes = { 1.7320f, 2.4495f, 3.3166f }; - for (IndexVersion indexVersion : List.of(IndexVersions.V_7_4_0, 
IndexVersion.current())) { + for (IndexVersion indexVersion : List.of(IndexVersions.V_8_0_0, IndexVersion.current())) { BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, indexVersion); DenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", ElementType.FLOAT, dims, indexVersion); DenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); @@ -235,9 +235,7 @@ public long cost() { } public static BytesRef mockEncodeDenseVector(float[] values, ElementType elementType, IndexVersion indexVersion) { - int numBytes = indexVersion.onOrAfter(DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION) - ? elementType.getNumBytes(values.length) + DenseVectorFieldMapper.MAGNITUDE_BYTES - : elementType.getNumBytes(values.length); + int numBytes = elementType.getNumBytes(values.length) + DenseVectorFieldMapper.MAGNITUDE_BYTES; double dotProduct = 0f; ByteBuffer byteBuffer = elementType.createByteBuffer(indexVersion, numBytes); for (float value : values) { @@ -251,11 +249,9 @@ public static BytesRef mockEncodeDenseVector(float[] values, ElementType element dotProduct += value * value; } - if (indexVersion.onOrAfter(DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION)) { - // encode vector magnitude at the end - float vectorMagnitude = (float) Math.sqrt(dotProduct); - byteBuffer.putFloat(vectorMagnitude); - } + // encode vector magnitude at the end + float vectorMagnitude = (float) Math.sqrt(dotProduct); + byteBuffer.putFloat(vectorMagnitude); return new BytesRef(byteBuffer.array()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 83b8a8fa991c2..ad719e398ae37 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ 
-1321,24 +1321,6 @@ public void testDefaultParamsIndexByDefault() throws Exception { assertEquals(VectorSimilarity.COSINE, denseVectorFieldType.getSimilarity()); } - public void testAddDocumentsToIndexBefore_V_7_5_0() throws Exception { - IndexVersion indexVersion = IndexVersions.V_7_4_0; - DocumentMapper mapper = createDocumentMapper( - indexVersion, - fieldMapping(b -> b.field("index", false).field("type", "dense_vector").field("dims", 3)) - ); - - float[] validVector = { -12.1f, 100.7f, -4 }; - ParsedDocument doc1 = mapper.parse(source(b -> b.array("field", validVector))); - List fields = doc1.rootDoc().getFields("field"); - assertEquals(1, fields.size()); - assertThat(fields.get(0), instanceOf(BinaryDocValuesField.class)); - // assert that after decoding the indexed value is equal to expected - BytesRef vectorBR = fields.get(0).binaryValue(); - float[] decodedValues = decodeDenseVector(indexVersion, vectorBR); - assertArrayEquals("Decoded dense vector values is not equal to the indexed one.", validVector, decodedValues, 0.001f); - } - public void testValidateOnBuild() { final MapperBuilderContext context = MapperBuilderContext.root(false, false); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java index 80b08f907be8d..9e581aa7711ef 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -27,7 +28,7 @@ public void testVectorDecodingWithOffset() { for (IndexVersion version : List.of( IndexVersionUtils.randomVersionBetween( random(), 
- DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion(DenseVectorFieldMapper.LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION) ), DenseVectorFieldMapper.LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION diff --git a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java index 8bd53047b2dc7..02fe42626c79c 100644 --- a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.script; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.vectors.BinaryDenseVectorScriptDocValuesTests; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.KnnDenseVectorScriptDocValuesTests; @@ -44,13 +43,6 @@ public void testFloatVectorClassBindings() throws IOException { List invalidQueryVector = Arrays.asList(0.5, 111.3); List fields = List.of( - new BinaryDenseVectorDocValuesField( - BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersions.V_7_4_0), - "test", - ElementType.FLOAT, - dims, - IndexVersions.V_7_4_0 - ), new BinaryDenseVectorDocValuesField( BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.current()), "test", @@ -237,13 +229,6 @@ public void testByteVsFloatSimilarity() throws IOException { byte[] byteVector = new byte[] { (byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4 }; List fields = List.of( - new BinaryDenseVectorDocValuesField( - BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersions.V_7_4_0), - "field0", - ElementType.FLOAT, - dims, - IndexVersions.V_7_4_0 - 
), new BinaryDenseVectorDocValuesField( BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.current()), "field1", diff --git a/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java b/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java index 2be338efd7174..a8f4edd1568e7 100644 --- a/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java +++ b/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java @@ -10,10 +10,10 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.vectors.BinaryDenseVectorScriptDocValuesTests; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.index.IndexVersionUtils; import java.nio.ByteBuffer; import java.util.ArrayList; @@ -68,22 +68,21 @@ public void testFloatVsListQueryVector() { assertEquals(knn.cosineSimilarity(arrayQV), knn.cosineSimilarity(listQV), 0.001f); assertEquals(knn.cosineSimilarity((Object) listQV), knn.cosineSimilarity((Object) arrayQV), 0.001f); - for (IndexVersion indexVersion : List.of(IndexVersions.V_7_4_0, IndexVersion.current())) { - BytesRef value = BinaryDenseVectorScriptDocValuesTests.mockEncodeDenseVector(docVector, ElementType.FLOAT, indexVersion); - BinaryDenseVector bdv = new BinaryDenseVector(docVector, value, dims, indexVersion); + IndexVersion randomCompatibleVersion = IndexVersionUtils.randomCompatibleVersion(random()); + BytesRef value = BinaryDenseVectorScriptDocValuesTests.mockEncodeDenseVector(docVector, ElementType.FLOAT, randomCompatibleVersion); + BinaryDenseVector bdv = new BinaryDenseVector(docVector, value, dims, randomCompatibleVersion); - assertEquals(bdv.dotProduct(arrayQV), bdv.dotProduct(listQV), 0.001f); - 
assertEquals(bdv.dotProduct((Object) listQV), bdv.dotProduct((Object) arrayQV), 0.001f); + assertEquals(bdv.dotProduct(arrayQV), bdv.dotProduct(listQV), 0.001f); + assertEquals(bdv.dotProduct((Object) listQV), bdv.dotProduct((Object) arrayQV), 0.001f); - assertEquals(bdv.l1Norm(arrayQV), bdv.l1Norm(listQV), 0.001f); - assertEquals(bdv.l1Norm((Object) listQV), bdv.l1Norm((Object) arrayQV), 0.001f); + assertEquals(bdv.l1Norm(arrayQV), bdv.l1Norm(listQV), 0.001f); + assertEquals(bdv.l1Norm((Object) listQV), bdv.l1Norm((Object) arrayQV), 0.001f); - assertEquals(bdv.l2Norm(arrayQV), bdv.l2Norm(listQV), 0.001f); - assertEquals(bdv.l2Norm((Object) listQV), bdv.l2Norm((Object) arrayQV), 0.001f); + assertEquals(bdv.l2Norm(arrayQV), bdv.l2Norm(listQV), 0.001f); + assertEquals(bdv.l2Norm((Object) listQV), bdv.l2Norm((Object) arrayQV), 0.001f); - assertEquals(bdv.cosineSimilarity(arrayQV), bdv.cosineSimilarity(listQV), 0.001f); - assertEquals(bdv.cosineSimilarity((Object) listQV), bdv.cosineSimilarity((Object) arrayQV), 0.001f); - } + assertEquals(bdv.cosineSimilarity(arrayQV), bdv.cosineSimilarity(listQV), 0.001f); + assertEquals(bdv.cosineSimilarity((Object) listQV), bdv.cosineSimilarity((Object) arrayQV), 0.001f); } public void testByteVsListQueryVector() { diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index e04f7339a97bd..de7d5a629ddc3 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -529,7 +529,7 @@ protected void addBwCFailedSnapshot(String repoName, String snapshotName, Map {} ) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index 4f0bcbb43e260..f264b04c9d159 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.GeoShapeQueryable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryBuilder; @@ -214,10 +213,6 @@ private static org.apache.lucene.search.Query shapeShapeQuery( ShapeRelation relation, SearchExecutionContext context ) { - // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0); - if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { - throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); - } final MappedFieldType fieldType = context.getFieldType(fieldName); try { return XYQueriesUtils.toXYShapeQuery(geometry, fieldName, relation, fieldType.isIndexed(), fieldType.hasDocValues()); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index c99f2be0a6cad..23e502572a005 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -829,11 +829,7 @@ public void testSnapshotOfSearchableSnapshotIncludesNoDataButCanBeRestored() thr initWithSnapshotVersion( tmpRepositoryName, repoPath, - randomFrom( - SnapshotsService.OLD_SNAPSHOT_FORMAT, - SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION, - SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION - ) + randomFrom(SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION, SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION) ); assertAcked(clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, tmpRepositoryName)); createRepository(repositoryName, "fs", repoPath); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 04b194c2ec208..da7af5129e828 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -46,7 +46,6 @@ import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.index.mapper.StoredValueFetcher; import org.elasticsearch.index.mapper.ValueFetcher; -import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.lucene.spatial.BinaryShapeDocValuesField; @@ -258,13 +257,6 @@ public String typeName() { @Override public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... 
geometries) { failIfNotIndexedNorDocValuesFallback(context); - // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0) - if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { - throw new QueryShardException( - context, - ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]." - ); - } Query query; if (isIndexed()) { query = LatLonShape.newGeometryQuery(fieldName, relation.getLuceneRelation(), geometries); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 0ea8c3e22e288..0d5c19ff3f997 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -161,13 +161,6 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext @Override public Query shapeQuery(Geometry shape, String fieldName, ShapeRelation relation, SearchExecutionContext context) { failIfNotIndexedNorDocValuesFallback(context); - // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0); - if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { - throw new QueryShardException( - context, - ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]." 
- ); - } try { return XYQueriesUtils.toXYShapeQuery(shape, fieldName, relation, isIndexed(), hasDocValues()); } catch (IllegalArgumentException e) { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java index 593656411eb41..405ef5c480687 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -88,25 +87,12 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { } if (ESTestCase.randomBoolean()) { SearchExecutionContext context = AbstractBuilderTestCase.createSearchExecutionContext(); - if (context.indexVersionCreated().onOrAfter(IndexVersions.V_7_5_0)) { // CONTAINS is only supported from version 7.5 - if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { - builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS)); - } else { - builder.relation( - ESTestCase.randomFrom( - ShapeRelation.DISJOINT, - ShapeRelation.INTERSECTS, - ShapeRelation.WITHIN, - ShapeRelation.CONTAINS - ) - ); - } + if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { + builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS)); } else { - 
if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { - builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS)); - } else { - builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN)); - } + builder.relation( + ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN, ShapeRelation.CONTAINS) + ); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java index aa5ae72df2b9e..72073a6eff550 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.SearchExecutionContext; @@ -33,18 +32,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected ShapeRelation getShapeRelation(ShapeType type) { SearchExecutionContext context = createSearchExecutionContext(); - if (context.indexVersionCreated().onOrAfter(IndexVersions.V_7_5_0)) { // CONTAINS is only supported from version 7.5 - if (type == ShapeType.LINESTRING || type == ShapeType.MULTILINESTRING) { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS); - } else { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN, ShapeRelation.CONTAINS); - } + if (type == ShapeType.LINESTRING || 
type == ShapeType.MULTILINESTRING) { + return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS); } else { - if (type == ShapeType.LINESTRING || type == ShapeType.MULTILINESTRING) { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS); - } else { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN); - } + return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN, ShapeRelation.CONTAINS); } } From 9e8ac1ec8b14eae80d7a69a44193bc96795a2b97 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 6 Sep 2024 06:12:00 +0000 Subject: [PATCH 247/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-5f242b3b268 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 8ba5dcd92e1c8..47ca29f57249b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-b91b4136aff +lucene = 10.0.0-snapshot-5f242b3b268 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 228f4f0962eb8..a8efc3a5e01d1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e382a035bb4cc856f2ee86bac1010ff1cd1a8a15 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 6 Sep 2024 06:12:19 +0000 
Subject: [PATCH 248/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-40c4e582cf9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ce7b9e2e77e57..ec6bba21e9a74 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-56468ea3bb8 +lucene = 9.12.0-snapshot-40c4e582cf9 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 89529592bf8d5..c99e5c5b5f740 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a70eec5b86134e35adc07521d14d393422beff8c Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 6 Sep 2024 11:04:45 +0100 Subject: [PATCH 249/417] Update usage of Automaton Operations.subsetOf in x-pack core --- .../permission/ApplicationPermission.java | 4 +- .../authz/permission/ClusterPermission.java | 3 +- .../authz/permission/FieldPermissions.java | 12 +-- .../authz/permission/IndicesPermission.java | 4 +- .../security/authz/privilege/Privilege.java | 3 +- .../core/security/support/Automatons.java | 75 ++++++++++++++++++- .../authz/privilege/IndexPrivilegeTests.java | 16 ++-- .../authz/privilege/PrivilegeTests.java | 14 ++-- 8 files changed, 101 insertions(+), 30 deletions(-) diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index c85a648761ca7..5ba5c1fd1218a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -187,7 +187,7 @@ private PermissionEntry(ApplicationPrivilege privilege, Set resourceName } private boolean grants(ApplicationPrivilege other, Automaton resource) { - return matchesPrivilege(other) && Operations.subsetOf(resource, this.resourceAutomaton); + return matchesPrivilege(other) && Automatons.subsetOf(resource, this.resourceAutomaton); } private boolean matchesPrivilege(ApplicationPrivilege other) { @@ -202,7 +202,7 @@ private boolean matchesPrivilege(ApplicationPrivilege other) { } return Operations.isEmpty(privilege.getAutomaton()) == false && Operations.isEmpty(other.getAutomaton()) == false - && Operations.subsetOf(other.getAutomaton(), privilege.getAutomaton()); + && Automatons.subsetOf(other.getAutomaton(), privilege.getAutomaton()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 9c41786f39eeb..4e608281a7858 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import 
org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RestrictedIndices; @@ -215,7 +214,7 @@ public final boolean check(final String action, final TransportRequest request, @Override public final boolean implies(final PermissionCheck permissionCheck) { if (permissionCheck instanceof ActionBasedPermissionCheck) { - return Operations.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) + return Automatons.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) && doImplies((ActionBasedPermissionCheck) permissionCheck); } return false; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index 44515a84351e2..235d7419d2bf0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -33,8 +33,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.apache.lucene.util.automaton.Operations.subsetOf; - /** * Stores patterns to fields which access is granted or denied to and maintains an automaton that can be used to check if permission is * allowed for a specific field. 
@@ -174,10 +172,14 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel deniedFieldsAutomaton = Automatons.patterns(deniedFields); } - grantedFieldsAutomaton = Operations.determinize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - deniedFieldsAutomaton = Operations.determinize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + grantedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); + deniedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); - if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { + if (Automatons.subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { throw new ElasticsearchSecurityException( "Exceptions for field permissions must be a subset of the " + "granted fields but " diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index e1b72cc43b38e..558f8e6f22ac1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -288,7 +288,7 @@ public boolean checkResourcePrivileges( if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { - if (Operations.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { + if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { allowedIndexPrivilegesAutomaton = 
Automatons.unionAndMinimize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) @@ -301,7 +301,7 @@ public boolean checkResourcePrivileges( for (String privilege : checkForPrivileges) { IndexPrivilege indexPrivilege = IndexPrivilege.get(Collections.singleton(privilege)); if (allowedIndexPrivilegesAutomaton != null - && Operations.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { + && Automatons.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { if (resourcePrivilegesMapBuilder != null) { resourcePrivilegesMapBuilder.addResourcePrivilege(forIndexPattern, privilege, Boolean.TRUE); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java index 68e3f11751aac..7434128f03129 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.core.security.support.Automatons; @@ -90,7 +89,7 @@ public static SortedMap sortByAccessLevel(Map subsetCount.put( name, - privileges.values().stream().filter(p2 -> p2 != priv && Operations.subsetOf(priv.automaton, p2.automaton)).count() + privileges.values().stream().filter(p2 -> p2 != priv && Automatons.subsetOf(priv.automaton, p2.automaton)).count() ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index 
20e2010ac29bc..7d3367482c067 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -11,6 +11,8 @@ import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.apache.lucene.util.automaton.StatePair; +import org.apache.lucene.util.automaton.Transition; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; @@ -19,6 +21,7 @@ import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -222,7 +225,7 @@ private static Automaton buildAutomaton(String pattern) { ); } String regex = pattern.substring(1, pattern.length() - 1); - return new RegExp(regex).toAutomaton(); + return Operations.determinize(new RegExp(regex).toAutomaton(), DEFAULT_DETERMINIZE_WORK_LIMIT); } else if (pattern.equals("*")) { return MATCH_ALL; } else { @@ -268,7 +271,7 @@ static Automaton wildcard(String text) { } i += length; } - return concatenate(automata); + return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public static Automaton unionAndMinimize(Collection automata) { @@ -368,4 +371,72 @@ static List getPatterns(Automaton automaton) { throw new IllegalArgumentException("recordPatterns is set to false"); } } + + /** + * Returns true if the language of a1 is a subset of the language of a2. + * Both automata must be determinized and must have no dead states. + * + *

Complexity: quadratic in number of states. + * Copied of Lucene's AutomatonTestUtil + */ + public static boolean subsetOf(Automaton a1, Automaton a2) { + if (a1.isDeterministic() == false) { + throw new IllegalArgumentException("a1 must be deterministic"); + } + if (a2.isDeterministic() == false) { + throw new IllegalArgumentException("a2 must be deterministic"); + } + assert Operations.hasDeadStatesFromInitial(a1) == false; + assert Operations.hasDeadStatesFromInitial(a2) == false; + if (a1.getNumStates() == 0) { + // Empty language is alwyas a subset of any other language + return true; + } else if (a2.getNumStates() == 0) { + return Operations.isEmpty(a1); + } + + // TODO: cutover to iterators instead + Transition[][] transitions1 = a1.getSortedTransitions(); + Transition[][] transitions2 = a2.getSortedTransitions(); + ArrayDeque worklist = new ArrayDeque<>(); + HashSet visited = new HashSet<>(); + StatePair p = new StatePair(0, 0); + worklist.add(p); + visited.add(p); + while (worklist.size() > 0) { + p = worklist.removeFirst(); + if (a1.isAccept(p.s1) && a2.isAccept(p.s2) == false) { + return false; + } + Transition[] t1 = transitions1[p.s1]; + Transition[] t2 = transitions2[p.s2]; + for (int n1 = 0, b2 = 0; n1 < t1.length; n1++) { + while (b2 < t2.length && t2[b2].max < t1[n1].min) { + b2++; + } + int min1 = t1[n1].min, max1 = t1[n1].max; + + for (int n2 = b2; n2 < t2.length && t1[n1].max >= t2[n2].min; n2++) { + if (t2[n2].min > min1) { + return false; + } + if (t2[n2].max < Character.MAX_CODE_POINT) { + min1 = t2[n2].max + 1; + } else { + min1 = Character.MAX_CODE_POINT; + max1 = Character.MIN_CODE_POINT; + } + StatePair q = new StatePair(t1[n1].dest, t2[n2].dest); + if (visited.contains(q) == false) { + worklist.add(q); + visited.add(q); + } + } + if (min1 <= max1) { + return false; + } + } + } + return true; + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java index 265714ee6ea16..073b3b92a43a5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -17,6 +16,7 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import java.util.Collection; @@ -83,7 +83,7 @@ public void testPrivilegesForGetCheckPointAction() { public void testRelationshipBetweenPrivileges() { assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("view_index_metadata")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -91,12 +91,12 @@ public void testRelationshipBetweenPrivileges() { ); assertThat( - Operations.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(true) ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create", "create_doc", "index", "delete")).automaton, IndexPrivilege.get(Set.of("write")).automaton ), @@ -104,7 +104,7 @@ public void testRelationshipBetweenPrivileges() { ); assertThat( - 
Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create_index", "delete_index")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -122,7 +122,7 @@ public void testCrossClusterReplicationPrivileges() { "indices:admin/seq_no/renew_retention_lease" ).forEach(action -> assertThat(crossClusterReplication.predicate.test(action + randomAlphaOfLengthBetween(0, 8)), is(true))); assertThat( - Operations.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), + Automatons.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), is(true) ); @@ -139,10 +139,10 @@ public void testCrossClusterReplicationPrivileges() { ); assertThat( - Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(false) ); - assertThat(Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); + assertThat(Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 6f3c435eb12f6..a58acf82ea44e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import 
org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; @@ -218,13 +218,13 @@ public void testIndexCollapse() throws Exception { Set name = Sets.newHashSet(first.name().iterator().next(), second.name().iterator().next()); IndexPrivilege index = IndexPrivilege.get(name); - if (Operations.subsetOf(second.getAutomaton(), first.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - } else if (Operations.subsetOf(first.getAutomaton(), second.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + if (Automatons.subsetOf(second.getAutomaton(), first.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + } else if (Automatons.subsetOf(first.getAutomaton(), second.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } else { - assertFalse(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - assertFalse(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } } From f5e71470709e35359cc7f261e7e477551abe6b19 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 6 Sep 2024 12:13:04 +0100 Subject: [PATCH 250/417] Mute test that uses removed complement syntax --- .../xpack/core/security/support/StringMatcherTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java index 2e31f760f6db2..ef0497dd224f4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java @@ -98,6 +98,7 @@ public void testSingleExactMatch() throws Exception { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/112453") public void testSingleRegex() throws Exception { final String notStr = randomAlphaOfLengthBetween(3, 5); final StringMatcher matcher = StringMatcher.of("/~(" + notStr + ")/"); From c96b78e825fbee285a24b59f53f8554c5f324cbb Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 6 Sep 2024 15:37:52 +0100 Subject: [PATCH 251/417] A couple more Automaton subsetOf --- .../org/elasticsearch/xpack/security/authz/RBACEngine.java | 3 ++- .../authz/store/DeprecationRoleDescriptorConsumer.java | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 1c773a6e3963f..fa6187798da25 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -85,6 +85,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.NamedClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import org.elasticsearch.xpack.core.sql.SqlAsyncActionNames; import 
org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; @@ -550,7 +551,7 @@ public void validateIndexPermissionsAreSubset( Automaton existingPermissions = permissionMap.computeIfAbsent(entry.getKey(), role::allowedActionsMatcher); for (String alias : entry.getValue()) { Automaton newNamePermissions = permissionMap.computeIfAbsent(alias, role::allowedActionsMatcher); - if (Operations.subsetOf(newNamePermissions, existingPermissions) == false) { + if (Automatons.subsetOf(newNamePermissions, existingPermissions) == false) { listener.onResponse(AuthorizationResult.deny()); return; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java index 40cb3ea4d9864..8ff535f3f6231 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; @@ -21,6 +20,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import java.time.ZoneOffset; @@ -195,7 +195,7 @@ private void 
logDeprecatedPermission(RoleDescriptor roleDescriptor) { index.getName(), i -> IndexPrivilege.get(indexPrivileges).getAutomaton() ); - if (false == Operations.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { + if (false == Automatons.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { inferiorIndexNames.add(index.getName()); } } else { From 1ea377de6b158b7bffbc3f6980ff8e1321cf036e Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 6 Sep 2024 15:40:56 +0100 Subject: [PATCH 252/417] Update usage of Automaton sameLanguage in test --- .../security/authz/store/FileRolesStoreTests.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 84998aa48d41f..a4d9dacd1a63d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -136,8 +137,8 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("idx3")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); - assertTrue(Operations.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.READ.getAutomaton(), 
group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); descriptor = roles.get("role1.ab"); assertNotNull(descriptor); @@ -180,7 +181,7 @@ public void testParseFile() throws Exception { assertThat(group.indices()[0], equalTo("/.*_.*/")); assertThat(group.privilege(), notNullValue()); assertTrue( - Operations.sameLanguage( + AutomatonTestUtil.sameLanguage( group.privilege().getAutomaton(), Operations.determinize( Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()), @@ -235,7 +236,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("field_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); @@ -257,7 +258,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertFalse(group.getFieldPermissions().hasFieldLevelSecurity()); assertThat(group.getQuery(), notNullValue()); @@ -278,7 +279,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_fields_idx")); assertThat(group.privilege(), notNullValue()); - 
assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); From 15dd0a8d55fab7db5227f6cced7c18aa32cc15c3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 7 Sep 2024 06:11:25 +0000 Subject: [PATCH 253/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-dc47adbbe73 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 47ca29f57249b..413fd79b178cb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-5f242b3b268 +lucene = 10.0.0-snapshot-dc47adbbe73 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a8efc3a5e01d1..93cf40d208176 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 79010f48e8d0eb5206ac67babcdf35eb95d0fe86 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 7 Sep 2024 06:12:10 +0000 Subject: [PATCH 254/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-ef5d0f2729a --- 
build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ec6bba21e9a74..641f486fe4c0e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-40c4e582cf9 +lucene = 9.12.0-snapshot-ef5d0f2729a bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c99e5c5b5f740..a8ef20e8d1530 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 697fc672c94763290ecd90737120f8c622d14226 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 8 Sep 2024 06:11:50 +0000 Subject: [PATCH 255/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-dc47adbbe73 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 93cf40d208176..b36b50e0009cf 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From f4197a81ae228f3dc5fc45d257ef80ef8a93af2d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 8 Sep 2024 06:12:10 +0000 Subject: [PATCH 256/417] [Automated] Update Lucene snapshot to 
9.12.0-snapshot-371fa57d9c7 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 641f486fe4c0e..d6d849ee12612 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-ef5d0f2729a +lucene = 9.12.0-snapshot-371fa57d9c7 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a8ef20e8d1530..d0b1e06469dbb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 5ff75bb0716070bbeaf3fdaf5cd638758af4518c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 9 Sep 2024 06:11:21 +0000 Subject: [PATCH 257/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-dc47adbbe73 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b36b50e0009cf..586aeabb0d6f0 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From e0e2e1ec0d3719349bd996eef1d2461006c3f4cc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 9 Sep 2024 06:11:21 +0000 Subject: [PATCH 258/417] [Automated] 
Update Lucene snapshot to 9.12.0-snapshot-371fa57d9c7 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d0b1e06469dbb..c186d0a357e38 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 3267fd0c2b8fb7e429e7c5615e6be63c056cb524 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 9 Sep 2024 11:47:33 +0200 Subject: [PATCH 259/417] Remove IndexVersions.V_7_6_0 --- .../analysis/common/CommonAnalysisPlugin.java | 73 ++++--------------- .../repositories/s3/S3Repository.java | 18 ++--- .../elasticsearch/index/IndexVersions.java | 3 +- .../index/seqno/ReplicationTracker.java | 5 +- .../indices/analysis/AnalysisModule.java | 14 +--- .../IndexMetaDataGenerations.java | 3 +- .../repositories/RepositoryData.java | 6 +- .../repositories/ShardGeneration.java | 3 +- .../repositories/ShardGenerations.java | 2 +- .../blobstore/BlobStoreRepository.java | 5 +- .../snapshots/SnapshotsService.java | 7 +- .../test/rest/ESRestTestCase.java | 30 +++----- .../SearchableSnapshotsIntegTests.java | 6 +- 13 files changed, 47 insertions(+), 128 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 7a3041619c14a..4782c16c0a80e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -345,37 +345,17 @@ public Map> getTokenizers() { tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new); 
tokenizers.put("thai", ThaiTokenizerFactory::new); tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { - throw new IllegalArgumentException( - "The [nGram] tokenizer name was deprecated in 7.6. " - + "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead." - ); - } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) { - deprecationLogger.warn( - DeprecationCategory.ANALYSIS, - "nGram_tokenizer_deprecation", - "The [nGram] tokenizer name is deprecated and will be removed in a future version. " - + "Please change the tokenizer name to [ngram] instead." - ); - } - return new NGramTokenizerFactory(indexSettings, environment, name, settings); + throw new IllegalArgumentException( + "The [nGram] tokenizer name was deprecated in 7.6 and removed in 8.0.0. " + + "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead." + ); }); tokenizers.put("ngram", NGramTokenizerFactory::new); tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { - throw new IllegalArgumentException( - "The [edgeNGram] tokenizer name was deprecated in 7.6. " - + "Please use the tokenizer name to [edge_nGram] for indices created in versions 8 or higher instead." - ); - } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) { - deprecationLogger.warn( - DeprecationCategory.ANALYSIS, - "edgeNGram_tokenizer_deprecation", - "The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. " - + "Please change the tokenizer name to [edge_ngram] instead." 
- ); - } - return new EdgeNGramTokenizerFactory(indexSettings, environment, name, settings); + throw new IllegalArgumentException( + "The [edgeNGram] tokenizer name was deprecated in 7.6 and removed in 8.0.0. " + + "Please use the tokenizer name to [edge_nGram] for indices created in versions 8 or higher instead." + ); }); tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new); tokenizers.put("char_group", CharGroupTokenizerFactory::new); @@ -600,39 +580,18 @@ public List getPreConfiguredTokenizers() { // Temporary shim for aliases. TODO deprecate after they are moved tokenizers.add(PreConfiguredTokenizer.indexVersion("nGram", (version) -> { - if (version.onOrAfter(IndexVersions.V_8_0_0)) { - throw new IllegalArgumentException( - "The [nGram] tokenizer name was deprecated in 7.6. " - + "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead." - ); - } else if (version.onOrAfter(IndexVersions.V_7_6_0)) { - deprecationLogger.warn( - DeprecationCategory.ANALYSIS, - "nGram_tokenizer_deprecation", - "The [nGram] tokenizer name is deprecated and will be removed in a future version. " - + "Please change the tokenizer name to [ngram] instead." - ); - } - return new NGramTokenizer(); + throw new IllegalArgumentException( + "The [nGram] tokenizer name was deprecated in 7.6. " + + "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead." + ); })); tokenizers.add(PreConfiguredTokenizer.indexVersion("edgeNGram", (version) -> { - if (version.onOrAfter(IndexVersions.V_8_0_0)) { - throw new IllegalArgumentException( - "The [edgeNGram] tokenizer name was deprecated in 7.6. " - + "Please use the tokenizer name to [edge_ngram] for indices created in versions 8 or higher instead." 
- ); - } else if (version.onOrAfter(IndexVersions.V_7_6_0)) { - deprecationLogger.warn( - DeprecationCategory.ANALYSIS, - "edgeNGram_tokenizer_deprecation", - "The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. " - + "Please change the tokenizer name to [edge_ngram] instead." - ); - } - return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); + throw new IllegalArgumentException( + "The [edgeNGram] tokenizer name was deprecated in 7.6. " + + "Please use the tokenizer name to [edge_ngram] for indices created in versions 8 or higher instead." + ); })); tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new)); - return tokenizers; } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index d75a3e8ad433e..62b14dbe90c5b 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.repositories.FinalizeSnapshotContext; @@ -156,13 +155,12 @@ class S3Repository extends MeteredBlobStoreRepository { /** * Artificial delay to introduce after a snapshot finalization or delete has finished so long as the repository is still using the - * backwards compatible snapshot format from before - * {@link org.elasticsearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION} ({@link IndexVersions#V_7_6_0}). 
- * This delay is necessary so that the eventually consistent nature of AWS S3 does not randomly result in repository corruption when - * doing repository operations in rapid succession on a repository in the old metadata format. + * backwards compatible snapshot format from before V_7_6_0. This delay is necessary so that the eventually consistent + * nature of AWS S3 does not randomly result in repository corruption when doing repository operations in rapid succession on a + * repository in the old metadata format. * This setting should not be adjusted in production when working with an AWS S3 backed repository. Doing so risks the repository * becoming silently corrupted. To get rid of this waiting period, either create a new S3 repository or remove all snapshots older than - * {@link IndexVersions#V_7_6_0} from the repository which will trigger an upgrade of the repository metadata to the new + * V_7_6_0 from the repository which will trigger an upgrade of the repository metadata to the new * format and disable the cooldown period. */ static final Setting COOLDOWN_PERIOD = Setting.timeSetting( @@ -371,14 +369,12 @@ public void onFailure(Exception e) { private void logCooldownInfo() { logger.info( - "Sleeping for [{}] after modifying repository [{}] because it contains snapshots older than version [{}]" + "Sleeping for [{}] after modifying repository [{}] because it contains snapshots older than version [\"7060099\"]" + " and therefore is using a backwards compatible metadata format that requires this cooldown period to avoid " + "repository corruption. 
To get rid of this message and move to the new repository metadata format, either remove " - + "all snapshots older than version [{}] from the repository or create a new repository at an empty location.", + + "all snapshots older than version [\"7060099\"] from the repository or create a new repository at an empty location.", coolDown, - metadata.name(), - SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION, - SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION + metadata.name() ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 8e0ca1de3ec2f..6f0089d876cbb 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -59,7 +59,6 @@ private static Version parseUnchecked(String version) { @UpdateForV9 // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); @@ -171,7 +170,7 @@ private static Version parseUnchecked(String version) { * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions. 
*/ - public static final IndexVersion MINIMUM_COMPATIBLE = V_7_6_0; + public static final IndexVersion MINIMUM_COMPATIBLE = V_7_7_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 138bd2c6d8840..4d60eb1a20e65 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.replication.ReplicationResponse; -import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; @@ -975,8 +974,8 @@ public ReplicationTracker( this.pendingInSync = new HashSet<>(); this.routingTable = null; this.replicationGroup = null; - this.hasAllPeerRecoveryRetentionLeases = indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0) - || (indexSettings.isSoftDeleteEnabled() && indexSettings.getIndexMetadata().getState() == IndexMetadata.State.OPEN); + // TODO lucene 10 upgrade, remove the following field since its trivially true after V_8_0_0 + this.hasAllPeerRecoveryRetentionLeases = true; this.fileBasedRecoveryThreshold = IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.get(indexSettings.getSettings()); this.safeCommitInfoSupplier = safeCommitInfoSupplier; diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index ecb75f8b4f506..9f6e9f9e46bb4 100644 --- 
a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -199,19 +199,7 @@ static Map setupPreConfiguredTokenFilters(List preConfiguredTokenFilters.register("lowercase", PreConfiguredTokenFilter.singleton("lowercase", true, LowerCaseFilter::new)); // Add "standard" for old indices (bwc) preConfiguredTokenFilters.register("standard", PreConfiguredTokenFilter.indexVersion("standard", true, (reader, version) -> { - // This was originally removed in 7_0_0 but due to a cacheing bug it was still possible - // in certain circumstances to create a new index referencing the standard token filter - // until version 7_5_2 - if (version.before(IndexVersions.V_7_6_0)) { - deprecationLogger.warn( - DeprecationCategory.ANALYSIS, - "standard_deprecation", - "The [standard] token filter is deprecated and will be removed in a future version." - ); - } else { - throw new IllegalArgumentException("The [standard] token filter has been removed."); - } - return reader; + throw new IllegalArgumentException("The [standard] token filter has been removed."); })); /* Note that "stop" is available in lucene-core but it's pre-built * version uses a set of English stop words that are in diff --git a/server/src/main/java/org/elasticsearch/repositories/IndexMetaDataGenerations.java b/server/src/main/java/org/elasticsearch/repositories/IndexMetaDataGenerations.java index 7f2a09637dbf7..ac4febed90800 100644 --- a/server/src/main/java/org/elasticsearch/repositories/IndexMetaDataGenerations.java +++ b/server/src/main/java/org/elasticsearch/repositories/IndexMetaDataGenerations.java @@ -70,8 +70,7 @@ public String getIndexMetaBlobId(String metaIdentifier) { /** * Get the blob id by {@link SnapshotId} and {@link IndexId} and fall back to the value of {@link SnapshotId#getUUID()} if none is - * known to enable backwards compatibility with versions older than - * {@link 
org.elasticsearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION} which used the snapshot uuid as index metadata + * known to enable backwards compatibility with versions older than 7.6 which used the snapshot uuid as index metadata * blob uuid. * * @param snapshotId Snapshot Id diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index c6494eca9823b..b12ccde476971 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -699,10 +699,8 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final final IndexVersion minVersion; if (shouldWriteUUIDS) { minVersion = SnapshotsService.UUIDS_IN_REPO_DATA_VERSION; - } else if (shouldWriteIndexGens) { - minVersion = SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION; } else { - minVersion = SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION; + minVersion = SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION; } // Note that all known versions expect the MIN_VERSION field to be a string, and versions before 8.11.0 try and parse it as a // major.minor.patch version number, so if we introduce a numeric format version in future then this will cause them to fail @@ -885,7 +883,7 @@ public static RepositoryData snapshotsFromXContent(XContentParser parser, long g final var version = switch (versionString) { case "7.12.0" -> IndexVersions.V_7_12_0; case "7.9.0" -> IndexVersions.V_7_9_0; - case "7.6.0" -> IndexVersions.V_7_6_0; + case "7.6.0" -> IndexVersion.fromId(7_06_00_99); default -> // All (known) versions only ever emit one of the above strings for the format version, so if we see something // else it must be a newer version or else something wholly invalid. 
Report the raw string rather than trying diff --git a/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java b/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java index 275bbdb3da45d..3a90af3582293 100644 --- a/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java +++ b/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; -import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -24,7 +23,7 @@ /** * The generation ID of a shard, used to name the shard-level {@code index-$SHARD_GEN} file that represents a {@link - * BlobStoreIndexShardSnapshots} instance. Before 7.6 ({@link SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION}) these generations were + * BlobStoreIndexShardSnapshots} instance. Before 7.6 these generations were * numeric, but recent versions use a UUID instead. */ public final class ShardGeneration implements Writeable, ToXContentFragment { diff --git a/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java b/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java index 0dcb28278a66d..7a92a9ffd840d 100644 --- a/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java +++ b/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java @@ -135,7 +135,7 @@ public Map> obsoleteShardGenerations(Shar *

  • {@link #DELETED_SHARD_GEN} a deleted shard that isn't referenced by any snapshot in the repository any longer
  • *
  • {@link #NEW_SHARD_GEN} a new shard that we know doesn't hold any valid data yet in the repository
  • *
  • {@code null} unknown state. The shard either does not exist at all or it was created by a node older than - * {@link org.elasticsearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION}. If a caller expects a shard to exist in the + * 7.6. If a caller expects a shard to exist in the * repository but sees a {@code null} return, it should try to recover the generation by falling back to listing the contents * of the respective shard directory.
  • * diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 5ae2b3668c34d..a9423c44d47bd 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1671,7 +1671,7 @@ public void finalizeSnapshot(final FinalizeSnapshotContext finalizeSnapshotConte final Collection indices = shardGenerations.indices(); final SnapshotId snapshotId = snapshotInfo.snapshotId(); // Once we are done writing the updated index-N blob we remove the now unreferenced index-${uuid} blobs in each shard - // directory if all nodes are at least at version SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION + // directory if all nodes are at least at version 7.6 // If there are older version nodes in the cluster, we don't need to run this cleanup as it will have already happened // when writing the index-${N} to each shard directory. final IndexVersion repositoryMetaVersion = finalizeSnapshotContext.repositoryMetaVersion(); @@ -3795,8 +3795,7 @@ public BlobStoreIndexShardSnapshots getBlobStoreIndexShardSnapshots(IndexId inde * * @param blobs list of blobs in repository * @param generation shard generation or {@code null} in case there was no shard generation tracked in the {@link RepositoryData} for - * this shard because its snapshot was created in a version older than - * {@link SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION}. + * this shard because its snapshot was created in a version older than 7.6. 
* @return tuple of BlobStoreIndexShardSnapshots and the last snapshot index generation */ private Tuple buildBlobStoreIndexShardSnapshots( diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 65fce62ea4c41..8e2f0a8ba2015 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -138,8 +138,6 @@ */ public final class SnapshotsService extends AbstractLifecycleComponent implements ClusterStateApplier { - public static final IndexVersion SHARD_GEN_IN_REPO_DATA_VERSION = IndexVersions.V_7_6_0; - public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = IndexVersions.V_7_9_0; public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersions.V_7_12_0; @@ -2337,8 +2335,9 @@ public static IndexVersion minCompatibleVersion( * @param repositoryMetaVersion version to check * @return true if version supports {@link ShardGenerations} */ + // TODO lucene 10 upgrade, remove method since its trivially true now public static boolean useShardGenerations(IndexVersion repositoryMetaVersion) { - return repositoryMetaVersion.onOrAfter(SHARD_GEN_IN_REPO_DATA_VERSION); + return true; } /** @@ -2822,7 +2821,7 @@ private SnapshotsInProgress updatedSnapshotsInProgress(ClusterState currentState updatedDeletions, currentState, entry.indices().values(), - entry.version().onOrAfter(SHARD_GEN_IN_REPO_DATA_VERSION), + true, repositoryData, repoName ); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 8cafd73858c74..d88a7cc4dda74 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1874,13 +1874,7 @@ protected static void 
expectSoftDeletesWarning(Request request, String indexName + indexName + "]."; - final var softDeleteDisabledDeprecated = minimumIndexVersion().onOrAfter(IndexVersions.V_7_6_0); - request.setOptions(expectVersionSpecificWarnings(v -> { - if (softDeleteDisabledDeprecated) { - v.current(expectedWarning); - } - v.compatible(expectedWarning); - })); + request.setOptions(expectVersionSpecificWarnings(v -> { v.current(expectedWarning); })); } protected static Map getIndexSettings(String index) throws IOException { @@ -2210,7 +2204,6 @@ public void assertEmptyTranslog(String index) throws Exception { * that we have renewed every PRRL to the global checkpoint of the corresponding copy and properly synced to all copies. */ public void ensurePeerRecoveryRetentionLeasesRenewedAndSynced(String index) throws Exception { - boolean mustHavePRRLs = minimumIndexVersion().onOrAfter(IndexVersions.V_7_6_0); assertBusy(() -> { Map stats = entityAsMap(client().performRequest(new Request("GET", index + "/_stats?level=shards"))); @SuppressWarnings("unchecked") @@ -2228,25 +2221,20 @@ public void ensurePeerRecoveryRetentionLeasesRenewedAndSynced(String index) thro "retention_leases.leases", copy ); - if (mustHavePRRLs == false && retentionLeases == null) { - continue; - } assertNotNull(retentionLeases); for (Map retentionLease : retentionLeases) { if (((String) retentionLease.get("id")).startsWith("peer_recovery/")) { assertThat(retentionLease.get("retaining_seq_no"), equalTo(globalCheckpoint + 1)); } } - if (mustHavePRRLs) { - List existingLeaseIds = retentionLeases.stream() - .map(lease -> (String) lease.get("id")) - .collect(Collectors.toList()); - List expectedLeaseIds = shard.stream() - .map(shr -> (String) XContentMapValues.extractValue("routing.node", shr)) - .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId) - .collect(Collectors.toList()); - assertThat("not every active copy has established its PPRL", expectedLeaseIds, everyItem(in(existingLeaseIds))); - } + List 
existingLeaseIds = retentionLeases.stream() + .map(lease -> (String) lease.get("id")) + .collect(Collectors.toList()); + List expectedLeaseIds = shard.stream() + .map(shr -> (String) XContentMapValues.extractValue("routing.node", shr)) + .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId) + .collect(Collectors.toList()); + assertThat("not every active copy has established its PPRL", expectedLeaseIds, everyItem(in(existingLeaseIds))); } } }, 60, TimeUnit.SECONDS); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index 23e502572a005..4e344af61726c 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -826,11 +826,7 @@ public void testSnapshotOfSearchableSnapshotIncludesNoDataButCanBeRestored() thr .settings() .get("location") ); - initWithSnapshotVersion( - tmpRepositoryName, - repoPath, - randomFrom(SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION, SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION) - ); + initWithSnapshotVersion(tmpRepositoryName, repoPath, SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION); assertAcked(clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, tmpRepositoryName)); createRepository(repositoryName, "fs", repoPath); } From 74040a59521625f74d7e65e3f886aefb2b7e87c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 9 Sep 2024 12:29:59 +0200 Subject: [PATCH 260/417] Remove IndexVersions.V_7_7_0 - V_7_11_0 --- .../elasticsearch/index/IndexVersions.java | 7 +-- 
.../repositories/RepositoryData.java | 54 ++++++++----------- .../blobstore/BlobStoreRepository.java | 44 ++++++--------- .../snapshots/SnapshotsService.java | 14 ++--- .../index/mapper/DateFieldMapperTests.java | 3 -- .../index/mapper/IpFieldMapperTests.java | 7 --- .../index/mapper/ParametrizedMapperTests.java | 6 --- .../index/mapper/MapperTestCase.java | 18 ------- .../GeoShapeWithDocValuesFieldMapper.java | 2 +- .../wildcard/mapper/WildcardFieldMapper.java | 7 +-- .../mapper/WildcardFieldMapperTests.java | 35 ------------ 11 files changed, 43 insertions(+), 154 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 6f0089d876cbb..4520dc2a042ac 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -59,11 +59,6 @@ private static Version parseUnchecked(String version) { @UpdateForV9 // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); - public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); - public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); - public static final IndexVersion V_7_10_0 = def(7_10_00_99, parseUnchecked("8.7.0")); - public static final IndexVersion V_7_11_0 = def(7_11_00_99, parseUnchecked("8.7.0")); public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); public static final IndexVersion V_7_13_0 = def(7_13_00_99, parseUnchecked("8.8.2")); public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); @@ -170,7 +165,7 @@ private static Version parseUnchecked(String version) { * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for 
the equivalent definitions. */ - public static final IndexVersion MINIMUM_COMPATIBLE = V_7_7_0; + public static final IndexVersion MINIMUM_COMPATIBLE = V_7_12_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index b12ccde476971..b31b78c1cd1ee 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -686,39 +686,31 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final throws IOException { final boolean shouldWriteUUIDS = SnapshotsService.includesUUIDs(repoMetaVersion); - final boolean shouldWriteIndexGens = SnapshotsService.useIndexGenerations(repoMetaVersion); - final boolean shouldWriteShardGens = SnapshotsService.useShardGenerations(repoMetaVersion); + final boolean shouldWriteIndexGens = true; assert Boolean.compare(shouldWriteUUIDS, shouldWriteIndexGens) <= 0; - assert Boolean.compare(shouldWriteIndexGens, shouldWriteShardGens) <= 0; builder.startObject(); - if (shouldWriteShardGens) { - // Add min version field to make it impossible for older ES versions to deserialize this object - final IndexVersion minVersion; - if (shouldWriteUUIDS) { - minVersion = SnapshotsService.UUIDS_IN_REPO_DATA_VERSION; - } else { - minVersion = SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION; - } - // Note that all known versions expect the MIN_VERSION field to be a string, and versions before 8.11.0 try and parse it as a - // major.minor.patch version number, so if we introduce a numeric format version in future then this will cause them to fail - // with an opaque parse error rather than the more helpful: - // - // IllegalStateException: this snapshot repository format requires Elasticsearch version [x.y.z] or 
later - // - // Likewise if we simply encode the numeric IndexVersion as a string then versions from 8.11.0 onwards will report the exact - // string in this message, which is not especially helpful to users. Slightly more helpful than the opaque parse error reported - // by earlier versions, but still not great. TODO rethink this if and when adding a new snapshot repository format version. - if (minVersion.before(IndexVersions.V_8_10_0)) { - // write as a string - builder.field(MIN_VERSION, Version.fromId(minVersion.id()).toString()); - } else { - assert false : "writing a numeric version [" + minVersion + "] is unhelpful here, see preceding comment"; - // write an int - builder.field(MIN_VERSION, minVersion.id()); - } + // Add min version field to make it impossible for older ES versions to deserialize this object + final IndexVersion minVersion; + minVersion = SnapshotsService.UUIDS_IN_REPO_DATA_VERSION; + // Note that all known versions expect the MIN_VERSION field to be a string, and versions before 8.11.0 try and parse it as a + // major.minor.patch version number, so if we introduce a numeric format version in future then this will cause them to fail + // with an opaque parse error rather than the more helpful: + // + // IllegalStateException: this snapshot repository format requires Elasticsearch version [x.y.z] or later + // + // Likewise if we simply encode the numeric IndexVersion as a string then versions from 8.11.0 onwards will report the exact + // string in this message, which is not especially helpful to users. Slightly more helpful than the opaque parse error reported + // by earlier versions, but still not great. TODO rethink this if and when adding a new snapshot repository format version. 
+ if (minVersion.before(IndexVersions.V_8_10_0)) { + // write as a string + builder.field(MIN_VERSION, Version.fromId(minVersion.id()).toString()); + } else { + assert false : "writing a numeric version [" + minVersion + "] is unhelpful here, see preceding comment"; + // write an int + builder.field(MIN_VERSION, minVersion.id()); } if (shouldWriteUUIDS) { @@ -828,9 +820,7 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final builder.value(snapshotId.getUUID()); } builder.endArray(); - if (shouldWriteShardGens) { - builder.xContentList(SHARD_GENERATIONS, shardGenerations.getGens(indexId)); - } + builder.xContentList(SHARD_GENERATIONS, shardGenerations.getGens(indexId)); builder.endObject(); } builder.endObject(); @@ -882,7 +872,7 @@ public static RepositoryData snapshotsFromXContent(XContentParser parser, long g final var versionString = parser.text(); final var version = switch (versionString) { case "7.12.0" -> IndexVersions.V_7_12_0; - case "7.9.0" -> IndexVersions.V_7_9_0; + case "7.9.0" -> IndexVersion.fromId(7_09_00_99); case "7.6.0" -> IndexVersion.fromId(7_06_00_99); default -> // All (known) versions only ever emit one of the above strings for the format version, so if we see something diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index a9423c44d47bd..cf6a3b2d7e926 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1679,8 +1679,6 @@ public void finalizeSnapshot(final FinalizeSnapshotContext finalizeSnapshotConte final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); - final boolean writeIndexGens = SnapshotsService.useIndexGenerations(repositoryMetaVersion); - record MetadataWriteResult( RepositoryData existingRepositoryData, 
Map indexMetas, @@ -1716,15 +1714,11 @@ record RootBlobUpdateResult(RepositoryData oldRepositoryData, RepositoryData new } final MetadataWriteResult metadataWriteResult; - if (writeIndexGens) { - metadataWriteResult = new MetadataWriteResult( - existingRepositoryData, - ConcurrentCollections.newConcurrentMap(), - ConcurrentCollections.newConcurrentMap() - ); - } else { - metadataWriteResult = new MetadataWriteResult(existingRepositoryData, null, null); - } + metadataWriteResult = new MetadataWriteResult( + existingRepositoryData, + ConcurrentCollections.newConcurrentMap(), + ConcurrentCollections.newConcurrentMap() + ); try (var allMetaListeners = new RefCountingListener(l.map(ignored -> metadataWriteResult))) { // We ignore all FileAlreadyExistsException when writing metadata since otherwise a master failover while in this method @@ -1746,24 +1740,16 @@ record RootBlobUpdateResult(RepositoryData oldRepositoryData, RepositoryData new for (IndexId index : indices) { executor.execute(ActionRunnable.run(allMetaListeners.acquire(), () -> { final IndexMetadata indexMetaData = clusterMetadata.index(index.getName()); - if (writeIndexGens) { - final String identifiers = IndexMetaDataGenerations.buildUniqueIdentifier(indexMetaData); - String metaUUID = existingRepositoryData.indexMetaDataGenerations().getIndexMetaBlobId(identifiers); - if (metaUUID == null) { - // We don't yet have this version of the metadata so we write it - metaUUID = UUIDs.base64UUID(); - INDEX_METADATA_FORMAT.write(indexMetaData, indexContainer(index), metaUUID, compress); - metadataWriteResult.indexMetaIdentifiers().put(identifiers, metaUUID); - } // else this task was largely a no-op - TODO no need to fork in that case - metadataWriteResult.indexMetas().put(index, identifiers); - } else { - INDEX_METADATA_FORMAT.write( - clusterMetadata.index(index.getName()), - indexContainer(index), - snapshotId.getUUID(), - compress - ); - } + + final String identifiers = 
IndexMetaDataGenerations.buildUniqueIdentifier(indexMetaData); + String metaUUID = existingRepositoryData.indexMetaDataGenerations().getIndexMetaBlobId(identifiers); + if (metaUUID == null) { + // We don't yet have this version of the metadata so we write it + metaUUID = UUIDs.base64UUID(); + INDEX_METADATA_FORMAT.write(indexMetaData, indexContainer(index), metaUUID, compress); + metadataWriteResult.indexMetaIdentifiers().put(identifiers, metaUUID); + } // else this task was largely a no-op - TODO no need to fork in that case + metadataWriteResult.indexMetas().put(index, identifiers); })); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 8e2f0a8ba2015..e9e5591371710 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -138,7 +138,9 @@ */ public final class SnapshotsService extends AbstractLifecycleComponent implements ClusterStateApplier { - public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = IndexVersions.V_7_9_0; + // TODO lucene 10 upgrade: check if we need any of the pre-8 versions constants here for bwc + // e.g. reading from old snapshots + public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = IndexVersion.fromId(7_09_00_99); public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersions.V_7_12_0; public static final TransportVersion UUIDS_IN_REPO_DATA_TRANSPORT_VERSION = TransportVersions.V_7_12_0; @@ -2340,16 +2342,6 @@ public static boolean useShardGenerations(IndexVersion repositoryMetaVersion) { return true; } - /** - * Checks whether the metadata version supports writing {@link ShardGenerations} to the repository. 
- * - * @param repositoryMetaVersion version to check - * @return true if version supports {@link ShardGenerations} - */ - public static boolean useIndexGenerations(IndexVersion repositoryMetaVersion) { - return repositoryMetaVersion.onOrAfter(INDEX_GEN_IN_REPO_DATA_VERSION); - } - /** * Checks whether the metadata version supports writing the cluster- and repository-uuid to the repository. * diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 27327663b5956..ad4064aaf97fa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -246,9 +246,6 @@ public void testBadNullValue() throws IOException { ) ); - createDocumentMapper(IndexVersions.V_7_9_0, fieldMapping(b -> b.field("type", "date").field("null_value", "foo"))); - - assertWarnings("Error parsing [foo] as date in [null_value] on field [field]); [null_value] will be ignored"); } public void testNullConfigValuesFail() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index ba9c2e6c4a299..9a22436928c79 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.script.IpFieldScript; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -204,12 +203,6 @@ public void testNullValue() throws IOException { e.getMessage(), "Failed to parse mapping: Error parsing [null_value] on field 
[field]: ':1' is not an IP string literal." ); - - createDocumentMapper(IndexVersions.V_7_9_0, fieldMapping(b -> { - b.field("type", "ip"); - b.field("null_value", ":1"); - })); - assertWarnings("Error parsing [:1] as IP in [null_value] on field [field]); [null_value] will be ignored"); } public void testDimension() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 55761e5ec339d..4ce81b1dcd320 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -597,12 +597,6 @@ public void testDeprecatedParameters() { // 'index' is declared explicitly, 'store' is not, but is one of the previously always-accepted params String mapping = """ {"type":"test_mapper","index":false,"store":true,"required":"value"}"""; - TestMapper mapper = fromMapping(mapping, IndexVersions.V_7_8_0, TransportVersions.V_7_8_0); - assertWarnings("Parameter [store] has no effect on type [test_mapper] and will be removed in future"); - assertFalse(mapper.index); - assertEquals(""" - {"field":{"type":"test_mapper","index":false,"required":"value"}}""", Strings.toString(mapper)); - MapperParsingException e = expectThrows( MapperParsingException.class, () -> fromMapping(mapping, IndexVersions.V_8_0_0, TransportVersions.V_8_0_0) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 9eaace8f93e58..b3c68a2e059a0 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -92,8 +92,6 @@ */ public abstract class MapperTestCase extends MapperServiceTestCase { - public static final 
IndexVersion DEPRECATED_BOOST_INDEX_VERSION = IndexVersions.V_7_10_0; - protected abstract void minimalMapping(XContentBuilder b) throws IOException; /** @@ -508,22 +506,6 @@ public final void testMeta() throws IOException { ); } - public final void testDeprecatedBoostWarning() throws IOException { - try { - createMapperService(DEPRECATED_BOOST_INDEX_VERSION, fieldMapping(b -> { - minimalMapping(b, DEPRECATED_BOOST_INDEX_VERSION); - b.field("boost", 2.0); - })); - String[] warnings = Strings.concatStringArrays( - getParseMinimalWarnings(DEPRECATED_BOOST_INDEX_VERSION), - new String[] { "Parameter [boost] on field [field] is deprecated and has no effect" } - ); - assertWarnings(warnings); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), anyOf(containsString("Unknown parameter [boost]"), containsString("[boost : 2.0]"))); - } - } - public void testBoostNotAllowed() throws IOException { MapperParsingException e = expectThrows( MapperParsingException.class, diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index da7af5129e828..86dd7df205851 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -130,7 +130,7 @@ public Builder( this.geoFormatterFactory = geoFormatterFactory; this.ignoreMalformed = ignoreMalformedParam(m -> builder(m).ignoreMalformed.get(), ignoreMalformedByDefault); this.coerce = coerceParam(m -> builder(m).coerce.get(), coerceByDefault); - this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), IndexVersions.V_7_8_0.onOrBefore(version)); + this.hasDocValues = Parameter.docValuesParam(m -> 
builder(m).hasDocValues.get(), false); addScriptValidation(script, indexed, hasDocValues); } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index c5b3b63427780..df98325edb836 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -50,7 +50,6 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.LowercaseNormalizer; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -270,11 +269,7 @@ public static final class WildcardFieldType extends MappedFieldType { private WildcardFieldType(String name, String nullValue, int ignoreAbove, IndexVersion version, Map meta) { super(name, true, false, true, Defaults.TEXT_SEARCH_INFO, meta); - if (version.onOrAfter(IndexVersions.V_7_10_0)) { - this.analyzer = WILDCARD_ANALYZER_7_10; - } else { - this.analyzer = WILDCARD_ANALYZER_7_9; - } + this.analyzer = WILDCARD_ANALYZER_7_10; this.nullValue = nullValue; this.ignoreAbove = ignoreAbove; } diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 1cdaf23e8070d..3406aa7ee27c4 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -51,7 +51,6 @@ import 
org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -129,9 +128,6 @@ public void setUp() throws Exception { builder.ignoreAbove(MAX_FIELD_LENGTH); wildcardFieldType = builder.build(MapperBuilderContext.root(false, false)); - Builder builder79 = new WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, IndexVersions.V_7_9_0); - wildcardFieldType79 = builder79.build(MapperBuilderContext.root(false, false)); - org.elasticsearch.index.mapper.KeywordFieldMapper.Builder kwBuilder = new KeywordFieldMapper.Builder( KEYWORD_FIELD_NAME, IndexVersion.current() @@ -212,37 +208,6 @@ public void testIgnoreAbove() throws IOException { assertTrue(fields.stream().anyMatch(field -> "field".equals(field.stringValue()))); } - public void testBWCIndexVersion() throws IOException { - // Create old format index using wildcard ngram analyzer used in 7.9 launch - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(WildcardFieldMapper.WILDCARD_ANALYZER_7_9); - iwc.setMergePolicy(newTieredMergePolicy(random())); - RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); - - Document doc = new Document(); - LuceneDocument parseDoc = new LuceneDocument(); - addFields(parseDoc, doc, "a b"); - indexDoc(parseDoc, doc, iw); - - iw.forceMerge(1); - DirectoryReader reader = iw.getReader(); - IndexSearcher searcher = newSearcher(reader); - iw.close(); - - // Unnatural circumstance - testing we fail if we were to use the new analyzer on old index - Query oldWildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("a b", null, null); - TopDocs oldWildcardFieldTopDocs = searcher.search(oldWildcardFieldQuery, 10, Sort.INDEXORDER); - 
assertThat(oldWildcardFieldTopDocs.totalHits.value, equalTo(0L)); - - // Natural circumstance test we revert to the old analyzer for old indices - Query wildcardFieldQuery = wildcardFieldType79.fieldType().wildcardQuery("a b", null, null); - TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); - - reader.close(); - dir.close(); - } - // Test long query strings don't cause exceptions public void testTooBigQueryField() throws IOException { Directory dir = newDirectory(); From 70ee5487633363f4aadbcd32e8f26bfd4351c5ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 9 Sep 2024 13:50:23 +0200 Subject: [PATCH 261/417] Remove remainign V_7x IndexVersions --- .../MultiVersionRepositoryAccessIT.java | 5 +---- .../gateway/PersistedClusterStateService.java | 2 +- .../elasticsearch/index/IndexSortConfig.java | 18 +----------------- .../org/elasticsearch/index/IndexVersions.java | 8 +------- .../indices/recovery/RecoverySettings.java | 7 ++++--- .../repositories/RepositoryData.java | 2 +- .../snapshots/SnapshotsService.java | 5 ++--- .../index/IndexSortSettingsTests.java | 14 -------------- ...earchableSnapshotIndexMetadataUpgrader.java | 10 ++-------- ...ableSnapshotIndexMetadataUpgraderTests.java | 4 ++-- .../SnapshotsRecoveryPlannerServiceTests.java | 3 +-- 11 files changed, 16 insertions(+), 62 deletions(-) diff --git a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java index 9d330cd7e35eb..2148ea66cae5e 100644 --- a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java +++ b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.Strings; 
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; @@ -181,9 +180,7 @@ public void testUpgradeMovesRepoToNewMetaVersion() throws IOException { // 7.12.0+ will try to load RepositoryData during repo creation if verify is true, which is impossible in case of version // incompatibility in the downgrade test step. We verify that it is impossible here and then create the repo using verify=false // to check behavior on other operations below. - final boolean verify = TEST_STEP != TestStep.STEP3_OLD_CLUSTER - || SnapshotsService.includesUUIDs(minNodeVersion) - || minNodeVersion.before(IndexVersions.V_7_12_0); + final boolean verify = TEST_STEP != TestStep.STEP3_OLD_CLUSTER || SnapshotsService.includesUUIDs(minNodeVersion); if (verify == false) { expectThrowsAnyOf(EXPECTED_BWC_EXCEPTIONS, () -> createRepository(repoName, false, true)); } diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 17d01bc9332ef..84f8aad1187ac 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -712,7 +712,7 @@ private static void consumeFromType( if (document.getField(PAGE_FIELD_NAME) == null) { // legacy format: not paginated or compressed - assert IndexVersions.MINIMUM_COMPATIBLE.before(IndexVersions.V_7_16_0); + assert IndexVersions.MINIMUM_COMPATIBLE.before(IndexVersion.fromId(7_16_00_99)); bytesReferenceConsumer.accept(documentData); continue; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java index 
a11a51ef7ad62..05744a0eaf185 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.cluster.metadata.DataStream; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -237,22 +236,7 @@ public Sort buildIndexSort( throw new IllegalArgumentException(err); } if (Objects.equals(ft.name(), sortSpec.field) == false) { - if (this.indexCreatedVersion.onOrAfter(IndexVersions.V_7_13_0)) { - throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field"); - } else { - DEPRECATION_LOGGER.warn( - DeprecationCategory.MAPPINGS, - "index-sort-aliases", - "Index sort for index [" - + indexName - + "] defined on field [" - + sortSpec.field - + "] which resolves to field [" - + ft.name() - + "]. " - + "You will not be able to define an index sort over aliased fields in new indexes" - ); - } + throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field"); } boolean reverse = sortSpec.order == null ? 
false : (sortSpec.order == SortOrder.DESC); MultiValueMode mode = sortSpec.mode; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 4520dc2a042ac..27cb16798f7c2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -59,12 +59,6 @@ private static Version parseUnchecked(String version) { @UpdateForV9 // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); - public static final IndexVersion V_7_13_0 = def(7_13_00_99, parseUnchecked("8.8.2")); - public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); - public static final IndexVersion V_7_15_0 = def(7_15_00_99, parseUnchecked("8.9.0")); - public static final IndexVersion V_7_16_0 = def(7_16_00_99, parseUnchecked("8.10.1")); - public static final IndexVersion V_7_17_0 = def(7_17_00_99, parseUnchecked("8.11.1")); public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); @@ -165,7 +159,7 @@ private static Version parseUnchecked(String version) { * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions. 
*/ - public static final IndexVersion MINIMUM_COMPATIBLE = V_7_12_0; + public static final IndexVersion MINIMUM_COMPATIBLE = V_8_0_0; static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 2e10a5de2d4e1..f7bf115434cbb 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -28,7 +28,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.monitor.os.OsProbe; import org.elasticsearch.node.NodeRoleSettings; @@ -46,9 +45,11 @@ import static org.elasticsearch.node.NodeRoleSettings.NODE_ROLES_SETTING; public class RecoverySettings { - public static final IndexVersion SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION = IndexVersions.V_7_15_0; + // TODO lucene 10 upgrade can the following constant be removed? + public static final IndexVersion SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION = IndexVersion.fromId(7_15_00_99); public static final TransportVersion SNAPSHOT_RECOVERIES_SUPPORTED_TRANSPORT_VERSION = TransportVersions.V_7_15_0; - public static final IndexVersion SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = IndexVersions.V_7_16_0; + // TODO lucene 10 upgrade can the following constant be removed? 
+ public static final IndexVersion SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = IndexVersion.fromId(7_16_00_99); public static final TransportVersion SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_TRANSPORT_VERSION = TransportVersions.V_7_16_0; private static final Logger logger = LogManager.getLogger(RecoverySettings.class); diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index b31b78c1cd1ee..a824d94d8cc35 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -871,7 +871,7 @@ public static RepositoryData snapshotsFromXContent(XContentParser parser, long g XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_STRING, token, parser); final var versionString = parser.text(); final var version = switch (versionString) { - case "7.12.0" -> IndexVersions.V_7_12_0; + case "7.12.0" -> IndexVersion.fromId(7_12_00_99); case "7.9.0" -> IndexVersion.fromId(7_09_00_99); case "7.6.0" -> IndexVersion.fromId(7_06_00_99); default -> diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index e9e5591371710..4dd86c66b5427 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -141,12 +141,11 @@ public final class SnapshotsService extends AbstractLifecycleComponent implement // TODO lucene 10 upgrade: check if we need any of the pre-8 versions constants here for bwc // e.g. 
reading from old snapshots public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = IndexVersion.fromId(7_09_00_99); + public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersion.fromId(7_12_00_99); + public static final IndexVersion FILE_INFO_WRITER_UUIDS_IN_SHARD_DATA_VERSION = IndexVersion.fromId(7_16_00_99); - public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersions.V_7_12_0; public static final TransportVersion UUIDS_IN_REPO_DATA_TRANSPORT_VERSION = TransportVersions.V_7_12_0; - public static final IndexVersion FILE_INFO_WRITER_UUIDS_IN_SHARD_DATA_VERSION = IndexVersions.V_7_16_0; - // TODO lucene 10 upgrade, check if IndexVersion removal affects reading supported spanshots // public static final IndexVersion OLD_SNAPSHOT_FORMAT = IndexVersions.V_7_5_0; diff --git a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java index 379adc9ce517a..b2db13c1481ec 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java @@ -159,20 +159,6 @@ public void testSortingAgainstAliases() { assertEquals("Cannot use alias [field] as an index sort field", e.getMessage()); } - public void testSortingAgainstAliasesPre713() { - IndexSettings indexSettings = indexSettings( - Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_7_12_0).put("index.sort.field", "field").build() - ); - MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType("aliased"); - Sort sort = buildIndexSort(indexSettings, Map.of("field", aliased)); - assertThat(sort.getSort(), arrayWithSize(1)); - assertThat(sort.getSort()[0].getField(), equalTo("aliased")); - assertWarnings( - "Index sort for index [test] defined on field [field] which resolves to field [aliased]. 
" - + "You will not be able to define an index sort over aliased fields in new indexes" - ); - } - public void testTimeSeriesMode() { IndexSettings indexSettings = indexSettings( Settings.builder() diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java index ccdad61adee52..ed384ed013df3 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java @@ -97,10 +97,7 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String static boolean needsUpgrade(ClusterState state) { return state.metadata() .stream() - .filter( - imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) - && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) - ) + .filter(imd -> imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0)) .filter(IndexMetadata::isPartialSearchableSnapshot) .map(IndexMetadata::getSettings) .anyMatch(SearchableSnapshotIndexMetadataUpgrader::notFrozenShardLimitGroup); @@ -113,10 +110,7 @@ static ClusterState upgradeIndices(ClusterState currentState) { Metadata.Builder builder = Metadata.builder(currentState.metadata()); currentState.metadata() .stream() - .filter( - imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) - && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) - ) + .filter(imd -> imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0)) .filter(imd -> imd.isPartialSearchableSnapshot() && notFrozenShardLimitGroup(imd.getSettings())) 
.map(SearchableSnapshotIndexMetadataUpgrader::setShardLimitGroupFrozen) .forEach(imd -> builder.put(imd, true)); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java index 594d356becf87..051a8814ed4c2 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java @@ -119,7 +119,7 @@ private Settings partialNeedsUpgrade() { return searchableSnapshotSettings( IndexVersionUtils.randomVersionBetween( random(), - IndexVersions.V_7_12_0, + IndexVersion.fromId(7_12_00_99), IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ), true @@ -132,7 +132,7 @@ private Settings partialNeedsUpgrade() { private Settings partial_7_13plus() { return shardLimitGroupFrozen( searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_13_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), true ) ); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java index d3f443e05a74f..5840791696497 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java +++ 
b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; import org.elasticsearch.index.store.Store; @@ -400,7 +399,7 @@ public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener Date: Mon, 9 Sep 2024 16:45:48 +0200 Subject: [PATCH 262/417] Uncomment some tests for better mergability with main --- .../CorruptedBlobStoreRepositoryIT.java | 146 +++++++++--------- .../snapshots/SnapshotsService.java | 2 + 2 files changed, 76 insertions(+), 72 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java index a3004e9972063..1542aa652ad3e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.ShardGeneration; import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.repositories.fs.FsRepository; @@ -46,6 +47,7 @@ import java.util.stream.Stream; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -383,81 +385,81 @@ public void testMountCorruptedRepositoryData() throws Exception { // TODO lucene 10 upgrade, deleting old IndexVersions and with this SnapshotsService.OLD_SNAPSHOT_FORMAT // can the following test be deleted? - // public void testHandleSnapshotErrorWithBwCFormat() throws Exception { - // final String repoName = "test-repo"; - // final Path repoPath = randomRepoPath(); - // createRepository(repoName, "fs", repoPath); - // final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - // - // final String indexName = "test-index"; - // createIndex(indexName); - // - // createFullSnapshot(repoName, "snapshot-1"); - // - // // In the old metadata version the shard level metadata could be moved to the next generation for all sorts of reasons, this should - // // not break subsequent repository operations - // logger.info("--> move shard level metadata to new generation"); - // final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); - // final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); - // final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); - // assertFileExists(initialShardMetaPath); - // Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "1")); - // - // startDeleteSnapshot(repoName, oldVersionSnapshot).get(); - // - // createFullSnapshot(repoName, "snapshot-2"); - // } + public void testHandleSnapshotErrorWithBwCFormat() throws Exception { + final String repoName = "test-repo"; + final Path repoPath = randomRepoPath(); + createRepository(repoName, "fs", repoPath); + final String oldVersionSnapshot = 
initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); + + final String indexName = "test-index"; + createIndex(indexName); + + createFullSnapshot(repoName, "snapshot-1"); + + // In the old metadata version the shard level metadata could be moved to the next generation for all sorts of reasons, this should + // not break subsequent repository operations + logger.info("--> move shard level metadata to new generation"); + final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); + final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); + final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); + assertFileExists(initialShardMetaPath); + Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "1")); + + startDeleteSnapshot(repoName, oldVersionSnapshot).get(); + + createFullSnapshot(repoName, "snapshot-2"); + } // TODO lucene 10 upgrade, deleting old IndexVersions and with this SnapshotsService.OLD_SNAPSHOT_FORMAT // can the following test be deleted? 
- // public void testRepairBrokenShardGenerations() throws Exception { - // final String repoName = "test-repo"; - // final Path repoPath = randomRepoPath(); - // createRepository(repoName, "fs", repoPath); - // final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - // - // final String indexName = "test-index"; - // createIndex(indexName); - // - // createFullSnapshot(repoName, "snapshot-1"); - // - // startDeleteSnapshot(repoName, oldVersionSnapshot).get(); - // - // logger.info("--> move shard level metadata to new generation and make RepositoryData point at an older generation"); - // final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); - // final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); - // final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); - // assertFileExists(initialShardMetaPath); - // Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + randomIntBetween(1, 1000))); - // - // final RepositoryData repositoryData = getRepositoryData(repoName); - // final Map snapshotIds = repositoryData.getSnapshotIds() - // .stream() - // .collect(Collectors.toMap(SnapshotId::getUUID, Function.identity())); - // final RepositoryData brokenRepoData = new RepositoryData( - // repositoryData.getUuid(), - // repositoryData.getGenId(), - // snapshotIds, - // snapshotIds.values().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getSnapshotDetails)), - // repositoryData.getIndices().values().stream().collect(Collectors.toMap(Function.identity(), repositoryData::getSnapshots)), - // ShardGenerations.builder().putAll(repositoryData.shardGenerations()).put(indexId, 0, new ShardGeneration(0L)).build(), - // repositoryData.indexMetaDataGenerations(), - // repositoryData.getClusterUUID() - // ); - // Files.write( - // 
repoPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + repositoryData.getGenId()), - // BytesReference.toBytes( - // BytesReference.bytes(brokenRepoData.snapshotsToXContent(XContentFactory.jsonBuilder(), IndexVersion.current())) - // ), - // StandardOpenOption.TRUNCATE_EXISTING - // ); - // - // logger.info("--> recreating repository to clear caches"); - // clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName).get(); - // createRepository(repoName, "fs", repoPath); - // - // createFullSnapshot(repoName, "snapshot-2"); - // } + public void testRepairBrokenShardGenerations() throws Exception { + final String repoName = "test-repo"; + final Path repoPath = randomRepoPath(); + createRepository(repoName, "fs", repoPath); + final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); + + final String indexName = "test-index"; + createIndex(indexName); + + createFullSnapshot(repoName, "snapshot-1"); + + startDeleteSnapshot(repoName, oldVersionSnapshot).get(); + + logger.info("--> move shard level metadata to new generation and make RepositoryData point at an older generation"); + final IndexId indexId = getRepositoryData(repoName).resolveIndexId(indexName); + final Path shardPath = repoPath.resolve("indices").resolve(indexId.getId()).resolve("0"); + final Path initialShardMetaPath = shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + "0"); + assertFileExists(initialShardMetaPath); + Files.move(initialShardMetaPath, shardPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + randomIntBetween(1, 1000))); + + final RepositoryData repositoryData = getRepositoryData(repoName); + final Map snapshotIds = repositoryData.getSnapshotIds() + .stream() + .collect(Collectors.toMap(SnapshotId::getUUID, Function.identity())); + final RepositoryData brokenRepoData = new RepositoryData( + repositoryData.getUuid(), + repositoryData.getGenId(), + snapshotIds, + 
snapshotIds.values().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getSnapshotDetails)), + repositoryData.getIndices().values().stream().collect(Collectors.toMap(Function.identity(), repositoryData::getSnapshots)), + ShardGenerations.builder().putAll(repositoryData.shardGenerations()).put(indexId, 0, new ShardGeneration(0L)).build(), + repositoryData.indexMetaDataGenerations(), + repositoryData.getClusterUUID() + ); + Files.write( + repoPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + repositoryData.getGenId()), + BytesReference.toBytes( + BytesReference.bytes(brokenRepoData.snapshotsToXContent(XContentFactory.jsonBuilder(), IndexVersion.current())) + ), + StandardOpenOption.TRUNCATE_EXISTING + ); + + logger.info("--> recreating repository to clear caches"); + clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName).get(); + createRepository(repoName, "fs", repoPath); + + createFullSnapshot(repoName, "snapshot-2"); + } /** * Tests that a shard snapshot with a corrupted shard index file can still be used for restore and incremental snapshots. diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 4dd86c66b5427..10c8bffce3cde 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -140,6 +140,8 @@ public final class SnapshotsService extends AbstractLifecycleComponent implement // TODO lucene 10 upgrade: check if we need any of the pre-8 versions constants here for bwc // e.g. 
reading from old snapshots + + public static final IndexVersion OLD_SNAPSHOT_FORMAT = IndexVersion.fromId(7_05_00_99); public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = IndexVersion.fromId(7_09_00_99); public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersion.fromId(7_12_00_99); public static final IndexVersion FILE_INFO_WRITER_UUIDS_IN_SHARD_DATA_VERSION = IndexVersion.fromId(7_16_00_99); From d0a1110e719ceaf415b8d5f34e45a4d309dcfa62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 9 Sep 2024 21:44:17 +0200 Subject: [PATCH 263/417] Fix errors in imports --- .../java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java | 1 - .../org/elasticsearch/indices/recovery/RecoverySettings.java | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 61347635ed564..b2f3237e85938 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -40,7 +40,6 @@ import org.elasticsearch.repositories.RepositoryConflictException; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 2dac6323aa83f..c4bf8b545ff24 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ 
b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -29,7 +29,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.SourceFieldMapper; -import 222org.elasticsearch.monitor.os.OsProbe; +import org.elasticsearch.monitor.os.OsProbe; import org.elasticsearch.node.NodeRoleSettings; import java.util.Collection; From afec00bfd72b2c0d919432fde536047def250b76 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 10 Sep 2024 06:11:35 +0000 Subject: [PATCH 264/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-64f5697f537 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 413fd79b178cb..1832e0091e0c1 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-dc47adbbe73 +lucene = 10.0.0-snapshot-64f5697f537 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 586aeabb0d6f0..ab84d742b7c6e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@
    - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 304a1c6a1c2bd00073d9c46caa57e9ed96c0762e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 10 Sep 2024 06:11:35 +0000 Subject: [PATCH 265/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-ce23e15eb54 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d6d849ee12612..5dd08b7fed2e1 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-371fa57d9c7 +lucene = 9.12.0-snapshot-ce23e15eb54 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c186d0a357e38..18f35ebe3bce9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 8fe6c3a5f66f529f1c2b084a1f3e59f014966a3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Sep 2024 14:04:18 +0200 Subject: [PATCH 266/417] Remove usages of random versions before V_8_0_0 in tests --- .../legacygeo/GeoJsonShapeParserTests.java | 3 +- .../legacygeo/GeoWKTShapeParserTests.java | 29 
-------------- .../LegacyGeoShapeFieldMapperTests.java | 5 ++- .../mapper/LegacyGeoShapeFieldTypeTests.java | 3 +- .../MetadataCreateIndexServiceTests.java | 27 ------------- .../index/engine/InternalEngineTests.java | 7 ++-- .../mapper/FieldNamesFieldMapperTests.java | 38 ------------------- .../index/mapper/ParametrizedMapperTests.java | 7 +--- .../index/mapper/TypeParsersTests.java | 26 ------------- .../action/AutoFollowCoordinatorTests.java | 3 +- ...bleSnapshotIndexMetadataUpgraderTests.java | 9 +---- ...GeoShapeWithDocValuesFieldMapperTests.java | 3 +- ...LegacyGeoShapeWithDocValuesQueryTests.java | 4 +- 13 files changed, 15 insertions(+), 149 deletions(-) diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java index 4fa1d7b7a3108..d1f4a0597044c 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.legacygeo.parsers.ShapeParser; @@ -387,7 +386,7 @@ public void testParse3DPolygon() throws IOException, ParseException { LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + final IndexVersion version = IndexVersionUtils.randomCompatibleVersion(random()); final LegacyGeoShapeFieldMapper 
mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( MapperBuilderContext.root(false, false) ); diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java index 6e8a61277cccf..00e911deeb00b 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.builders.CoordinatesBuilder; import org.elasticsearch.legacygeo.builders.EnvelopeBuilder; @@ -34,7 +33,6 @@ import org.elasticsearch.legacygeo.parsers.GeoWKTParser; import org.elasticsearch.legacygeo.parsers.ShapeParser; import org.elasticsearch.legacygeo.test.RandomShapeGenerator; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -323,15 +321,6 @@ public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(builder.toWKT()); XContentParser parser = createParser(xContentBuilder); parser.nextToken(); - - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( - MapperBuilderContext.root(false, false) - ); - - // test store z disabled - ElasticsearchException e = expectThrows(ElasticsearchException.class, 
() -> ShapeParser.parse(parser, mapperBuilder)); - assertThat(e, hasToString(containsString("unable to add coordinate to CoordinateBuilder: coordinate dimensions do not match"))); } public void testParsePolyWithStoredZ() throws IOException { @@ -347,14 +336,6 @@ public void testParsePolyWithStoredZ() throws IOException { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(builder.toWKT()); XContentParser parser = createParser(xContentBuilder); parser.nextToken(); - - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( - MapperBuilderContext.root(false, false) - ); - - ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder); - assertEquals(shapeBuilder.numDimensions(), 3); } public void testParseOpenPolygon() throws IOException { @@ -364,16 +345,6 @@ public void testParseOpenPolygon() throws IOException { XContentParser parser = createParser(xContentBuilder); parser.nextToken(); - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - final LegacyGeoShapeFieldMapper defaultMapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).coerce( - false - ).build(MapperBuilderContext.root(false, false)); - ElasticsearchParseException exception = expectThrows( - ElasticsearchParseException.class, - () -> ShapeParser.parse(parser, defaultMapperBuilder) - ); - assertEquals("invalid LinearRing found (coordinates are not closed)", exception.getMessage()); - final LegacyGeoShapeFieldMapper coercingMapperBuilder = new LegacyGeoShapeFieldMapper.Builder( "test", IndexVersion.current(), diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java 
b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java index 0a0bb12bedbae..44b73a2b7fca3 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -32,7 +33,6 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.legacygeo.test.TestLegacyGeoShapeFieldMapperPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AssumptionViolatedException; @@ -52,6 +52,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +@UpdateForV9 // can we remove this entire test? 
@SuppressWarnings("deprecation") public class LegacyGeoShapeFieldMapperTests extends MapperTestCase { @@ -122,7 +123,7 @@ protected boolean supportsMeta() { @Override protected IndexVersion getVersion() { - return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + return IndexVersions.V_8_0_0; } public void testLegacySwitches() throws IOException { diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java index dc74b9cd295ce..cfe28d065194d 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType; -import org.elasticsearch.test.index.IndexVersionUtils; import java.io.IOException; import java.util.List; @@ -36,7 +35,7 @@ public void testSetStrategyName() { } public void testFetchSourceValue() throws IOException { - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + IndexVersion version = IndexVersions.V_8_0_0; MappedFieldType mapper = new LegacyGeoShapeFieldMapper.Builder("field", version, false, true).build( MapperBuilderContext.root(false, false) ).fieldType(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 17d6c1999ba17..9aae549ddfe2e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ 
b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -1328,33 +1328,6 @@ public void testRejectTranslogRetentionSettings() { ); } - public void testDeprecateTranslogRetentionSettings() { - request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); - final Settings.Builder settings = Settings.builder(); - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), TimeValue.timeValueMillis(between(1, 120))); - } else { - settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), between(1, 128) + "mb"); - } - settings.put(SETTING_VERSION_CREATED, IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0)); - request.settings(settings.build()); - aggregateIndexSettings( - ClusterState.EMPTY_STATE, - request, - Settings.EMPTY, - null, - null, - Settings.EMPTY, - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - randomShardLimitService(), - Collections.emptySet() - ); - assertWarnings( - "Translog retention settings [index.translog.retention.age] " - + "and [index.translog.retention.size] are deprecated and effectively ignored. They will be removed in a future version." 
- ); - } - public void testDeprecateSimpleFS() { request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); final Settings.Builder settings = Settings.builder(); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index fc11634c88fe6..556ca6411ce83 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -95,6 +95,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -6616,6 +6617,7 @@ public void testRebuildLocalCheckpointTrackerAndVersionMap() throws Exception { } } + @UpdateForV9 // can this test be removed, it testing 7x versions behaviour public void testRecoverFromHardDeletesIndex() throws Exception { IndexWriterFactory hardDeletesWriter = (directory, iwc) -> new IndexWriter(directory, iwc) { boolean isTombstone(Iterable doc) { @@ -6664,10 +6666,7 @@ public long softUpdateDocuments(Term term, Iterable b.startObject("_field_names").field("enabled", false).endObject()) - ); - - assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); - FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); - assertFalse(fieldNamesMapper.fieldType().isEnabled()); - - ParsedDocument doc = docMapper.parse(source(b -> b.field("field", "value"))); - assertNull(doc.rootDoc().get("_field_names")); - } - - /** - * Merging the "_field_names" enabled setting is forbidden in 8.0, but we still want to tests the behavior on pre-8 indices - */ - public void testMergingMappingsBefore8() throws Exception { - MapperService mapperService = 
createMapperService( - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), - mapping(b -> {}) - ); - - merge(mapperService, topMapping(b -> b.startObject("_field_names").field("enabled", false).endObject())); - assertFalse(mapperService.documentMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); - - merge(mapperService, topMapping(b -> b.startObject("_field_names").field("enabled", true).endObject())); - assertTrue(mapperService.documentMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index d79d0a8d3cf8b..f1569aaffe2c3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.test.TransportVersionUtils; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -555,7 +554,7 @@ public void testBWCunknownParametersfromDynamicTemplates() { {"type":"test_mapper","some_unknown_parameter":true,"required":"value"}"""; TestMapper mapper = fromMapping( mapping, - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), + IndexVersion.fromId(7_10_00_99), TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_7_0_0, @@ -564,10 +563,6 @@ public void testBWCunknownParametersfromDynamicTemplates() { true ); assertNotNull(mapper); - 
assertWarnings( - "Parameter [some_unknown_parameter] is used in a dynamic template mapping and has no effect on type [test_mapper]. " - + "Usage will result in an error in future major versions and should be removed." - ); assertEquals(""" {"field":{"type":"test_mapper","required":"value"}}""", Strings.toString(mapper)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java index 035466d93ab06..3d25e6929939b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java @@ -86,32 +86,6 @@ public void testMultiFieldWithinMultiField() throws IOException { IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY); when(mapperService.getIndexSettings()).thenReturn(indexSettings); - IndexVersion olderVersion = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - MappingParserContext olderContext = new MappingParserContext( - null, - type -> typeParser, - type -> null, - olderVersion, - () -> TransportVersions.MINIMUM_COMPATIBLE, - null, - ScriptCompiler.NONE, - mapperService.getIndexAnalyzers(), - mapperService.getIndexSettings(), - ProvidedIdFieldMapper.NO_FIELD_DATA, - query -> { - throw new UnsupportedOperationException(); - } - ); - - TextFieldMapper.PARSER.parse("some-field", fieldNode, olderContext); - assertWarnings( - "At least one multi-field, [sub-field], " - + "was encountered that itself contains a multi-field. Defining multi-fields within a multi-field is deprecated " - + "and is not supported for indices created in 8.0 and later. To migrate the mappings, all instances of [fields] " - + "that occur within a [fields] block should be removed from the mappings, either by flattening the chained " - + "[fields] blocks into a single level, or switching to [copy_to] if appropriate." 
- ); - // For indices created in 8.0 or later, we should throw an error. Map fieldNodeCopy = XContentHelper.convertToMap(BytesReference.bytes(mapping), true, mapping.contentType()).v2(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 467ef3c68f648..b9fd8aeaab869 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -2520,7 +2519,7 @@ private static ClusterState createRemoteClusterState( ) { Settings.Builder indexSettings; if (enableSoftDeletes == false) { - indexSettings = settings(IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0)).put( + indexSettings = settings(IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.current())).put( IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false ); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java index 051a8814ed4c2..7ed197cccaa8e 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ 
b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java @@ -116,14 +116,7 @@ private Settings normal() { * other than 7.12 versions here, but not 8.0 (since a rolling upgrade to 8.0 requires an upgrade to 7.latest first). */ private Settings partialNeedsUpgrade() { - return searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersion.fromId(7_12_00_99), - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ), - true - ); + return searchableSnapshotSettings(randomFrom(IndexVersion.fromId(7_12_00_99), IndexVersion.fromId(7_17_00_99)), true); } /** diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 58fde288cfc60..4e2f48b7d0706 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.junit.AssumptionViolatedException; @@ -280,7 +279,7 @@ public void testInvalidCurrentVersion() { } public void testGeoShapeLegacyMerge() throws Exception { - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + IndexVersion version = IndexVersions.V_8_0_0; MapperService m = createMapperService(version, fieldMapping(b -> b.field("type", getFieldName()))); Exception e = expectThrows( 
IllegalArgumentException.class, diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java index 053931a882e4c..aecf88c8a9dc4 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.spatial.index.query; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -39,6 +40,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; +@LuceneTestCase.AwaitsFix(bugUrl = "TODO lucene 10 upgrade") public class LegacyGeoShapeWithDocValuesQueryTests extends GeoShapeQueryTestCase { @SuppressWarnings("deprecation") @@ -72,7 +74,7 @@ protected void createMapping(String indexName, String fieldName, Settings settin ex.getMessage(), containsString("using deprecated parameters [tree] in mapper [" + fieldName + "] of type [geo_shape] is no longer allowed") ); - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + IndexVersion version = IndexVersions.V_8_0_0; finalSetting = settings(version).put(settings).build(); indicesAdmin().prepareCreate(indexName).setMapping(xcb).setSettings(finalSetting).get(); } From 8a018849cb6e2b6d265c7afd74d3390879a483bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Sep 2024 15:46:31 +0200 Subject: [PATCH 267/417] More test fixed related to legacy versions --- .../common/CommonAnalysisPluginTests.java | 39 
----------------- .../vectors/SparseVectorFieldMapperTests.java | 42 ------------------- .../IndexDeprecationChecksTests.java | 2 +- .../predicate/regex/RLikePattern.java | 3 +- 4 files changed, 3 insertions(+), 83 deletions(-) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java index 3a3ba6ae276de..31b7cff51ba47 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -52,25 +52,6 @@ public void testNGramFilterInCustomAnalyzerDeprecationError() throws IOException ex.getMessage() ); } - - final Settings settingsPre7 = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put( - IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.V_8_10_0) - ) - .put("index.analysis.analyzer.custom_analyzer.type", "custom") - .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") - .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") - .put("index.analysis.filter.my_ngram.type", "nGram") - .build(); - try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { - createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, commonAnalysisPlugin); - assertWarnings( - "The [nGram] token filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [ngram] instead." 
- ); - } } /** @@ -101,26 +82,6 @@ public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOExcep ex.getMessage() ); } - - final Settings settingsPre7 = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put( - IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.V_8_10_0) - ) - .put("index.analysis.analyzer.custom_analyzer.type", "custom") - .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") - .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") - .put("index.analysis.filter.my_ngram.type", "edgeNGram") - .build(); - - try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { - createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, commonAnalysisPlugin); - assertWarnings( - "The [edgeNGram] token filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [edge_ngram] instead." 
- ); - } } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java index 9cfbbad5ebf50..2bfd8740a971c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.FeatureField; import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentMapper; @@ -22,11 +21,8 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matchers; import org.junit.AssumptionViolatedException; @@ -226,44 +222,6 @@ protected IndexVersion boostNotAllowedIndexVersion() { return NEW_SPARSE_VECTOR_INDEX_VERSION; } - public void testSparseVectorWith7xIndex() throws Exception { - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), PREVIOUS_SPARSE_VECTOR_INDEX_VERSION); - - XContentBuilder builder = XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("my-vector") - .field("type", "sparse_vector") - .endObject() - .endObject() - .endObject() - .endObject(); - - DocumentMapper mapper = createDocumentMapper(version, builder); - 
assertWarnings(SparseVectorFieldMapper.ERROR_MESSAGE_7X); - - // Check that new vectors cannot be indexed. - int[] indexedDims = { 65535, 50, 2 }; - float[] indexedValues = { 0.5f, 1800f, -34567.11f }; - BytesReference source = BytesReference.bytes( - XContentFactory.jsonBuilder() - .startObject() - .startObject("my-vector") - .field(Integer.toString(indexedDims[0]), indexedValues[0]) - .field(Integer.toString(indexedDims[1]), indexedValues[1]) - .field(Integer.toString(indexedDims[2]), indexedValues[2]) - .endObject() - .endObject() - ); - - DocumentParsingException indexException = expectThrows( - DocumentParsingException.class, - () -> mapper.parse(new SourceToParse("id", source, XContentType.JSON)) - ); - assertThat(indexException.getCause().getMessage(), containsString(SparseVectorFieldMapper.ERROR_MESSAGE_7X)); - } - public void testSparseVectorUnsupportedIndex() throws Exception { IndexVersion version = IndexVersionUtils.randomVersionBetween( random(), diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index bf75ff76c66bd..dc2cc5662acab 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -33,7 +33,7 @@ public void testOldIndicesCheck() { .build(); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 7.0", + "Old index with a compatibility version < 8.0", "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + "breaking-changes-8.0.html", "This index has version: " + createdWith.toReleaseVersion(), false, diff --git 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java index 528872ca9b4cf..1e29a4a229d6c 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,7 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize(new RegExp(regexpPattern).toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override From 09b68e2689108016bd9967b0aefe75ee084d664a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Sep 2024 18:37:39 +0200 Subject: [PATCH 268/417] Mute CompositeRolesStoreTests.testXPackUserCanAccessNonRestrictedIndices --- .../xpack/security/authz/store/CompositeRolesStoreTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 693bd9b868ede..e5c9b99c2431c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -2736,6 +2736,7 @@ public void testSecurityProfileUserHasAccessForOnlyProfileIndex() { 
assertThat(CompositeRolesStore.tryGetRoleDescriptorForInternalUser(subject).get().getClusterPrivileges(), emptyArray()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/112453") public void testXPackUserCanAccessNonRestrictedIndices() { for (String action : Arrays.asList( TransportGetAction.TYPE.name(), From ca97d863cd3d4857a75ea4ed4fbe44b3f28eedb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Sep 2024 19:05:15 +0200 Subject: [PATCH 269/417] Fix failing knn yaml test --- .../resources/rest-api-spec/test/search/370_profile.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index dc79961ae78cd..8a03833cb003a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -212,7 +212,7 @@ dfs knn vector profiling: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } @@ -276,7 +276,7 @@ dfs knn vector profiling with vector_operations_count: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } + - match: { profile.shards.0.dfs.knn.0.query.0.description: 
"DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } From 84d4e86f997aea30f16b9d5a4926e5b24af39dcb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Sep 2024 20:02:52 +0200 Subject: [PATCH 270/417] Fix docs test related to knn queries --- docs/reference/search/profile.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 3fed14231808c..5f1a0ccfdd6b4 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1298,7 +1298,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "query" : [ { "type" : "DocAndScoreQuery", - "description" : "DocAndScore[100]", + "description" : "DocAndScoreQuery[0,...][0.008961825,...],0.008961825", "time_in_nanos" : 444414, "breakdown" : { "set_min_competitive_score_count" : 0, From 77f24aeb461e660d4e71c1a2f01745e3d0d8df4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Sep 2024 20:04:48 +0200 Subject: [PATCH 271/417] Null out missing codec in BWCLucene70Codec This temporarily gets us past missing codec that we attempt to load at node startup, even if we don't use it. 
--- .../java/org/elasticsearch/indices/SystemIndexDescriptor.java | 2 +- .../src/main/java/org/elasticsearch/xpack/fleet/Fleet.java | 2 +- .../xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index aa724af1c2e74..131f4634027dd 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -881,7 +881,7 @@ static Automaton buildAutomaton(String pattern, String alias) { final String patternAsRegex = patternToRegex(pattern); final String aliasAsRegex = alias == null ? null : patternToRegex(alias); - final Automaton patternAutomaton = new RegExp(patternAsRegex).toAutomaton(); + final Automaton patternAutomaton = Operations.determinize(new RegExp(patternAsRegex).toAutomaton(), DEFAULT_DETERMINIZE_WORK_LIMIT); if (aliasAsRegex == null) { return patternAutomaton; diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java index 40e41cb18bdf8..da5a276eea2ed 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java @@ -144,7 +144,7 @@ private static SystemIndexDescriptor fleetActionsSystemIndexDescriptor() { .setMappings(request.mappings()) .setSettings(request.settings()) .setPrimaryIndex(".fleet-actions-" + CURRENT_INDEX_VERSION) - .setIndexPattern(".fleet-actions~(-results*)") + .setIndexPattern(".fleet-actions-[^r]*") .setAliasName(".fleet-actions") .setDescription("Fleet agents") .build(); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java index 0e689138acd8f..9b226306edf62 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java @@ -32,7 +32,8 @@ public class BWCLucene70Codec extends BWCCodec { private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); private final StoredFieldsFormat storedFieldsFormat; - private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene70"); + // TODO lucene 10 upgrade: resolve below hack that gets us past missing Lucene70 codec for now + private final DocValuesFormat defaultDVFormat = null; private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { From 3cea216ced7f507c68dfc9c488fcc60e9e66d5a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Sep 2024 20:11:00 +0200 Subject: [PATCH 272/417] Partially revert previous commit --- .../java/org/elasticsearch/indices/SystemIndexDescriptor.java | 2 +- .../src/main/java/org/elasticsearch/xpack/fleet/Fleet.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 131f4634027dd..aa724af1c2e74 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -881,7 +881,7 @@ static Automaton buildAutomaton(String pattern, String alias) { final String patternAsRegex = patternToRegex(pattern); final 
String aliasAsRegex = alias == null ? null : patternToRegex(alias); - final Automaton patternAutomaton = Operations.determinize(new RegExp(patternAsRegex).toAutomaton(), DEFAULT_DETERMINIZE_WORK_LIMIT); + final Automaton patternAutomaton = new RegExp(patternAsRegex).toAutomaton(); if (aliasAsRegex == null) { return patternAutomaton; diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java index da5a276eea2ed..40e41cb18bdf8 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java @@ -144,7 +144,7 @@ private static SystemIndexDescriptor fleetActionsSystemIndexDescriptor() { .setMappings(request.mappings()) .setSettings(request.settings()) .setPrimaryIndex(".fleet-actions-" + CURRENT_INDEX_VERSION) - .setIndexPattern(".fleet-actions-[^r]*") + .setIndexPattern(".fleet-actions~(-results*)") .setAliasName(".fleet-actions") .setDescription("Fleet agents") .build(); From 8bf41b86ec929fe456a1cf02ec608930c8ae63c3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 11 Sep 2024 06:11:21 +0000 Subject: [PATCH 273/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-7964682ddf5 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 5dd08b7fed2e1..4e0318eaf84df 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-ce23e15eb54 +lucene = 9.12.0-snapshot-7964682ddf5 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 
18f35ebe3bce9..0587be8956a94 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From fff3fbb6f708eb7dc9a68fa509caf2588774ea4b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 11 Sep 2024 06:12:27 +0000 Subject: [PATCH 274/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-7c529ce092d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1832e0091e0c1..2627132fb9c49 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-64f5697f537 +lucene = 10.0.0-snapshot-7c529ce092d bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index ab84d742b7c6e..1e29eceb8478b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0ee5e4f0f72ddcfa330375b9c281f5a861cc07a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 11 Sep 2024 10:24:10 +0200 Subject: [PATCH 275/417] Fix compilation issues after last 
Lucene 10 snapshot merge --- .../index/fielddata/ordinals/MultiOrdinals.java | 4 ++-- .../index/query/CombinedFieldsQueryBuilder.java | 4 ++-- .../index/search/MultiMatchQueryParser.java | 2 +- .../lucene/queries/SearchAfterSortedDocQuery.java | 2 +- .../bucket/composite/CompositeAggregator.java | 2 +- .../bucket/filter/QueryToFilterAdapter.java | 6 ++++-- .../bucket/global/GlobalAggregator.java | 11 +++++++++-- .../subphase/highlight/FragmentBuilderHelper.java | 6 +++--- .../search/internal/ContextIndexSearcher.java | 13 +++++++------ .../org/elasticsearch/search/query/QueryPhase.java | 2 +- .../suggest/completion/CompletionSuggester.java | 3 ++- .../suggest/phrase/DirectCandidateGenerator.java | 4 ++-- .../search/suggest/phrase/LaplaceScorer.java | 2 +- .../suggest/phrase/LinearInterpolatingScorer.java | 2 +- .../suggest/phrase/NoisyChannelSpellChecker.java | 2 +- .../search/suggest/phrase/StupidBackoffScorer.java | 4 ++-- .../index/engine/InternalEngineTests.java | 2 +- .../elasticsearch/search/SearchServiceTests.java | 12 ++++++------ .../search/internal/ContextIndexSearcherTests.java | 10 ++++++---- .../search/query/QueryPhaseCollectorTests.java | 2 +- .../phrase/DirectCandidateGeneratorTests.java | 8 ++++---- .../compute/lucene/LuceneSliceQueue.java | 4 ++-- .../compute/lucene/PartialLeafReaderContext.java | 5 +++++ .../esql/enrich/EnrichQuerySourceOperator.java | 5 ++++- .../xpack/lucene/bwc/codecs/lucene50/ForUtil.java | 12 ++++++------ 25 files changed, 75 insertions(+), 54 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index cff98fa6d84a0..825b7cb9af8c8 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -39,13 +39,13 @@ public static boolean 
significantlySmallerThanSinglePackedOrdinals( float acceptableOverheadRatio ) { int bitsPerOrd = PackedInts.bitsRequired(numOrds); - bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue; + bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue(); // Compute the worst-case number of bits per value for offsets in the worst case, eg. if no docs have a value at the // beginning of the block and all docs have one at the end of the block final float avgValuesPerDoc = (float) numDocsWithValue / maxDoc; final int maxDelta = (int) Math.ceil(OFFSETS_PAGE_SIZE * (1 - avgValuesPerDoc) * avgValuesPerDoc); int bitsPerOffset = PackedInts.bitsRequired(maxDelta) + 1; // +1 because of the sign - bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue; + bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue(); final long expectedMultiSizeInBytes = (long) numDocsWithValue * bitsPerOrd + (long) maxDoc * bitsPerOffset; final long expectedSingleSizeInBytes = (long) maxDoc * bitsPerOrd; diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index b869096c12fb2..66151dcd91cd2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -411,8 +411,8 @@ public Query createPhraseQuery(String field, String queryText, int phraseSlop) { protected Query newSynonymQuery(String field, TermAndBoost[] terms) { CombinedFieldQuery.Builder query = new CombinedFieldQuery.Builder(); for (TermAndBoost termAndBoost : terms) { - assert termAndBoost.boost == BoostAttribute.DEFAULT_BOOST; - BytesRef bytes = termAndBoost.term; + assert 
termAndBoost.boost() == BoostAttribute.DEFAULT_BOOST; + BytesRef bytes = termAndBoost.term(); query.addTerm(bytes); } for (FieldAndBoost fieldAndBoost : fields) { diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java index 4c3020bdcd9d5..23e182f190d1a 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java @@ -199,7 +199,7 @@ protected Query createBooleanPrefixQuery(String field, String queryText, Boolean protected Query newSynonymQuery(String field, TermAndBoost[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { - values[i] = terms[i].term; + values[i] = terms[i].term(); } return blendTerms(context, values, tieBreaker, lenient, blendedFields); } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index 5de0d7fafdf27..8be30c60aee51 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -68,7 +68,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, 1.0f) { @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { - Sort segmentSort = context.reader().getMetaData().getSort(); + Sort segmentSort = context.reader().getMetaData().sort(); if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) { throw new IOException("search sort :[" + sort + "] does not match the index sort:[" + segmentSort + "]"); } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 638afbb3df261..e5f9edc2314fd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -277,7 +277,7 @@ private static boolean isMaybeMultivalued(LeafReaderContext context, SortField s * optimization and null if index sort is not applicable. */ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException { - Sort indexSort = context.reader().getMetaData().getSort(); + Sort indexSort = context.reader().getMetaData().sort(); if (indexSort == null) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java index f7a613fbe142b..7f33a43163027 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java @@ -214,7 +214,8 @@ long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live) // No hits in this segment. return 0; } - scorer.score(counter, live); + IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx); + scorer.score(counter, live, partition.minDocId, partition.maxDocId); return counter.readAndReset(ctx); } @@ -227,7 +228,8 @@ void collect(LeafReaderContext ctx, LeafCollector collector, Bits live) throws I // No hits in this segment. 
return; } - scorer.score(collector, live); + IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx); + scorer.score(collector, live, partition.minDocId, partition.maxDocId); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index ce3031d4cddf8..8f7f299ecf8c2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -7,7 +7,9 @@ */ package org.elasticsearch.search.aggregations.bucket.global; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; @@ -39,11 +41,16 @@ public GlobalAggregator(String name, AggregatorFactories subFactories, Aggregati @Override public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) throws IOException { // Run sub-aggregations on child documents - BulkScorer scorer = weight.bulkScorer(aggCtx.getLeafReaderContext()); + LeafReaderContext leafReaderContext = aggCtx.getLeafReaderContext(); + BulkScorer scorer = weight.bulkScorer(leafReaderContext); if (scorer == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } grow(1); + IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment( + leafReaderContext + ); + scorer.score(new LeafCollector() { @Override public void collect(int doc) throws IOException { @@ -54,7 +61,7 @@ public void collect(int doc) throws IOException { public void setScorer(Scorable scorer) throws IOException { 
sub.setScorer(scorer); } - }, aggCtx.getLeafReaderContext().reader().getLiveDocs()); + }, aggCtx.getLeafReaderContext().reader().getLiveDocs(), partition.minDocId, partition.maxDocId); return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index 5421cd59a23e4..4e72ac6ff1450 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -39,12 +39,12 @@ public static WeightedFragInfo fixWeightedFragInfo(WeightedFragInfo fragInfo) { * than in this hack... aka. "we are are working on in!" */ final List subInfos = fragInfo.getSubInfos(); CollectionUtil.introSort(subInfos, (o1, o2) -> { - int startOffset = o1.getTermsOffsets().get(0).getStartOffset(); - int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset(); + int startOffset = o1.termsOffsets().get(0).getStartOffset(); + int startOffset2 = o2.termsOffsets().get(0).getStartOffset(); return Integer.compare(startOffset, startOffset2); }); return new WeightedFragInfo( - Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), + Math.min(fragInfo.getSubInfos().get(0).termsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), fragInfo.getEndOffset(), subInfos, fragInfo.getTotalBoost() diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index a70e0db249541..4de4b3e48e6f5 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -246,7 +246,8 @@ public static 
LeafSlice[] computeSlices(List leaves, int maxS throw new IllegalArgumentException("maxSliceNum must be >= 1 (got " + maxSliceNum + ")"); } if (maxSliceNum == 1) { - return new LeafSlice[] { new LeafSlice(new ArrayList<>(leaves)) }; + return new LeafSlice[] { + new LeafSlice(new ArrayList<>(leaves.stream().map(LeafReaderContextPartition::createForEntireSegment).toList())) }; } // total number of documents to be searched final int numDocs = leaves.stream().mapToInt(l -> l.reader().maxDoc()).sum(); @@ -294,7 +295,7 @@ private static LeafSlice[] computeSlices(List leaves, int min for (List currentLeaf : queue) { // LeafSlice ctor reorders leaves so that leaves within a slice preserve the order they had within the IndexReader. // This is important given how Elasticsearch sorts leaves by descending @timestamp to get better query performance. - slices[upto++] = new LeafSlice(currentLeaf); + slices[upto++] = new LeafSlice(currentLeaf.stream().map(LeafReaderContextPartition::createForEntireSegment).toList()); } return slices; @@ -347,10 +348,10 @@ private T search(Weight weight, CollectorManager } final List> listTasks = new ArrayList<>(leafSlices.length); for (int i = 0; i < leafSlices.length; ++i) { - final LeafReaderContext[] leaves = leafSlices[i].leaves; + final LeafReaderContextPartition[] leaves = leafSlices[i].partitions; final C collector = collectors.get(i); listTasks.add(() -> { - search(Arrays.asList(leaves), weight, collector); + search(leaves, weight, collector); return collector; }); } @@ -415,7 +416,7 @@ private static class TimeExceededException extends RuntimeException { } @Override - protected void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException { + protected void searchLeaf(LeafReaderContext ctx, int minDocId, int maxDocId, Weight weight, Collector collector) throws IOException { cancellable.checkCancelled(); final LeafCollector leafCollector; try { @@ -435,7 +436,7 @@ protected void searchLeaf(LeafReaderContext 
ctx, Weight weight, Collector collec bulkScorer = new CancellableBulkScorer(bulkScorer, cancellable::checkCancelled); } try { - bulkScorer.score(leafCollector, liveDocs); + bulkScorer.score(leafCollector, liveDocs, minDocId, maxDocId); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index af0240e9497f2..8a7d95644a1c0 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -240,7 +240,7 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort } final Sort sort = sortAndFormats.sort; for (LeafReaderContext ctx : reader.leaves()) { - Sort indexSort = ctx.reader().getMetaData().getSort(); + Sort indexSort = ctx.reader().getMetaData().sort(); if (indexSort == null || Lucene.canEarlyTerminate(sort, indexSort) == false) { return false; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index d10a067a963d3..2c65d9a4e7e86 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -78,12 +78,13 @@ private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSu query = (CompletionQuery) query.rewrite(searcher); Weight weight = query.createWeight(searcher, collector.scoreMode(), 1f); for (LeafReaderContext context : searcher.getIndexReader().leaves()) { + IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment(context); BulkScorer scorer = weight.bulkScorer(context); if 
(scorer != null) { LeafCollector leafCollector = null; try { leafCollector = collector.getLeafCollector(context); - scorer.score(leafCollector, context.reader().getLiveDocs()); + scorer.score(leafCollector, context.reader().getLiveDocs(), partition.minDocId, partition.maxDocId); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index b95971d13c11d..dd8f2df80f7dd 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -132,7 +132,7 @@ public CandidateSet drawCandidates(CandidateSet set) throws IOException { * because that's what {@link DirectSpellChecker#suggestSimilar} expects * when filtering terms. 
*/ - int threshold = thresholdTermFrequency(original.termStats.docFreq); + int threshold = thresholdTermFrequency(original.termStats.docFreq()); if (threshold == Integer.MAX_VALUE) { // the threshold is the max possible frequency so we can skip the search return set; @@ -225,7 +225,7 @@ public void nextToken() throws IOException { } private static double score(TermStats termStats, double errorScore, long dictionarySize) { - return errorScore * (((double) termStats.totalTermFreq + 1) / ((double) dictionarySize + 1)); + return errorScore * (((double) termStats.totalTermFreq() + 1) / ((double) dictionarySize + 1)); } // package protected for test diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java index dce063d6e655b..f1de1b1e81729 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java @@ -35,7 +35,7 @@ protected double scoreUnigram(Candidate word) throws IOException { @Override protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { join(separator, spare, w_1.term, word.term); - return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq + alpha * numTerms); + return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq() + alpha * numTerms); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java index 27a427a374962..eef9fc7cec7bb 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java @@ -57,7 +57,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { 
if (count < 1) { return unigramLambda * scoreUnigram(word); } - return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq)) + unigramLambda * scoreUnigram(word); + return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq())) + unigramLambda * scoreUnigram(word); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index 6e1524dfc8588..76c7f8e7838c9 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -69,7 +69,7 @@ public void nextToken() throws IOException { if (posIncAttr.getPositionIncrement() == 0 && typeAttribute.type() == SynonymFilter.TYPE_SYNONYM) { assert currentSet != null; TermStats termStats = generator.termStats(term); - if (termStats.docFreq > 0) { + if (termStats.docFreq() > 0) { currentSet.addOneCandidate(generator.createCandidate(BytesRef.deepCopyOf(term), termStats, realWordLikelihood)); } } else { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java index 4aa5663d36f63..503083237359c 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java @@ -34,7 +34,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { if (count < 1) { return discount * scoreUnigram(word); } - return count / (w_1.termStats.totalTermFreq + 0.00000000001d); + return count / (w_1.termStats.totalTermFreq() + 0.00000000001d); } @Override @@ -49,7 +49,7 @@ protected double scoreTrigram(Candidate w, Candidate w_1, Candidate w_2) throws join(separator, spare, w_2.term, 
w_1.term, w.term); long trigramCount = frequency(spare.get()); if (trigramCount < 1) { - return discount * (bigramCount / (w_1.termStats.totalTermFreq + 0.00000000001d)); + return discount * (bigramCount / (w_1.termStats.totalTermFreq() + 0.00000000001d)); } return trigramCount / (bigramCount + 0.00000000001d); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 556ca6411ce83..faae0d90344b7 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -6776,7 +6776,7 @@ public void testStoreHonorsLuceneVersion() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { LeafReader leafReader = getOnlyLeafReader(searcher.getIndexReader()); - assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().getCreatedVersionMajor()); + assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().createdVersionMajor()); } } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 80594f6766bac..2eb3aeab21ff1 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -2794,7 +2794,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "DFS supports parallel 
collection, so the number of slices should be > 1.", @@ -2824,7 +2824,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", @@ -2839,7 +2839,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { ContextIndexSearcher searcher = searchContext.searcher(); assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as FETCH does not support parallel collection and thus runs on the calling" @@ -2855,7 +2855,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { ContextIndexSearcher searcher = searchContext.searcher(); assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as NONE does not support parallel collection.", @@ -2878,7 +2878,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { ContextIndexSearcher searcher = searchContext.searcher(); assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new 
TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 when QUERY parallel collection is disabled.", @@ -2915,7 +2915,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index d1f4cb0e1d857..06f9d880c34d7 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -75,7 +75,6 @@ import java.io.IOException; import java.io.UncheckedIOException; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.IdentityHashMap; @@ -250,7 +249,7 @@ public void testConcurrentCollection() throws Exception { Integer.MAX_VALUE, 1 ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertEquals(numDocs, totalHits.intValue()); int numExpectedTasks = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), Integer.MAX_VALUE, 1).length; // check that each slice except for one that executes on the calling thread goes to the executor, no 
matter the queue size @@ -405,7 +404,7 @@ private static void assertSlices(LeafSlice[] slices, int numDocs, int numThreads int sumDocs = 0; assertThat(slices.length, lessThanOrEqualTo(numThreads)); for (LeafSlice slice : slices) { - int sliceDocs = Arrays.stream(slice.leaves).mapToInt(l -> l.reader().maxDoc()).sum(); + int sliceDocs = slice.getMaxDocs(); assertThat(sliceDocs, greaterThanOrEqualTo((int) (0.1 * numDocs))); sumDocs += sliceDocs; } @@ -587,7 +586,10 @@ public Query rewrite(IndexSearcher indexSearcher) { return null; } }; - Integer hitCount = contextIndexSearcher.search(testQuery, new TotalHitCountCollectorManager()); + Integer hitCount = contextIndexSearcher.search( + testQuery, + new TotalHitCountCollectorManager(contextIndexSearcher.getSlices()) + ); assertEquals(0, hitCount.intValue()); assertTrue(contextIndexSearcher.timeExceeded()); } finally { diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index dbfd9d83ee887..92e051aa799b4 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -1510,7 +1510,7 @@ public void setScorer(Scorable scorer) throws IOException { setScorerCalled = true; if (expectedScorable != null) { while (expectedScorable.equals(scorer.getClass()) == false && scorer instanceof FilterScorable) { - scorer = scorer.getChildren().iterator().next().child; + scorer = scorer.getChildren().iterator().next().child(); } assertEquals(expectedScorable, scorer.getClass()); } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index cd618279c091d..ad2cb068f3971 100644 --- 
a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -210,8 +210,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); @@ -226,8 +226,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index 1c9c97a364fc7..9633051781f4a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -120,7 +120,7 @@ static List> docSlices(IndexReader indexReader, i } static List> segmentSlices(List 
leafContexts) { - IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE); - return Arrays.stream(gs).map(g -> Arrays.stream(g.leaves).map(PartialLeafReaderContext::new).toList()).toList(); + IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE, false); + return Arrays.stream(gs).map(g -> Arrays.stream(g.partitions).map(PartialLeafReaderContext::new).toList()).toList(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java index e9063c9597c5f..c92dc75397729 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; /** * A subset of a {@link LeafReaderContext}. 
@@ -16,6 +17,10 @@ * @param maxDoc one more than the last document */ public record PartialLeafReaderContext(LeafReaderContext leafReaderContext, int minDoc, int maxDoc) { + public PartialLeafReaderContext(IndexSearcher.LeafReaderContextPartition partition) { + this(partition.ctx, partition.minDocId, partition.maxDocId); + } + public PartialLeafReaderContext(LeafReaderContext leafReaderContext) { this(leafReaderContext, 0, leafReaderContext.reader().maxDoc()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java index 6937f1a8c7772..765828359da42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java @@ -89,7 +89,10 @@ public Page getOutput() { continue; } final DocCollector collector = new DocCollector(docsBuilder); - scorer.score(collector, leaf.reader().getLiveDocs()); + IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment( + leaf + ); + scorer.score(collector, leaf.reader().getLiveDocs(), partition.minDocId, partition.maxDocId); int matches = collector.matches; if (segmentsBuilder != null) { diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java index a567f25869407..007b398624d56 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java @@ -105,14 +105,14 @@ private static int encodedSize(PackedInts.Format format, 
int packedIntsVersion, for (int bpv = 1; bpv <= 32; ++bpv) { final FormatAndBits formatAndBits = PackedInts.fastestFormatAndBits(BLOCK_SIZE, bpv, acceptableOverheadRatio); - assert formatAndBits.format.isSupported(formatAndBits.bitsPerValue); - assert formatAndBits.bitsPerValue <= 32; - encodedSizes[bpv] = encodedSize(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - encoders[bpv] = PackedInts.getEncoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - decoders[bpv] = PackedInts.getDecoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); + assert formatAndBits.format().isSupported(formatAndBits.bitsPerValue()); + assert formatAndBits.bitsPerValue() <= 32; + encodedSizes[bpv] = encodedSize(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + encoders[bpv] = PackedInts.getEncoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + decoders[bpv] = PackedInts.getDecoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); iterations[bpv] = computeIterations(decoders[bpv]); - out.writeVInt(formatAndBits.format.getId() << 5 | (formatAndBits.bitsPerValue - 1)); + out.writeVInt(formatAndBits.format().getId() << 5 | (formatAndBits.bitsPerValue() - 1)); } } From ced0f7ba7927b48b6299ad64bab35b7605acd38d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 11 Sep 2024 16:16:14 +0200 Subject: [PATCH 276/417] Fix UOE in ContextIndexSearcher after last Lucene merge --- .../search/internal/ContextIndexSearcher.java | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 4de4b3e48e6f5..09b3bd886dd05 100644 --- 
a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -55,6 +55,7 @@ import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.Executor; +import java.util.stream.Collectors; /** * Context-aware extension of {@link IndexSearcher}. @@ -247,7 +248,13 @@ public static LeafSlice[] computeSlices(List leaves, int maxS } if (maxSliceNum == 1) { return new LeafSlice[] { - new LeafSlice(new ArrayList<>(leaves.stream().map(LeafReaderContextPartition::createForEntireSegment).toList())) }; + new LeafSlice( + new ArrayList<>( + leaves.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ) + ) }; } // total number of documents to be searched final int numDocs = leaves.stream().mapToInt(l -> l.reader().maxDoc()).sum(); @@ -295,7 +302,11 @@ private static LeafSlice[] computeSlices(List leaves, int min for (List currentLeaf : queue) { // LeafSlice ctor reorders leaves so that leaves within a slice preserve the order they had within the IndexReader. // This is important given how Elasticsearch sorts leaves by descending @timestamp to get better query performance. 
- slices[upto++] = new LeafSlice(currentLeaf.stream().map(LeafReaderContextPartition::createForEntireSegment).toList()); + slices[upto++] = new LeafSlice( + currentLeaf.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ); } return slices; From 2430fff026f6e609d03b06acbb7024465831bf48 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 12 Sep 2024 06:12:06 +0000 Subject: [PATCH 277/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-74e3c44063a --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 2627132fb9c49..67593601bf75d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 10.0.0-snapshot-7c529ce092d +lucene = 10.0.0-snapshot-74e3c44063a bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1e29eceb8478b..cc52035770a37 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d45dbc780f6b83431a26619205d5e97a533d5539 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 12 Sep 2024 06:12:07 +0000 Subject: [PATCH 278/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-ab262f917d4 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 
++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4e0318eaf84df..a25ac5a6e4c6a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-7964682ddf5 +lucene = 9.12.0-snapshot-ab262f917d4 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0587be8956a94..c22b238ae4ac0 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d24200e95c0f284688eb0f3f96432e432b3d139e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 12 Sep 2024 12:23:12 +0200 Subject: [PATCH 279/417] Fix compile issues after latest Lucene snapshot update --- .../common/HighlighterWithAnalyzersTests.java | 2 +- .../datastreams/DataStreamIT.java | 4 +- .../datastreams/TSDBIndexingIT.java | 2 +- .../ingest/geoip/GeoIpDownloaderIT.java | 4 +- .../script/expression/MoreExpressionIT.java | 64 ++++---- .../mustache/SearchTemplateResponseTests.java | 2 +- .../painless/SimilarityScriptTests.java | 4 +- .../RankFeaturesMapperIntegrationIT.java | 8 +- .../TokenCountFieldMapperIntegrationIT.java | 2 +- .../extras/MatchOnlyTextFieldMapperTests.java | 4 +- .../join/aggregations/ChildrenIT.java | 8 +- .../join/query/ChildQuerySearchIT.java | 126 +++++++-------- .../elasticsearch/join/query/InnerHitsIT.java | 40 ++--- .../PercolatorMatchedSlotSubFetchPhase.java | 2 +- 
.../percolator/CandidateQueryTests.java | 38 ++--- .../percolator/PercolateQueryTests.java | 8 +- .../PercolatorQuerySearchTests.java | 2 +- .../index/reindex/CrossClusterReindexIT.java | 10 +- .../reindex/remote/RemoteResponseParsers.java | 4 +- .../AnnotatedTextHighlighterTests.java | 2 +- .../action/IndicesRequestIT.java | 4 +- .../admin/indices/create/CreateIndexIT.java | 4 +- .../admin/indices/create/SplitIndexIT.java | 2 +- .../action/bulk/BulkProcessor2RetryIT.java | 6 +- .../action/bulk/BulkProcessorRetryIT.java | 6 +- .../bulk/TransportSimulateBulkActionIT.java | 2 +- .../action/bulk/WriteAckDelayIT.java | 4 +- .../action/search/PointInTimeIT.java | 8 +- .../action/search/TransportSearchIT.java | 14 +- .../elasticsearch/aliases/IndexAliasesIT.java | 30 ++-- .../broadcast/BroadcastActionsIT.java | 2 +- .../document/DocumentActionsIT.java | 4 +- .../elasticsearch/index/FinalPipelineIT.java | 8 +- .../index/engine/MaxDocsLimitIT.java | 4 +- .../mapper/CopyToMapperIntegrationIT.java | 2 +- .../index/store/ExceptionRetryIT.java | 2 +- .../elasticsearch/indexing/IndexActionIT.java | 8 +- .../indices/IndicesRequestCacheIT.java | 46 +++--- .../state/CloseWhileRelocatingShardsIT.java | 2 +- .../recovery/RecoveryWhileUnderLoadIT.java | 6 +- .../elasticsearch/recovery/RelocationIT.java | 8 +- .../elasticsearch/routing/AliasRoutingIT.java | 2 +- .../routing/PartitionedRoutingIT.java | 6 +- .../elasticsearch/search/SearchTimeoutIT.java | 4 +- .../search/aggregations/CombiIT.java | 2 +- .../search/aggregations/EquivalenceIT.java | 4 +- .../aggregations/FiltersAggsRewriteIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 6 +- .../bucket/DateHistogramOffsetIT.java | 6 +- .../aggregations/bucket/DateRangeIT.java | 18 +-- .../search/aggregations/bucket/FilterIT.java | 2 +- .../search/aggregations/bucket/FiltersIT.java | 4 +- .../aggregations/bucket/GeoDistanceIT.java | 2 +- .../aggregations/bucket/HistogramIT.java | 2 +- 
.../search/aggregations/bucket/NestedIT.java | 2 +- .../search/aggregations/bucket/RangeIT.java | 2 +- .../aggregations/metrics/ExtendedStatsIT.java | 4 +- .../metrics/HDRPercentileRanksIT.java | 4 +- .../metrics/HDRPercentilesIT.java | 4 +- .../metrics/ScriptedMetricIT.java | 28 ++-- .../search/aggregations/metrics/StatsIT.java | 2 +- .../search/aggregations/metrics/SumIT.java | 2 +- .../metrics/TDigestPercentileRanksIT.java | 4 +- .../metrics/TDigestPercentilesIT.java | 4 +- .../aggregations/metrics/TopHitsIT.java | 40 ++--- .../aggregations/metrics/ValueCountIT.java | 2 +- .../basic/SearchWhileCreatingIndexIT.java | 6 +- .../search/basic/SearchWhileRelocatingIT.java | 6 +- .../basic/TransportTwoNodesSearchIT.java | 14 +- .../search/fetch/subphase/InnerHitsIT.java | 50 +++--- .../highlight/HighlighterSearchIT.java | 6 +- .../search/fields/SearchFieldsIT.java | 36 ++--- .../functionscore/DecayFunctionScoreIT.java | 30 ++-- .../functionscore/ExplainableScriptIT.java | 2 +- .../search/functionscore/FunctionScoreIT.java | 18 +-- .../search/functionscore/QueryRescorerIT.java | 10 +- .../functionscore/RandomScoreFunctionIT.java | 6 +- .../search/nested/SimpleNestedIT.java | 2 +- .../search/profile/query/QueryProfilerIT.java | 4 +- .../elasticsearch/search/query/ExistsIT.java | 2 +- .../search/query/MultiMatchQueryIT.java | 10 +- .../search/query/QueryStringIT.java | 2 +- .../search/query/SearchQueryIT.java | 8 +- .../search/query/SimpleQueryStringIT.java | 2 +- .../retriever/MinimalCompoundRetrieverIT.java | 2 +- .../retriever/RankDocRetrieverBuilderIT.java | 28 ++-- .../search/retriever/RetrieverRewriteIT.java | 8 +- .../search/routing/SearchPreferenceIT.java | 6 +- .../routing/SearchReplicaSelectionIT.java | 6 +- .../scriptfilter/ScriptQuerySearchIT.java | 8 +- .../search/scroll/DuelScrollIT.java | 10 +- .../search/scroll/SearchScrollIT.java | 34 ++-- .../search/searchafter/SearchAfterIT.java | 2 +- .../search/simple/SimpleSearchIT.java | 2 +- 
.../search/slice/SearchSliceIT.java | 10 +- .../search/sort/FieldSortIT.java | 60 +++---- .../search/sort/SimpleSortIT.java | 8 +- .../search/source/MetadataFetchingIT.java | 4 +- .../similarity/SimilarityIT.java | 4 +- .../search/BottomSortValuesCollector.java | 2 +- .../CountOnlyQueryPhaseResultConsumer.java | 4 +- .../action/search/SearchPhaseController.java | 4 +- .../elasticsearch/common/lucene/Lucene.java | 4 +- .../codec/DeduplicatingFieldInfosFormat.java | 2 +- .../index/engine/LuceneChangesSnapshot.java | 2 +- .../reindex/ClientScrollableHitSource.java | 2 +- .../lucene/grouping/TopFieldGroups.java | 4 +- .../rest/action/cat/RestCountAction.java | 4 +- .../rest/action/search/RestCountAction.java | 2 +- .../org/elasticsearch/search/SearchHits.java | 6 +- .../aggregations/metrics/InternalTopHits.java | 10 +- .../metrics/MetricInspectionHelper.java | 2 +- ...iversifyingChildrenByteKnnVectorQuery.java | 2 +- ...versifyingChildrenFloatKnnVectorQuery.java | 2 +- .../search/vectors/ESKnnByteVectorQuery.java | 2 +- .../search/vectors/ESKnnFloatVectorQuery.java | 2 +- .../SynonymsManagementAPIService.java | 6 +- ...ountOnlyQueryPhaseResultConsumerTests.java | 8 +- .../action/search/DfsQueryPhaseTests.java | 6 +- .../action/search/FetchSearchPhaseTests.java | 10 +- .../search/SearchPhaseControllerTests.java | 28 ++-- .../SearchQueryThenFetchAsyncActionTests.java | 16 +- .../search/SearchResponseMergerTests.java | 28 ++-- .../action/search/SearchResponseTests.java | 4 +- .../common/lucene/LuceneTests.java | 8 +- .../deps/lucene/VectorHighlighterTests.java | 8 +- .../index/IndexServiceTests.java | 6 +- .../index/engine/InternalEngineTests.java | 32 ++-- .../AbstractFieldDataImplTestCase.java | 12 +- .../AbstractStringFieldDataTestCase.java | 4 +- .../FieldStatsProviderRefreshTests.java | 6 +- .../index/mapper/DoubleIndexingDocTests.java | 14 +- .../index/mapper/TextFieldMapperTests.java | 2 +- .../query/TermsSetQueryBuilderTests.java | 8 +- 
.../IndexLevelReplicationTests.java | 2 +- .../AbstractNumberNestedSortingTestCase.java | 14 +- .../nested/DoubleNestedSortingTests.java | 2 +- .../nested/FloatNestedSortingTests.java | 2 +- .../search/nested/NestedSortingTests.java | 42 ++--- .../index/shard/IndexReaderWrapperTests.java | 8 +- .../index/shard/IndexShardTests.java | 8 +- .../similarity/ScriptedSimilarityTests.java | 4 +- .../indices/IndicesRequestCacheTests.java | 2 +- ...PassGroupingCollectorSearchAfterTests.java | 6 +- .../SinglePassGroupingCollectorTests.java | 6 +- .../lucene/queries/BlendedTermQueryTests.java | 8 +- .../CustomUnifiedHighlighterTests.java | 2 +- .../search/SearchServiceTests.java | 4 +- .../bucket/ShardSizeTestCase.java | 4 +- .../composite/CompositeAggregatorTests.java | 10 +- .../terms/RareTermsAggregatorTests.java | 2 +- .../bucket/terms/TermsAggregatorTests.java | 4 +- .../metrics/InternalTopHitsTests.java | 8 +- .../metrics/TopHitsAggregatorTests.java | 14 +- .../internal/ContextIndexSearcherTests.java | 2 +- .../query/ProfileCollectorManagerTests.java | 4 +- .../query/QueryPhaseCollectorTests.java | 64 ++++---- .../search/query/QueryPhaseTests.java | 150 +++++++++--------- .../search/query/QueryPhaseTimeoutTests.java | 8 +- .../AbstractDenseVectorQueryTestCase.java | 2 +- .../vectors/KnnScoreDocQueryBuilderTests.java | 4 +- .../vectors/VectorSimilarityQueryTests.java | 8 +- .../snapshots/SnapshotResiliencyTests.java | 9 +- ...ncySimulatingBlobStoreRepositoryTests.java | 2 +- .../search/SearchResponseUtils.java | 2 +- .../metrics/AbstractGeoTestCase.java | 2 +- .../metrics/CentroidAggregationTestBase.java | 2 +- .../SpatialBoundsAggregationTestBase.java | 2 +- .../geo/BasePointShapeQueryTestCase.java | 18 +-- .../search/geo/BaseShapeQueryTestCase.java | 8 +- .../geo/DatelinePointShapeQueryTestCase.java | 6 +- .../geo/GeoBoundingBoxQueryIntegTestCase.java | 12 +- .../search/geo/GeoShapeQueryTestCase.java | 2 +- .../elasticsearch/test/ESIntegTestCase.java | 6 +- 
.../hamcrest/ElasticsearchAssertions.java | 8 +- .../search/AsyncSearchIntegTestCase.java | 4 +- .../search/AsyncSearchSingleNodeTests.java | 4 +- .../elasticsearch/xpack/CcrIntegTestCase.java | 2 +- .../sourceonly/SourceOnlySnapshotIT.java | 2 +- .../AbstractTransportGetResourcesAction.java | 2 +- .../ml/dataframe/evaluation/Evaluation.java | 2 +- .../xpack/core/security/ScrollHelper.java | 6 +- .../profile/SuggestProfilesResponse.java | 4 +- .../SourceOnlySnapshotShardTests.java | 4 +- .../sourceonly/SourceOnlySnapshotTests.java | 2 +- .../DocumentSubsetReaderTests.java | 8 +- .../accesscontrol/FieldSubsetReaderTests.java | 4 +- .../xpack/enrich/EnrichPolicyRunnerTests.java | 70 ++++---- .../EnrichShardMultiSearchActionTests.java | 2 +- .../connector/ConnectorIndexService.java | 4 +- .../syncjob/ConnectorSyncJobIndexService.java | 2 +- .../rules/QueryRulesIndexService.java | 2 +- .../search/SearchApplicationIndexService.java | 2 +- .../xpack/eql/action/EqlSearchResponse.java | 4 +- .../ShardBulkInferenceActionFilterIT.java | 2 +- .../mapper/SemanticTextFieldMapperTests.java | 8 +- .../action/TransportGetPipelineAction.java | 8 +- .../versionfield/VersionStringFieldTests.java | 38 ++--- .../ml/integration/DeleteExpiredDataIT.java | 2 +- .../MlNativeAutodetectIntegTestCase.java | 2 +- .../xpack/ml/integration/PersistJobIT.java | 6 +- .../xpack/ml/integration/RegressionIT.java | 2 +- .../ml/integration/RevertModelSnapshotIT.java | 2 +- .../integration/RunDataFrameAnalyticsIT.java | 4 +- .../BucketCorrelationAggregationIT.java | 2 +- .../xpack/ml/integration/DatafeedCcsIT.java | 2 +- .../integration/MlDistributedFailureIT.java | 2 +- .../TransportGetOverallBucketsAction.java | 2 +- .../TransportPutTrainedModelAction.java | 4 +- ...ransportStartDataFrameAnalyticsAction.java | 2 +- .../extractor/DataExtractorUtils.java | 2 +- .../persistence/DatafeedConfigProvider.java | 4 +- .../extractor/DataFrameDataExtractor.java | 4 +- .../dataframe/inference/InferenceRunner.java 
| 2 +- .../ml/dataframe/steps/InferenceStep.java | 2 +- .../TrainTestSplitterFactory.java | 2 +- .../ChunkedTrainedModelRestorer.java | 2 +- .../persistence/TrainedModelProvider.java | 2 +- .../ml/job/persistence/JobConfigProvider.java | 2 +- .../ml/job/persistence/JobDataDeleter.java | 2 +- .../job/persistence/JobResultsProvider.java | 18 +-- .../retention/ExpiredForecastsRemover.java | 2 +- .../persistence/BatchedDocumentsIterator.java | 2 +- .../SearchAfterDocumentsIterator.java | 2 +- .../monitoring/integration/MonitoringIT.java | 4 +- .../local/LocalExporterIntegTests.java | 4 +- .../LocalExporterResourceIntegTests.java | 6 +- .../action/TransportGetStackTracesAction.java | 4 +- .../action/TransportGetStatusAction.java | 2 +- .../xpack/rank/rrf/RRFRankMultiShardIT.java | 6 +- .../xpack/rank/rrf/RRFRankSingleShardIT.java | 6 +- .../PinnedQueryBuilderIT.java | 4 +- ...pshotsCanMatchOnCoordinatorIntegTests.java | 8 +- ...napshotsRecoverFromSnapshotIntegTests.java | 2 +- ...archableSnapshotsRepositoryIntegTests.java | 8 +- ...tsBlobStoreCacheMaintenanceIntegTests.java | 2 +- .../BlobStoreCacheMaintenanceService.java | 2 +- ...tRemoteClusterSecurityDlsAndFlsRestIT.java | 2 +- .../RemoteClusterSecurityCcrIT.java | 2 +- .../RemoteClusterSecurityCcrMigrationIT.java | 2 +- .../RemoteClusterSecurityMutualTlsIT.java | 2 +- .../RemoteClusterSecurityRestIT.java | 2 +- .../RemoteClusterSecuritySpecialUserIT.java | 2 +- .../RemoteClusterSecurityTopologyRestIT.java | 4 +- .../DateMathExpressionIntegTests.java | 2 +- .../integration/DlsFlsRequestCacheTests.java | 2 +- .../DocumentLevelSecurityRandomTests.java | 2 +- .../DocumentLevelSecurityTests.java | 22 +-- .../FieldLevelSecurityRandomTests.java | 6 +- .../integration/FieldLevelSecurityTests.java | 48 +++--- .../integration/KibanaUserRoleIntegTests.java | 8 +- .../MultipleIndicesPermissionsTests.java | 2 +- .../authc/esnative/NativeRealmIntegTests.java | 8 +- .../security/authz/ReadActionsTests.java | 12 +- 
.../security/authz/SecurityScrollTests.java | 4 +- .../security/profile/ProfileIntegTests.java | 2 +- .../xpack/security/authc/ApiKeyService.java | 2 +- .../authc/esnative/NativeUsersStore.java | 6 +- .../authz/store/NativeRolesStore.java | 12 +- .../security/profile/ProfileService.java | 6 +- .../security/support/SecurityMigrations.java | 4 +- .../slm/SLMSnapshotBlockingIntegTests.java | 2 +- .../SnapshotBasedIndexRecoveryIT.java | 4 +- .../GeoGridAggAndQueryConsistencyIT.java | 4 +- .../search/GeoShapeWithDocValuesIT.java | 2 +- .../search/ShapeQueryOverShapeTests.java | 2 +- .../spatial/search/ShapeQueryTestCase.java | 14 +- .../spatial/ingest/CircleProcessorTests.java | 8 +- .../xpack/sql/execution/search/Querier.java | 4 +- .../search/extractor/TopHitsAggExtractor.java | 2 +- .../TransformUsageTransportAction.java | 2 +- .../TimeBasedCheckpointProvider.java | 2 +- .../IndexBasedTransformConfigManager.java | 4 +- .../common/AbstractCompositeAggFunction.java | 2 +- .../watcher/WatcherConcreteIndexTests.java | 2 +- .../actions/TimeThrottleIntegrationTests.java | 2 +- .../history/HistoryActionConditionTests.java | 6 +- .../HistoryTemplateEmailMappingsTests.java | 2 +- .../HistoryTemplateHttpMappingsTests.java | 2 +- ...storyTemplateIndexActionMappingsTests.java | 2 +- ...storyTemplateSearchInputMappingsTests.java | 2 +- .../AbstractWatcherIntegrationTestCase.java | 10 +- .../test/integration/BootStrapTests.java | 6 +- .../integration/RejectedExecutionTests.java | 2 +- .../test/integration/SingleNodeTests.java | 2 +- .../test/integration/WatchMetadataTests.java | 2 +- .../transform/TransformIntegrationTests.java | 12 +- .../xpack/watcher/WatcherService.java | 2 +- .../execution/TriggeredWatchStore.java | 2 +- .../input/search/ExecutableSearchInput.java | 2 +- .../actions/TransportQueryWatchesAction.java | 4 +- .../mapper/WildcardFieldMapperTests.java | 16 +- .../oldrepos/OldRepositoryAccessIT.java | 8 +- .../TokenBackwardsCompatibilityIT.java | 2 +- 294 files 
changed, 1221 insertions(+), 1214 deletions(-) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index 2693245ac2757..9269808045bfa 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -277,7 +277,7 @@ public void testPhrasePrefix() throws IOException { boolQuery().should(matchPhrasePrefixQuery("field1", "test")).should(matchPhrasePrefixQuery("field1", "bro")) ).highlighter(highlight().field("field1").order("score").preTags("").postTags("")), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); for (int i = 0; i < 2; i++) { assertHighlight( resp, diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index ebe5546c0907f..d58da2e2f841a 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1705,7 +1705,7 @@ public void testSegmentsSortedOnTimestampDesc() throws Exception { assertResponse( prepareSearch("metrics-foo").addFetchField(new FieldAndFormat(DEFAULT_TIMESTAMP_FIELD, "epoch_millis")).setSize(totalDocs), resp -> { - assertEquals(totalDocs, resp.getHits().getTotalHits().value); + assertEquals(totalDocs, resp.getHits().getTotalHits().value()); SearchHit[] hits = resp.getHits().getHits(); assertEquals(totalDocs, hits.length); @@ -2026,7 +2026,7 @@ static void indexDocs(String dataStream, int numDocs) { static 
void verifyDocs(String dataStream, long expectedNumHits, List expectedIndices) { assertResponse(prepareSearch(dataStream).setSize((int) expectedNumHits), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); Arrays.stream(resp.getHits().getHits()).forEach(hit -> assertTrue(expectedIndices.contains(hit.getIndex()))); }); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index a0a0681dbd245..4e2b60dc44301 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -544,7 +544,7 @@ public void testTrimId() throws Exception { var searchRequest = new SearchRequest(dataStreamName); searchRequest.source().trackTotalHits(true); assertResponse(client().search(searchRequest), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numBulkRequests * numDocsPerBulk)); String id = searchResponse.getHits().getHits()[0].getId(); assertThat(id, notNullValue()); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index d994bd70eb7a0..5444eaeb8be5c 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -255,8 +255,8 @@ public void testGeoIpDatabasesDownload() 
throws Exception { res -> { try { TotalHits totalHits = res.getHits().getTotalHits(); - assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation); - assertEquals(size, totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation()); + assertEquals(size, totalHits.value()); assertEquals(size, res.getHits().getHits().length); List data = new ArrayList<>(); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java index 69e33863b0f2b..40cb434f41b9b 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java @@ -80,7 +80,7 @@ public void testBasic() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'] + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -90,7 +90,7 @@ public void testFunction() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertNoFailuresAndResponse(buildRequest("doc['foo'] + abs(1)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -101,7 +101,7 @@ public void testBasicUsingDotValue() throws Exception { prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'].value + 1"), rsp -> { - assertEquals(1, 
rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -124,7 +124,7 @@ public void testScore() throws Exception { assertResponse(req, rsp -> { assertNoFailures(rsp); SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals("1", hits.getAt(0).getId()); assertEquals("3", hits.getAt(1).getId()); assertEquals("2", hits.getAt(2).getId()); @@ -147,25 +147,25 @@ public void testDateMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getMonth() + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, 
rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -181,25 +181,25 @@ public void testDateObjectMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.year"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -237,7 +237,7 @@ public void testMultiValueMethods() throws Exception { 
assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -245,7 +245,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -253,7 +253,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -261,7 +261,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -269,7 +269,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp 
-> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -277,7 +277,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -285,7 +285,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -294,7 +294,7 @@ public void testMultiValueMethods() throws Exception { // make sure count() works for missing assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -303,7 +303,7 @@ public void testMultiValueMethods() throws Exception { // make sure .empty works in the same way assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 
5.0 : 2.0"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -341,7 +341,7 @@ public void testSparseField() throws Exception { ); assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); @@ -377,7 +377,7 @@ public void testParams() throws Exception { String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)"; assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -500,7 +500,7 @@ public void testSpecialValueVariable() throws Exception { ); assertResponse(req, rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); Stats stats = rsp.getAggregations().get("int_agg"); assertEquals(39.0, stats.getMax(), 0.0001); @@ -654,22 +654,22 @@ public void testGeo() throws Exception { refresh(); // access .lat assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .lon 
assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // call haversin assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); }); } @@ -692,14 +692,14 @@ public void testBoolean() throws Exception { ); // access .value assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 1 : 0"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -707,7 +707,7 @@ public void testBoolean() throws Exception { // ternary operator // vip's have a 50% discount assertNoFailuresAndResponse(buildRequest("doc['vip'] ? 
doc['price']/2 : doc['price']"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -726,7 +726,7 @@ public void testFilterScript() throws Exception { Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script))); assertNoFailuresAndResponse(builder, rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 26b4a45d4da53..8174899e87646 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -137,7 +137,7 @@ protected void assertEqualInstances(SearchTemplateResponse expectedInstance, Sea SearchResponse expectedResponse = expectedInstance.getResponse(); SearchResponse newResponse = newInstance.getResponse(); - assertEquals(expectedResponse.getHits().getTotalHits().value, newResponse.getHits().getTotalHits().value); + assertEquals(expectedResponse.getHits().getTotalHits().value(), newResponse.getHits().getTotalHits().value()); assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001); } } diff --git 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index dbfd1327fb998..ef94cf3f19867 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -84,7 +84,7 @@ public void testBasics() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } @@ -133,7 +133,7 @@ public void testWeightScript() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java index c6544bac2b13c..479524258f66b 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java @@ -42,7 +42,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), 
equalTo(20f)); } @@ -51,7 +51,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(2000f)); } @@ -66,7 +66,7 @@ public void testRankFeaturesTermQuery() throws IOException { .minimumShouldMatch(1) ), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); for (SearchHit hit : searchResponse.getHits().getHits()) { if (hit.getId().equals("all")) { assertThat(hit.getScore(), equalTo(50f)); @@ -82,7 +82,7 @@ public void testRankFeaturesTermQuery() throws IOException { ); assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index 08a3d046b00f7..330af4acdfb5a 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -202,7 +202,7 @@ private SearchRequestBuilder prepareTokenCountFieldMapperSearch() { } private void assertSearchReturns(SearchResponse result, String... 
ids) { - assertThat(result.getHits().getTotalHits().value, equalTo((long) ids.length)); + assertThat(result.getHits().getTotalHits().value(), equalTo((long) ids.length)); assertThat(result.getHits().getHits().length, equalTo(ids.length)); List foundIds = new ArrayList<>(); for (SearchHit hit : result.getHits()) { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index e1fbc2e149441..0a4b01887f9f7 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -88,8 +88,8 @@ private void assertPhraseQuery(MapperService mapperService) throws IOException { SearchExecutionContext context = createSearchExecutionContext(mapperService, newSearcher(reader)); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "brown fox"); TopDocs docs = context.searcher().search(queryBuilder.toQuery(context), 1); - assertThat(docs.totalHits.value, equalTo(1L)); - assertThat(docs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(docs.totalHits.value(), equalTo(1L)); + assertThat(docs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(docs.scoreDocs[0].doc, equalTo(0)); } } diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java index 595d845d40b3d..b3d8adbba0eb3 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -114,7 +114,7 @@ public void 
testParentWithMultipleBuckets() { logger.info("bucket={}", bucket.getKey()); Children childrenBucket = bucket.getAggregations().get("to_comment"); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - logger.info("total_hits={}", topHits.getHits().getTotalHits().value); + logger.info("total_hits={}", topHits.getHits().getTotalHits().value()); for (SearchHit searchHit : topHits.getHits()) { logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); } @@ -128,7 +128,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(2L)); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(2L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(2L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("e")); assertThat(topHits.getHits().getAt(1).getId(), equalTo("f")); @@ -140,7 +140,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); categoryBucket = categoryTerms.getBucketByKey("c"); @@ -151,7 +151,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); } ); diff --git 
a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index dc0f3ea8bb8c6..3f22c430e903f 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -106,7 +106,7 @@ public void testMultiLevelChild() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -116,7 +116,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -126,7 +126,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -134,7 +134,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ 
-142,7 +142,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -160,7 +160,7 @@ public void test2744() throws IOException { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } ); @@ -181,7 +181,7 @@ public void testSimpleChildQuery() throws Exception { // TEST FETCHING _parent from child assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -194,7 +194,7 @@ public void testSimpleChildQuery() throws Exception { boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); 
assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -207,7 +207,7 @@ public void testSimpleChildQuery() throws Exception { // HAS CHILD assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")), response -> { assertHitCount(response, 1L); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); }); @@ -306,8 +306,8 @@ public void testHasParentFilter() throws Exception { ).setSize(numChildDocsPerParent), response -> { Set childIds = parentToChildrenEntry.getValue(); - assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); - for (int i = 0; i < response.getHits().getTotalHits().value; i++) { + assertThat(response.getHits().getTotalHits().value(), equalTo((long) childIds.size())); + for (int i = 0; i < response.getHits().getTotalHits().value(); i++) { assertThat(childIds.remove(response.getHits().getAt(i).getId()), is(true)); assertThat(response.getHits().getAt(i).getScore(), is(1.0f)); } @@ -340,7 +340,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -348,7 +348,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -356,7 +356,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -366,7 +366,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -374,7 +374,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -382,7 +382,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); 
assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -425,7 +425,7 @@ public void testScopedFacet() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -457,7 +457,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -471,7 +471,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\"")); } @@ -646,7 +646,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); 
assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -666,7 +666,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -686,7 +686,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -706,7 +706,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("17")); @@ -767,7 +767,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); } ); @@ -777,7 +777,7 @@ public void 
testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); } ); @@ -800,7 +800,7 @@ public void testHasChildInnerHitsHighlighting() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits(); assertThat(searchHits.length, equalTo(1)); @@ -887,7 +887,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("p_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("p000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("p001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("p002")); @@ -902,7 +902,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("c_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(500L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(500L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("c000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("c001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("c002")); @@ -931,7 +931,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", 
termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -942,7 +942,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c3")); assertThat(response.getHits().getAt(1).getId(), equalTo("c4")); } @@ -960,7 +960,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -971,7 +971,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); assertThat(response.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); } @@ -995,7 +995,7 @@ public void 
testHasChildQueryWithMinimumScore() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)).setMinScore(3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); assertThat(response.getHits().getAt(0).getScore(), equalTo(3.0f)); } @@ -1410,7 +1410,7 @@ public void testParentChildQueriesViaScrollApi() throws Exception { 10, (respNum, response) -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); } ); } @@ -1468,7 +1468,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = NONE assertResponse(minMaxQuery(ScoreMode.None, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1478,7 +1478,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("4")); @@ -1486,7 +1486,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1494,7 +1494,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.None, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1504,7 +1504,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1514,7 +1514,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1522,7 +1522,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1532,7 +1532,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = SUM assertResponse(minMaxQuery(ScoreMode.Total, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1542,7 +1542,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1550,7 +1550,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); }); @@ -1558,7 +1558,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Total, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Total, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); 
assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1568,7 +1568,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1578,7 +1578,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1586,7 +1586,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1596,7 +1596,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = MAX assertResponse(minMaxQuery(ScoreMode.Max, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); 
assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1606,7 +1606,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1614,7 +1614,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1622,7 +1622,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Max, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Max, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1632,7 +1632,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ 
-1642,7 +1642,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1650,7 +1650,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1660,7 +1660,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = AVG assertResponse(minMaxQuery(ScoreMode.Avg, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1670,7 +1670,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1678,7 +1678,7 @@ public void testMinMaxChildren() throws Exception { }); 
assertResponse(minMaxQuery(ScoreMode.Avg, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1686,7 +1686,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Avg, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1696,7 +1696,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1706,7 +1706,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1714,7 +1714,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, 2), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); }); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java index f851678b6c9d6..44b6dc8328aaf 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java @@ -127,7 +127,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getAt(0).getId(), equalTo("c1")); assertThat(innerHits.getAt(1).getId(), equalTo("c2")); @@ -147,7 +147,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getAt(0).getId(), equalTo("c4")); assertThat(innerHits.getAt(1).getId(), equalTo("c5")); @@ -279,7 +279,7 @@ public void testRandomParentChild() throws Exception { assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) 
child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); @@ -289,7 +289,7 @@ public void testRandomParentChild() throws Exception { offset1 += child1InnerObjects[parent]; inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child2InnerObjects[parent])); for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); @@ -346,12 +346,12 @@ public void testInnerHitsOnHasParent() throws Exception { SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); searchHit = response.getHits().getAt(1); assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); } ); @@ -393,11 +393,11 @@ public void testParentChildMultipleLayers() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("3")); innerHits = 
innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("5")); } ); @@ -416,11 +416,11 @@ public void testParentChildMultipleLayers() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("4")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("6")); } ); @@ -481,34 +481,34 @@ public void testRoyals() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls"); - assertThat(innerHits.getTotalHits().value, equalTo(4L)); + assertThat(innerHits.getTotalHits().value(), equalTo(4L)); assertThat(innerHits.getAt(0).getId(), equalTo("earl1")); assertThat(innerHits.getAt(1).getId(), equalTo("earl2")); assertThat(innerHits.getAt(2).getId(), equalTo("earl3")); assertThat(innerHits.getAt(3).getId(), equalTo("earl4")); SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1")); innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2")); innerInnerHits = 
innerHits.getAt(2).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3")); innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4")); innerHits = response.getHits().getAt(0).getInnerHits().get("princes"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("prince")); innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("king")); } ); @@ -531,12 +531,12 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { response -> { assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); } @@ -548,7 +548,7 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { assertResponse(prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC), response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2")); }); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index fe4bfc7741c87..dce8a3a9b1886 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -90,7 +90,7 @@ public void process(HitContext hitContext) throws IOException { query = percolatorIndexSearcher.rewrite(query); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC)); - if (topDocs.totalHits.value == 0) { + if (topDocs.totalHits.value() == 0) { // This hit didn't match with a percolate query, // likely to happen when percolating multiple documents continue; diff --git 
a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 12a9d8560f946..06cac20b066e6 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -646,7 +646,7 @@ public void testRangeQueries() throws Exception { v ); TopDocs topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); @@ -654,7 +654,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); @@ -662,7 +662,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(2, topDocs.scoreDocs[0].doc); @@ -670,7 +670,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + 
assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3, topDocs.scoreDocs[0].doc); @@ -678,7 +678,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(4, topDocs.scoreDocs[0].doc); @@ -689,7 +689,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(5, topDocs.scoreDocs[0].doc); } @@ -835,14 +835,14 @@ public void testPercolateMatchAll() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); assertEquals(4, topDocs.scoreDocs[2].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); @@ -874,7 +874,7 @@ public void testFunctionScoreQuery() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - 
assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -932,13 +932,13 @@ public void testPercolateSmallAndLargeDocument() throws Exception { BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -972,13 +972,13 @@ public void testPercolateSmallAndLargeDocument() throws Exception { assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -1031,7 +1031,7 @@ public void testDuplicatedClauses() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, 
sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); } @@ -1065,7 +1065,7 @@ public void testDuplicatedClauses2() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1073,7 +1073,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1081,7 +1081,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1116,7 +1116,7 @@ public void testMsmAndRanges_disjunction() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, 
percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1140,7 +1140,7 @@ private void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100); try { - assertThat(topDocs.totalHits.value, equalTo(controlTopDocs.totalHits.value)); + assertThat(topDocs.totalHits.value(), equalTo(controlTopDocs.totalHits.value())); assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); for (int j = 0; j < topDocs.scoreDocs.length; j++) { assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index 8ae24dfc47475..14d5bec64e9bf 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -117,7 +117,7 @@ public void testPercolateQuery() throws Exception { ) ); TopDocs topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); assertThat(topDocs.scoreDocs.length, equalTo(1)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); Explanation explanation = shardSearcher.explain(query, 0); @@ -136,7 +136,7 @@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); explanation = shardSearcher.explain(query, 1); @@ -165,7 +165,7 
@@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(4L)); + assertThat(topDocs.totalHits.value(), equalTo(4L)); query = new PercolateQuery( "_name", @@ -177,7 +177,7 @@ public void testPercolateQuery() throws Exception { new MatchNoDocsQuery("") ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); explanation = shardSearcher.explain(query, 3); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java index a4c3ce6e7a3f7..4456d8c182b94 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java @@ -408,7 +408,7 @@ public void testPercolateNamedQueries() { QueryBuilder query = new PercolateQueryBuilder("my_query", List.of(house1_doc, house2_doc), XContentType.JSON); assertResponse(client().prepareSearch("houses").setQuery(query), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertThat(hits[0].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java index e0396039029c5..1ee516df44667 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java +++ 
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java @@ -69,7 +69,7 @@ public void testReindexFromRemoteGivenIndexExists() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -84,7 +84,7 @@ public void testReindexFromRemoteGivenSameIndexNames() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -113,7 +113,7 @@ public void testReindexManyTimesFromRemoteGivenSameIndexNames() throws Exception final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } } @@ -145,7 +145,7 @@ public void testReindexFromRemoteGivenSimpleDateMathIndexName() throws Interrupt final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -161,7 +161,7 @@ public void 
testReindexFromRemoteGivenComplexDateMathIndexName() throws Interrup final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java index 76c24aef01f6c..a5c2b5ac73234 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java @@ -96,8 +96,8 @@ class Fields { HITS_PARSER.declareField(constructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.START_OBJECT) { final TotalHits totalHits = SearchHits.parseTotalHitsFragment(p); - assert totalHits.relation == TotalHits.Relation.EQUAL_TO; - return totalHits.value; + assert totalHits.relation() == TotalHits.Relation.EQUAL_TO; + return totalHits.value(); } else { // For BWC with nodes pre 7.0 return p.longValue(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java index 5a121ffa53658..97ba79d6a8683 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java @@ -129,7 +129,7 @@ private void assertHighlightOneDoc( } TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 
1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.collectionToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, hiliteAnalyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 27f8fc915cdd7..c6da66a43460a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -570,7 +570,7 @@ public void testSearchQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); @@ -600,7 +600,7 @@ public void testSearchDfsQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 3723a415ab061..a8d48b22df321 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -289,8 +289,8 @@ public void onFailure(Exception e) { prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)), expected -> assertNoFailuresAndResponse(prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()), all -> { - assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); - logger.info("total: {}", expected.getHits().getTotalHits().value); + assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value(), all.getHits().getTotalHits().value()); + logger.info("total: {}", expected.getHits().getTotalHits().value()); }) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index 41646496c59c4..796d49c6f4169 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -252,7 +252,7 @@ public void assertNested(String index, int numDocs) { // now, do a nested query assertNoFailuresAndResponse( prepareSearch(index).setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), 
equalTo((long) numDocs)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java index 3768daaa20ffc..7e3c0e92c2708 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java @@ -140,11 +140,11 @@ public void afterBulk(long executionId, BulkRequest request, Exception failure) assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L)); if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index cfdaacfae9cfb..31bc674888807 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -134,11 +134,11 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) final boolean finalRejectedAfterAllRetries = rejectedAfterAllRetries; 
assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index 573d929ee30a9..8c3443026ba0e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -78,7 +78,7 @@ public void testMappingValidationIndexExists() { ); indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java index 29a5e491dd3fd..438a67a3bf692 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java @@ -44,9 +44,9 @@ public void testIndexWithWriteDelayEnabled() throws Exception { try { logger.debug("running search"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index da2dfc50d7fe9..3461834dfc9f9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -611,7 +611,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards - shardsRemoved)); assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); }); // create a PIT when some shards are missing @@ -636,7 +636,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertThat(resp.pointInTimeId(), equalTo(pointInTimeResponseOneNodeDown.getPointInTimeId())); assertNotNull(resp.getHits().getTotalHits()); - 
assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); @@ -660,7 +660,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards)); assertThat(resp.getFailedShards(), equalTo(0)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, greaterThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), greaterThan((long) numDocs)); }); // ensure that when using the previously created PIT, we'd see the same number of documents as before regardless of the @@ -680,7 +680,7 @@ public void testMissingShardsWithPointInTime() throws Exception { } assertNotNull(resp.getHits().getTotalHits()); // we expect less documents as the newly indexed ones should not be part of the PIT - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index 5435389452a51..d15d256302b4c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -142,7 +142,7 @@ public void testLocalClusterAlias() throws ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -161,7 +161,7 @@ public void testLocalClusterAlias() throws 
ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -220,7 +220,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce ); searchRequest.indices(""); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -240,7 +240,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -279,7 +279,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { ? 
originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(1, longTerms.getBuckets().size()); @@ -295,7 +295,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { false ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(2, longTerms.getBuckets().size()); @@ -431,7 +431,7 @@ public void testSearchIdle() throws Exception { () -> assertResponse( prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setPreFilterShardSize(randomIntBetween(1, 3)), - resp -> assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + resp -> assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)) ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index 91903fd700034..98e315f67fd28 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -395,7 +395,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); logger.info("--> checking filtering alias for one index"); @@ -405,7 +405,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); logger.info("--> checking filtering alias for two indices and one complete index"); @@ -415,7 +415,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for one index"); @@ -425,17 +425,17 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "aliasToTest1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); assertResponse( prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); assertResponse( prepareSearch("foos", 
"aliasToTests").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); @@ -445,7 +445,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.termQuery("name", "something")), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); } @@ -507,7 +507,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -516,7 +516,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); assertResponse( @@ -525,7 +525,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ 
-534,7 +534,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1", "filter23").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -543,7 +543,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -552,7 +552,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test1", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); } @@ -607,7 +607,7 @@ public void testDeletingByQueryFilteringAliases() throws Exception { logger.info("--> checking counts before delete"); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); } @@ -1398,7 +1398,7 @@ private void checkAliases() { } private void assertHits(SearchHits hits, String... 
ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java index c45f980553431..4d7a47085f91d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java @@ -43,7 +43,7 @@ public void testBroadcastOperations() throws IOException { for (int i = 0; i < 5; i++) { // test successful assertResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index 709f6b866ba28..2eedf5538ead2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -151,7 +151,7 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { // test successful assertNoFailuresAndResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + 
assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); @@ -163,7 +163,7 @@ public void testIndexActions() throws Exception { countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length, equalTo(0) ); - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 216d5e25218e3..7c705001102ab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -124,7 +124,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -150,7 +150,7 @@ public void testFinalPipelineOfNewDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals(true, response.getHits().getAt(0).getSourceAsMap().get("final")); }); } @@ -176,7 +176,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { .get(); 
assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -202,7 +202,7 @@ public void testDefaultPipelineOfRerouteDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertTrue(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index be7610e55b8e6..126c9b20827fd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -106,7 +106,7 @@ public void testMaxDocsLimit() throws Exception { indicesAdmin().prepareRefresh("test").get(); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); if (randomBoolean()) { indicesAdmin().prepareFlush("test").get(); @@ -116,7 +116,7 @@ public void testMaxDocsLimit() throws Exception { ensureGreen("test"); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) 
+ response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index c1f06aeceebde..92a62de28164e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -45,7 +45,7 @@ public void testDynamicTemplateCopyTo() throws Exception { AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) recordCount)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) recordCount)); assertThat(((Terms) response.getAggregations().get("test")).getBuckets().size(), equalTo(recordCount + 1)); assertThat(((Terms) response.getAggregations().get("test_raw")).getBuckets().size(), equalTo(recordCount)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 423e5c14c472a..126f41197307a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -114,7 +114,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I assertResponse( prepareSearch("index").setQuery(termQuery("_id", response.getHits().getHits()[i].getId())).setExplain(true), dupIdResponse -> { - assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L)); + assertThat(dupIdResponse.getHits().getTotalHits().value(), greaterThan(1L)); 
logger.info("found a duplicate id:"); for (SearchHit hit : dupIdResponse.getHits()) { logger.info("Doc {} was found on shard {}", hit.getId(), hit.getShard().getShardId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java index bc2f0ec94f0ab..03b87552e908f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java @@ -56,9 +56,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with all types"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " @@ -76,9 +76,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with a specific type"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. 
" diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 08ce9af14ab13..95bf69bcdc3d8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -148,7 +148,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 5); @@ -160,7 +160,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); @@ -173,7 +173,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 6, 9); @@ -216,7 +216,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 0, 1); @@ -228,7 +228,7 @@ public void 
testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 1, 1); @@ -240,7 +240,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 2, 1); @@ -285,7 +285,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 0, 1); @@ -298,7 +298,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 1, 1); @@ -311,7 +311,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 2, 1); @@ -363,7 +363,7 @@ public void 
testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 0, 1); @@ -380,7 +380,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 1, 1); @@ -394,7 +394,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 2, 1); @@ -439,7 +439,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -452,7 +452,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -467,7 +467,7 @@ 
public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -482,7 +482,7 @@ public void testCanCache() throws Exception { .addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -496,7 +496,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 2); @@ -511,7 +511,7 @@ public void testCanCache() throws Exception { .addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 4); @@ -542,7 +542,7 @@ public void testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 0, 1); @@ -554,20 +554,20 @@ public void 
testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 1, 1); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 1, 2); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 2, 2); } @@ -590,7 +590,7 @@ public void testProfileDisableCache() throws Exception { client.prepareSearch("index").setRequestCache(true).setProfile(profile).setQuery(QueryBuilders.termQuery("k", "hello")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); if (profile == false) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index 6647356f070ae..8f9da497dfc98 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -228,7 +228,7 @@ 
public void testCloseWhileRelocatingShards() throws Exception { for (String index : acknowledgedCloses) { assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { - long docsCount = response.getHits().getTotalHits().value; + long docsCount = response.getHits().getTotalHits().value(); assertEquals( "Expected " + docsPerIndex.get(index) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index c066e3098df6f..5c2de2d548242 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -343,7 +343,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).setTrackTotalHits(true).addSort("id", SortOrder.ASC), response -> { logSearchResponse(numberOfShards, numberOfDocs, finalI, response); - iterationHitCount[finalI] = response.getHits().getTotalHits().value; + iterationHitCount[finalI] = response.getHits().getTotalHits().value(); if (iterationHitCount[finalI] != numberOfDocs) { error[0] = true; } @@ -390,7 +390,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, boolean[] errorOccurred = new boolean[1]; for (int i = 0; i < iterations; i++) { assertResponse(prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()), response -> { - if (response.getHits().getTotalHits().value != numberOfDocs) { + if (response.getHits().getTotalHits().value() != numberOfDocs) { errorOccurred[0] = true; } }); @@ -420,7 +420,7 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat logger.info( "iteration [{}] - returned documents: {} (expected {})", iteration, - 
searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), numberOfDocs ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index 52a95b2065866..aec1cb14edb32 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -239,7 +239,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), response -> { var hits = response.getHits(); - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + if (hits.getTotalHits().value() != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { hitIds[hit] = hit + 1; @@ -253,7 +253,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { } set.forEach(value -> logger.error("Missing id [{}]", value)); } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + assertThat(hits.getTotalHits().value(), equalTo(indexer.totalIndexedDocs())); logger.info("--> DONE search test round {}", idx + 1); } ); @@ -363,9 +363,9 @@ public void indexShardStateChanged( for (Client client : clients()) { assertNoFailuresAndResponse(client.prepareSearch("test").setPreference("_local").setSize(0), response -> { if (expectedCount[0] < 0) { - expectedCount[0] = response.getHits().getTotalHits().value; + expectedCount[0] = response.getHits().getTotalHits().value(); } else { - assertEquals(expectedCount[0], response.getHits().getTotalHits().value); + assertEquals(expectedCount[0], response.getHits().getTotalHits().value()); } }); } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 8fb56d17b93ff..c8911179c923c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -295,7 +295,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1).setQuery(QueryBuilders.matchAllQuery()), response -> { logger.info("--> search all on index_* should find two"); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); // Let's make sure that, even though 2 docs are available, only one is returned according to the size we set in the request // Therefore the reduce phase has taken place, which proves that the QUERY_AND_FETCH search type wasn't erroneously forced. 
assertThat(response.getHits().getHits().length, equalTo(1)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java index 20c197bf73893..ab79da0060fc8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java @@ -159,7 +159,7 @@ private void verifyRoutedSearches(String index, Map> routing + "] shards for routing [" + routing + "] and got hits [" - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + "]" ); @@ -167,7 +167,7 @@ private void verifyRoutedSearches(String index, Map> routing response.getTotalShards() + " was not in " + expectedShards + " for " + index, expectedShards.contains(response.getTotalShards()) ); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); @@ -187,7 +187,7 @@ private void verifyBroadSearches(String index, Map> routingT prepareSearch().setQuery(QueryBuilders.termQuery("_routing", routing)).setIndices(index).setSize(100), response -> { assertEquals(expectedShards, response.getTotalShards()); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java index 702d4a99df2f1..ec6c02c03e051 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -63,7 +63,7 @@ public void testTopHitsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); } @@ -80,7 +80,7 @@ public void testAggsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertEquals(searchResponse.getHits().getHits().length, 0); StringTerms terms = searchResponse.getAggregations().get("terms"); assertEquals(1, terms.getBuckets().size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index 5b8c238d7b7db..1d73645eac181 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -114,7 +114,7 @@ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exceptio histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode)) ), response -> { - assertThat(response.getHits().getTotalHits().value, Matchers.equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), Matchers.equalTo(0L)); Histogram values = 
response.getAggregations().get("values"); assertThat(values, notNullValue()); assertThat(values.getBuckets().isEmpty(), is(true)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index bd9e154c394e7..92a26c007f889 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -292,7 +292,7 @@ public void testDuelTerms() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); final Terms longTerms = response.getAggregations().get("long"); final Terms doubleTerms = response.getAggregations().get("double"); @@ -412,7 +412,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index 3568391279a7a..6edcec21fe04c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -56,7 +56,7 @@ public void testWrapperQueryIsRewritten() throws IOException { metadata.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); builder.setMetadata(metadata); assertResponse(client().prepareSearch("test").setSize(0).addAggregation(builder), response -> { - 
assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); InternalFilters filters = response.getAggregations().get("titles"); assertEquals(1, filters.getBuckets().size()); assertEquals(2, filters.getBuckets().get(0).getDocCount()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index b83b74ca8b639..3777242f3f3da 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -973,7 +973,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateHistogram("date_histo").field("value").fixedInterval(DateHistogramInterval.HOUR)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); @@ -1010,7 +1010,7 @@ public void testSingleValueWithTimeZone() throws Exception { .format("yyyy-MM-dd:HH-mm-ssZZZZZ") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -1174,7 +1174,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { assertThat( "Expected 24 buckets for one day aggregation with hourly interval", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), equalTo(2L) ); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 5abf52cf37f88..85eef4d4bd01d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -77,7 +77,7 @@ public void testSingleValueWithPositiveOffset() throws Exception { dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -98,7 +98,7 @@ public void testSingleValueWithNegativeOffset() throws Exception { dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -127,7 +127,7 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception { .fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(24L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(24L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java 
index 9df1fae2431f0..dc9bc53e38e32 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -577,7 +577,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateRange("date_range").field("value").addRange("0-1", 0, 1)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -721,7 +721,7 @@ public void testRangeWithFormatStringValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00:16:40-00:50:00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00:50:00-01:06:40", 3000000L, 4000000L); @@ -738,7 +738,7 @@ public void testRangeWithFormatStringValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -752,7 +752,7 @@ public void testRangeWithFormatStringValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 
3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); @@ -787,7 +787,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -798,7 +798,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -809,7 +809,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -826,7 +826,7 @@ public void testRangeWithFormatNumericValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -840,7 +840,7 @@ public void testRangeWithFormatNumericValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index b5dea9cbbba49..65f0b081e2242 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -158,7 +158,7 @@ public void testEmptyAggregation() throws Exception { 
histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index b04cb5325a82d..176d37991b697 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -246,7 +246,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery()))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -454,7 +454,7 @@ public void testEmptyAggregationWithOtherBucket() throws Exception { .subAggregation(filters("filters", new KeyedFilter("foo", matchAllQuery())).otherBucket(true).otherBucketKey("bar")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 17b976bdd3748..7f77ae43cdfc8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -412,7 +412,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index d117f593348d6..f01ea03e86818 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -914,7 +914,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(histogram("sub_histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 1cce1ab3b1c4c..27017692651ff 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -345,7 +345,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(nested("nested", "nested"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 0c39859856d56..4a47204c252a7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -865,7 +865,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index fde18fb283a6e..7936f5fa82f15 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -96,7 +96,7 @@ public void testEmptyAggregation() throws Exception { histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -129,7 +129,7 @@ public void testUnmapped() throws Exception { assertResponse( prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ExtendedStats stats = response.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index a4da7c4e893be..73b5085ca1f72 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -111,7 +111,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = 
histo.getBuckets().get(1); @@ -137,7 +137,7 @@ public void testUnmapped() throws Exception { .field("value") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 43e4aecb07f7f..9faacc11fd741 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -115,7 +115,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -142,7 +142,7 @@ public void testUnmapped() throws Exception { .percentiles(0, 10, 15, 100) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 042c8c1fb0e35..11e656c9547ab 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -357,7 +357,7 @@ public void testMap() { prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -406,7 +406,7 @@ public void testMapWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -466,7 +466,7 @@ public void testInitMutatesParams() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -521,7 +521,7 @@ public void testMapCombineWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -585,7 +585,7 @@ public void testInitMapCombineWithParams() { .reduceScript(reduceScript) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -654,7 +654,7 @@ public void testInitMapCombineReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -713,7 +713,7 @@ public void testInitMapCombineReduceGetProperty() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Global global = response.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -772,7 +772,7 @@ public void testMapCombineReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -823,7 +823,7 @@ public void testInitMapReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -868,7 +868,7 @@ public void testMapReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -927,7 +927,7 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -963,7 +963,7 @@ public void testInitMapCombineReduceWithParamsStored() { .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -1024,7 +1024,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); assertThat(aggregation, notNullValue()); assertThat(aggregation, instanceOf(Histogram.class)); @@ -1098,7 +1098,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Bucket bucket = histo.getBuckets().get(1); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index 78adca3377f0b..978057b82e648 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -55,7 +55,7 @@ public void testEmptyAggregation() throws Exception { ), response -> { assertShardExecutionState(response, 0); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index fd173b8f48a12..998c830c08538 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -81,7 +81,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 9c11b6cd14d54..70518fbaae59b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -104,7 +104,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value"))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -145,7 +145,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 })).field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index c67a237b2fc17..06bcfa33a7b03 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -110,7 +110,7 @@ public void testEmptyAggregation() throws Exception { 
.subAggregation(randomCompression(percentiles("percentiles").field("value")).percentiles(10, 15)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -131,7 +131,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(0, 10, 15, 100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 42f04ff54c82a..1c1f8e51a059b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -327,7 +327,7 @@ public void testBasics() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); higestSortValue += 10; assertThat((Long) hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); @@ -347,7 +347,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) 
.addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -380,7 +380,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -412,7 +412,7 @@ public void testBreadthFirstWithScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -443,7 +443,7 @@ public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -500,7 +500,7 @@ public void testPagination() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); 
TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(controlHits.getTotalHits().value)); + assertThat(hits.getTotalHits().value(), equalTo(controlHits.getTotalHits().value())); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { logger.info( @@ -542,7 +542,7 @@ public void testSortByBucket() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); assertThat(hits.getAt(1).getSortValues()[0], equalTo(higestSortValue - 1)); @@ -577,7 +577,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("b")); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("6")); @@ -585,7 +585,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("c")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(3L)); + assertThat(hits.getTotalHits().value(), equalTo(3L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("9")); @@ -593,7 +593,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("a")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); + 
assertThat(hits.getTotalHits().value(), equalTo(2L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("2")); } @@ -629,7 +629,7 @@ public void testFetchFeatures() throws IOException { for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(1)); SearchHit hit = hits.getAt(0); @@ -681,7 +681,7 @@ public void testEmptyIndex() throws Exception { TopHits hits = response.getAggregations().get("hits"); assertThat(hits, notNullValue()); assertThat(hits.getName(), equalTo("hits")); - assertThat(hits.getHits().getTotalHits().value, equalTo(0L)); + assertThat(hits.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -743,7 +743,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); TopHits topHits = bucket.getAggregations().get("top-comments"); SearchHits searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(1)); @@ -752,7 +752,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(2L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), 
equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(2)); @@ -764,7 +764,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(4)); @@ -788,7 +788,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toComments.getDocCount(), equalTo(4L)); TopHits topComments = toComments.getAggregations().get("top-comments"); - assertThat(topComments.getHits().getTotalHits().value, equalTo(4L)); + assertThat(topComments.getHits().getTotalHits().value(), equalTo(4L)); assertThat(topComments.getHits().getHits().length, equalTo(4)); assertThat(topComments.getHits().getAt(0).getId(), equalTo("2")); @@ -815,7 +815,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toReviewers.getDocCount(), equalTo(7L)); TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers"); - assertThat(topReviewers.getHits().getTotalHits().value, equalTo(7L)); + assertThat(topReviewers.getHits().getTotalHits().value(), equalTo(7L)); assertThat(topReviewers.getHits().getHits().length, equalTo(7)); assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1")); @@ -898,7 +898,7 @@ public void testNestedFetchFeatures() { assertThat(nested.getDocCount(), equalTo(4L)); SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); SearchHit searchHit = hits.getAt(0); 
assertThat(searchHit.getId(), equalTo("1")); assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments")); @@ -959,7 +959,7 @@ public void testTopHitsInNested() throws Exception { TopHits hits = nested.getAggregations().get("comments"); SearchHits searchHits = hits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(numNestedDocs)); + assertThat(searchHits.getTotalHits().value(), equalTo(numNestedDocs)); for (int j = 0; j < 3; j++) { assertThat(searchHits.getAt(j).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(j).getNestedIdentity().getOffset(), equalTo(0)); @@ -1063,7 +1063,7 @@ public void testNoStoredFields() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); for (SearchHit hit : hits) { assertThat(hit.getSourceAsMap(), nullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 445ad8e0b9b11..61c535ad5aad9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -66,7 +66,7 @@ protected Collection> nodePlugins() { public void testUnmapped() throws Exception { assertResponse(prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(count("count").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ValueCount valueCount = response.getAggregations().get("count"); 
assertThat(valueCount, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index df6994c57f425..46d137469747d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -71,14 +71,14 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setPreference(preference + Integer.toString(counter++)) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { refresh(); assertResponse( client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterRefresh -> { logger.info( "hits count mismatch on any shard search failed, post explicit refresh hits are {}", - searchResponseAfterRefresh.getHits().getTotalHits().value + searchResponseAfterRefresh.getHits().getTotalHits().value() ); ensureGreen(); assertResponse( @@ -87,7 +87,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterGreen -> logger.info( "hits count mismatch on any shard search failed, post explicit wait for green hits are {}", - searchResponseAfterGreen.getHits().getTotalHits().value + searchResponseAfterGreen.getHits().getTotalHits().value() ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index a9b0f75fe45ba..a25cb0e7d66e0 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -76,7 +76,7 @@ public void run() { try { while (stop.get() == false) { assertResponse(prepareSearch().setSize(numDocs), response -> { - if (response.getHits().getTotalHits().value != numDocs) { + if (response.getHits().getTotalHits().value() != numDocs) { // if we did not search all shards but had no serious failures that is potentially fine // if only the hit-count is wrong. this can happen if the cluster-state is behind when the // request comes in. It's a small window but a known limitation. @@ -85,7 +85,7 @@ public void run() { .allMatch(ssf -> ssf.getCause() instanceof NoShardAvailableActionException)) { nonCriticalExceptions.add( "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numDocs + " was expected. " @@ -99,7 +99,7 @@ public void run() { final SearchHits sh = response.getHits(); assertThat( "Expected hits to be the same size the actual hits array", - sh.getTotalHits().value, + sh.getTotalHits().value(), equalTo((long) (sh.getHits().length)) ); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index d4a4debbd61d6..6121d826b6599 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -125,7 +125,7 @@ public void testDfsQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] 
hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -168,7 +168,7 @@ public void testDfsQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -207,7 +207,7 @@ public void testQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -236,7 +236,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(60)); for (int i = 0; i < 60; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -247,7 +247,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(40)); for (int i = 0; i < 40; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -270,7 +270,7 @@ public void 
testQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -300,7 +300,7 @@ public void testSimpleFacets() throws Exception { .aggregation(AggregationBuilders.filter("test1", termQuery("name", "test1"))); assertNoFailuresAndResponse(client().search(new SearchRequest("test").source(sourceBuilder)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(100L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(100L)); Global global = response.getAggregations().get("global"); Filter all = global.getAggregations().get("all"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index b536db040e39f..acaf59b1a8269 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -150,7 +150,7 @@ public void testSimpleNested() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(2)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -170,7 +170,7 @@ public void testSimpleNested() throws Exception { 
assertThat(response.getHits().getAt(0).getShard(), notNullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getHits().length, equalTo(3)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -195,7 +195,7 @@ public void testSimpleNested() throws Exception { ), response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(1)); HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message"); assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick")); @@ -263,7 +263,7 @@ public void testRandomNested() throws Exception { SearchHit searchHit = response.getHits().getAt(i); assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field1InnerObjects[i])); for (int j = 0; j < field1InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1")); @@ -272,7 +272,7 @@ public void testRandomNested() throws Exception { } inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field2InnerObjects[i])); for (int j = 0; j < field2InnerObjects[i] && j < size; j++) { SearchHit innerHit = 
inner.getAt(j); assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2")); @@ -377,13 +377,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -408,13 +408,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), 
equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -435,7 +435,7 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -459,13 +459,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -537,7 +537,7 @@ public void testNestedDefinedAsObject() throws Exception { response -> { assertHitCount(response, 1); 
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1")); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), @@ -612,7 +612,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit parent = response.getHits().getAt(0); assertThat(parent.getId(), equalTo("1")); SearchHits inner = parent.getInnerHits().get("comments.messages"); - assertThat(inner.getTotalHits().value, equalTo(2L)); + assertThat(inner.getTotalHits().value(), equalTo(2L)); assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}")); assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}")); } @@ -628,7 +628,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(2L)); + assertThat(messages.getTotalHits().value(), equalTo(2L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2)); @@ -650,7 +650,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + 
assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); @@ -684,7 +684,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); @@ -785,22 +785,22 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC), response -> { assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getAt(0).getId(), equalTo("0")); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1")); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1)); 
assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2")); for (int i = 2; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(String.valueOf(i))); - assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3")); } @@ -843,7 +843,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -864,7 +864,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -890,7 +890,7 @@ public void testNestedSource() throws Exception { ), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0)); } ); @@ -900,7 +900,7 @@ public void testNestedSource() throws Exception { .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty()); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index d9d6979ffd710..70fbc86399919 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3339,7 +3339,7 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti new 
SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1)); } ); @@ -3411,7 +3411,7 @@ public void testKeywordFieldHighlighting() throws IOException { .highlighter(new HighlightBuilder().field("*")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("keyword_field"); assertThat(highlightField.fragments()[0].string(), equalTo("some text")); } @@ -3568,7 +3568,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { .should(QueryBuilders.termQuery("field", "hello")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertHighlight(response, 0, "field", 0, 1, equalTo("hello world")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 9ad6363d0e57d..2e61c9f4b77e1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -190,26 +190,26 @@ public void testStoredFields() throws Exception { indicesAdmin().prepareRefresh().get(); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field1"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); // field2 is not stored, check that it is not extracted from source. assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); assertThat(response.getHits().getAt(0).getFields().get("field2"), nullValue()); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -217,7 +217,7 @@ public void testStoredFields() throws Exception { assertResponse( prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); 
+ assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -225,20 +225,20 @@ public void testStoredFields() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), nullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -246,7 +246,7 @@ public void testStoredFields() throws Exception { 
assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), notNullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -310,7 +310,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); @@ -341,7 +341,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", Map.of("factor", 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat(fields, equalTo(singleton("sNum1"))); @@ -428,7 +428,7 @@ public void testIdBasedScriptFields() throws Exception { .setSize(numDocs) .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); @@ -637,7 +637,7 @@ public void testStoredFieldsWithoutSource() throws Exception { .addStoredField("boolean_field") .addStoredField("binary_field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -680,7 +680,7 @@ public void testSearchFieldsMetadata() throws Exception { .get(); assertResponse(prepareSearch("my-index").addStoredField("field1").addStoredField("_routing"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field("field1"), nullValue()); assertThat(response.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1")); }); @@ -748,7 +748,7 @@ public void testGetFieldsComplexField() throws Exception { String field = "field1.field2.field3.field4"; assertResponse(prepareSearch("my-index").addStoredField(field), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(response.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); assertThat(response.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2")); @@ -865,7 +865,7 @@ public void testDocValueFields() throws Exception { builder.addDocValueField("*_field"); } assertResponse(builder, 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -905,7 +905,7 @@ public void testDocValueFields() throws Exception { assertThat(response.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -954,7 +954,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("double_field", "#.0") .addDocValueField("date_field", "epoch_millis"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index dcbf4996358d7..0a899dcc85430 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -249,7 +249,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, 
equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -275,7 +275,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -299,7 +299,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -372,7 +372,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -385,7 +385,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); 
assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -404,7 +404,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("2")); assertThat(sh.getAt(1).getId(), equalTo("1")); } @@ -460,7 +460,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -480,7 +480,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); } @@ -527,7 +527,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -545,7 +545,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -563,7 +563,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - 
assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0 + 0.5, 1.e-5)); logger.info( @@ -587,7 +587,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo((2.0 + 0.5) / 2, 1.e-5)); } @@ -605,7 +605,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -623,7 +623,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0, 1.e-5)); } @@ -1130,7 +1130,7 @@ public void testMultiFieldOptions() throws Exception { assertResponse(client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))), response -> { assertSearchHits(response, "1", "2"); SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); }); List lonlat = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 
c59fc0f68c4d4..4b977fa6f8967 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -143,7 +143,7 @@ public void testExplainScript() throws InterruptedException, IOException, Execut ), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(20L)); + assertThat(hits.getTotalHits().value(), equalTo(20L)); int idCounter = 19; for (SearchHit hit : hits.getHits()) { assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 422d6f06f2988..fdf1d908fbf5b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -144,9 +144,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -166,9 +166,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -223,9 +223,9 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept protected 
void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) { assertNoFailures(searchResponse); - assertThat((int) searchResponse.getHits().getTotalHits().value, is(numMatchingDocs)); + assertThat((int) searchResponse.getHits().getTotalHits().value(), is(numMatchingDocs)); int pos = 0; - for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value; hitId--) { + for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value(); hitId--) { assertThat(searchResponse.getHits().getAt(pos).getId(), equalTo(Integer.toString(hitId))); pos++; } @@ -241,7 +241,7 @@ public void testWithEmptyFunctions() throws IOException, ExecutionException, Int assertNoFailuresAndResponse( client().search(new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text")))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); termQueryScore[0] = response.getHits().getAt(0).getScore(); } ); @@ -258,7 +258,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore)); } ); @@ -268,7 +268,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(2f)) ) ), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 2b61e6ae5d1ad..d83ab705f46be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -134,7 +134,7 @@ public void testRescorePhrase() throws Exception { 5 ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -414,7 +414,7 @@ private static void assertEquivalent(String query, SearchResponse plain, SearchR assertNoFailures(rescored); SearchHits leftHits = plain.getHits(); SearchHits rightHits = rescored.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); @@ -840,7 +840,7 @@ public void testRescorePhaseWithInvalidSort() throws Exception { .setTrackScores(true) .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); assertThat(response.getHits().getHits().length, equalTo(5)); for (SearchHit hit : response.getHits().getHits()) { assertThat(hit.getScore(), equalTo(101f)); @@ -887,7 +887,7 @@ public void testRescoreAfterCollapse() throws Exception { .addRescorer(new 
QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore"))) .setCollapse(new CollapseBuilder("group")); assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); assertThat(resp.getHits().getHits().length, equalTo(3)); SearchHit hit1 = resp.getHits().getAt(0); @@ -967,7 +967,7 @@ public void testRescoreAfterCollapseRandom() throws Exception { .setSize(Math.min(numGroups, 10)); long expectedNumHits = numHits; assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); for (int pos = 0; pos < resp.getHits().getHits().length; pos++) { SearchHit hit = resp.getHits().getAt(pos); assertThat(hit.getId(), equalTo(sortedGroups[pos].id())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 1fe128da6889c..e847e17645b17 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -267,7 +267,7 @@ public void testSeedReportedInExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); SearchHit firstHit = response.getHits().getAt(0); assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); } @@ -282,12 +282,12 @@ public void testNoDocs() throws Exception { prepareSearch("test").setQuery( functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) ), - response -> assertEquals(0, 
response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); assertNoFailuresAndResponse( prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())), - response -> assertEquals(0, response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index a9d33b268e73f..4900591e5f6ea 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -425,7 +425,7 @@ public void testExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); Explanation explanation = response.getHits().getHits()[0].getExplanation(); assertThat(explanation.getValue(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(explanation.toString(), startsWith("0.36464313 = Score based on 2 child docs in range from 0 to 1")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index e02bed8409bc4..49e6d450a637f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -146,10 +146,10 @@ public void testProfileMatchesRegular() throws Exception { ); } - if (vanillaResponse.getHits().getTotalHits().value != profileResponse.getHits().getTotalHits().value) { + if 
(vanillaResponse.getHits().getTotalHits().value() != profileResponse.getHits().getTotalHits().value()) { Set vanillaSet = new HashSet<>(Arrays.asList(vanillaResponse.getHits().getHits())); Set profileSet = new HashSet<>(Arrays.asList(profileResponse.getHits().getHits())); - if (vanillaResponse.getHits().getTotalHits().value > profileResponse.getHits().getTotalHits().value) { + if (vanillaResponse.getHits().getTotalHits().value() > profileResponse.getHits().getTotalHits().value()) { vanillaSet.removeAll(profileSet); fail("Vanilla hits were larger than profile hits. Non-overlapping elements were: " + vanillaSet.toString()); } else { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java index 81c612107e44a..956b03c6eb902 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java @@ -132,7 +132,7 @@ public void testExists() throws Exception { response ), count, - response.getHits().getTotalHits().value + response.getHits().getTotalHits().value() ); } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 2d6bb8176b091..72ffc87e2d806 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -346,7 +346,7 @@ public void testPhraseType() { ).type(MatchQueryParser.Type.PHRASE) ) ), - response -> assertThat(response.getHits().getTotalHits().value, greaterThan(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), greaterThan(1L)) ); 
assertSearchHitsWithoutFailures( @@ -427,8 +427,8 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio matchResp -> { assertThat( "field: " + field + " query: " + builder.toString(), - multiMatchResp.getHits().getTotalHits().value, - equalTo(matchResp.getHits().getTotalHits().value) + multiMatchResp.getHits().getTotalHits().value(), + equalTo(matchResp.getHits().getTotalHits().value()) ); SearchHits hits = multiMatchResp.getHits(); if (field.startsWith("missing")) { @@ -450,7 +450,7 @@ public void testEquivalence() { var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); final int numDocs; try { - numDocs = (int) response.getHits().getTotalHits().value; + numDocs = (int) response.getHits().getTotalHits().value(); } finally { response.decRef(); } @@ -943,7 +943,7 @@ private static void assertEquivalent(String query, SearchResponse left, SearchRe assertNoFailures(right); SearchHits leftHits = left.getHits(); SearchHits rightHits = right.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index a7ce84f3cd02d..397ad91388004 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -262,7 +262,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { } private void assertHits(SearchHits hits, String... 
ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 3cf00bf085e99..cdcf241d221f3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -263,7 +263,7 @@ public void testConstantScoreQuery() throws Exception { MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num))); final long[] constantScoreTotalHits = new long[1]; assertResponse(prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num), response -> { - constantScoreTotalHits[0] = response.getHits().getTotalHits().value; + constantScoreTotalHits[0] = response.getHits().getTotalHits().value(); SearchHits hits = response.getHits(); for (SearchHit searchHit : hits) { assertThat(searchHit, hasScore(1.0f)); @@ -276,7 +276,7 @@ public void testConstantScoreQuery() throws Exception { ).setSize(num), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(constantScoreTotalHits[0])); + assertThat(hits.getTotalHits().value(), equalTo(constantScoreTotalHits[0])); if (constantScoreTotalHits[0] > 1) { float expected = hits.getAt(0).getScore(); for (SearchHit searchHit : hits) { @@ -1692,7 +1692,7 @@ public void testQueryStringParserCache() throws Exception { assertResponse( prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); first[0] = response.getHits().getAt(0).getScore(); } @@ -1703,7 +1703,7 @@ public void testQueryStringParserCache() throws Exception { prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); float actual = response.getHits().getAt(0).getScore(); assertThat(finalI + " expected: " + first[0] + " actual: " + actual, Float.compare(first[0], actual), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 7630ddb000140..e6bc64ea52187 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -582,7 +582,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { } private void assertHits(SearchHits hits, String... 
ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java index 32dc34045cc8b..bce9fece11d52 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java @@ -74,7 +74,7 @@ public void testSimpleSearch() throws ExecutionException, InterruptedException { assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.RUNNING), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.FAILED), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(testClusterInfo.get("total_docs"))); + assertThat(response.getHits().getTotalHits().value(), equalTo(testClusterInfo.get("total_docs"))); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java index fa4cafc66c822..435521fac0b1f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java @@ -222,8 +222,8 @@ public void testRankDocsRetrieverBasicWithPagination() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - 
assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_4")); @@ -271,8 +271,8 @@ public void testRankDocsRetrieverWithAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertNotNull(resp.getAggregations()); assertNotNull(resp.getAggregations().get("topic")); @@ -329,8 +329,8 @@ public void testRankDocsRetrieverWithCollapse() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); @@ -391,8 +391,8 @@ public void testRankDocsRetrieverWithCollapseAndAggs() { ElasticsearchAssertions.assertResponse(req, 
resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertNotNull(resp.getAggregations()); assertNotNull(resp.getAggregations().get("topic")); @@ -443,8 +443,8 @@ public void testRankDocsRetrieverWithNestedQuery() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); @@ -502,8 +502,8 @@ public void testRankDocsRetrieverMultipleCompoundRetrievers() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_2")); @@ 
-540,8 +540,8 @@ public void testRankDocsRetrieverDifferentNestedSorting() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_7")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java index e6ecd9f1e3779..fd84366371ab7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java @@ -77,8 +77,8 @@ public void testRewrite() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_0")); }); } @@ -90,8 +90,8 @@ public void testRewriteCompound() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - 
assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 17a0d6441ca47..2ab55f3806570 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -122,17 +122,17 @@ public void testSimplePreference() { assertResponse( prepareSearch().setQuery(matchAllQuery()), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("_local"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("1234"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 439534c3e1743..f13278de28abd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java 
@@ -50,15 +50,15 @@ public void testNodeSelection() { // Before we've gathered stats for all nodes, we should try each node once. Set nodeIds = new HashSet<>(); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertEquals(3, nodeIds.size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 9f6ad69a4eed6..cab575144df77 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -121,7 +121,7 @@ public void testCustomScriptBinaryField() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); 
assertThat(response.getHits().getAt(0).getFields().get("sbinaryData").getValues().get(0), equalTo(16)); } @@ -174,7 +174,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); @@ -195,7 +195,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); } @@ -213,7 +213,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index 89d4c0cc74852..bd30f2afcc95b 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -43,7 +43,7 @@ public void testDuelQueryThenFetch() throws Exception { prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), control -> { SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(sh.getHits().length, equalTo(context.numDocs)); SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) @@ -54,7 +54,7 @@ public void testDuelQueryThenFetch() throws Exception { try { assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); int counter = 0; @@ -68,7 +68,7 @@ public void testDuelQueryThenFetch() throws Exception { searchScrollResponse.decRef(); searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(10)).get(); assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); if (searchScrollResponse.getHits().getHits().length == 0) { break; } @@ -240,7 +240,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int try { while (true) { assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getTotalHits().value(), 
scroll.getHits().getTotalHits().value()); assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); if (scroll.getHits().getHits().length == 0) { break; @@ -254,7 +254,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int scroll.decRef(); scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll(TimeValue.timeValueMinutes(10)).get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); + assertEquals(control.getHits().getTotalHits().value(), scrollDocs); } catch (AssertionError e) { logger.info("Control:\n{}", control); logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 24a3d3ac422f3..fa58da744124b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -88,7 +88,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -97,7 +97,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); 
assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -106,7 +106,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -144,7 +144,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -154,7 +154,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -165,7 +165,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -175,7 +175,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(0)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -261,7 +261,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -277,7 +277,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) 
hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -288,7 +288,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -299,7 +299,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -380,7 +380,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -396,7 +396,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + 
assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -407,7 +407,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -418,7 +418,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -534,7 +534,7 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), searchResponse -> { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) 
hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -600,7 +600,7 @@ public void testInvalidScrollKeepAlive() throws IOException { assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { assertNotNull(searchResponse.getScrollId()); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); Exception ex = expectThrows( Exception.class, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index b9c3c27abf2d8..2c787990744f0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -149,7 +149,7 @@ public void testWithNullStrings() throws InterruptedException { .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0, null }), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index dd8cf5e527055..32df462f93be2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -554,7 +554,7 @@ public void testStrictlyCountRequest() throws Exception { assertNoFailuresAndResponse( prepareSearch("test_count_1", "test_count_2").setTrackTotalHits(true).setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(11L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(11L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 59373380d539c..4856ee53f10d5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -116,7 +116,7 @@ public void testWithPreferenceAndRoutings() throws Exception { setupIndex(totalDocs, numShards); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -128,7 +128,7 @@ public void testWithPreferenceAndRoutings() throws Exception { }); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -146,7 +146,7 @@ public void 
testWithPreferenceAndRoutings() throws Exception { .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) ); assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()) @@ -165,7 +165,7 @@ private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String f SearchResponse searchResponse = request.slice(sliceBuilder).get(); try { totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); int numSliceResults = searchResponse.getHits().getHits().length; String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { @@ -237,7 +237,7 @@ private void assertSearchSlicesWithPointInTime( SearchResponse searchResponse = request.get(); try { - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); while (true) { int numHits = searchResponse.getHits().getHits().length; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 7915637ca7ac5..6b1737e3016e7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -285,7 +285,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut 
assertNoFailuresAndResponse( prepareSearch("test").setQuery(matchAllQuery()).setSize(size).addSort("dense_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = denseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -306,7 +306,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut .setSize(size) .addSort("sparse_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) sparseBytes.size())); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) sparseBytes.size())); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = sparseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -817,7 +817,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -827,7 +827,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), 
equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -837,7 +837,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -883,7 +883,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -895,7 +895,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -907,7 +907,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), 
equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -919,7 +919,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -1182,7 +1182,7 @@ public void testSortMVField() throws Exception { refresh(); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1196,7 +1196,7 @@ public void testSortMVField() throws Exception { }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1213,7 +1213,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), 
equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1231,7 +1231,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1249,7 +1249,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1263,7 +1263,7 @@ public void testSortMVField() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1276,7 +1276,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), 
equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1289,7 +1289,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1302,7 +1302,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1315,7 +1315,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1328,7 +1328,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) 
response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1341,7 +1341,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1354,7 +1354,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1367,7 +1367,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1380,7 +1380,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1393,7 +1393,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1406,7 +1406,7 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("07")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1718,8 +1718,8 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() 
throws Exception prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order), singleShardResponse -> { assertThat( - multiShardResponse.getHits().getTotalHits().value, - equalTo(singleShardResponse.getHits().getTotalHits().value) + multiShardResponse.getHits().getTotalHits().value(), + equalTo(singleShardResponse.getHits().getTotalHits().value()) ); assertThat(multiShardResponse.getHits().getHits().length, equalTo(singleShardResponse.getHits().getHits().length)); for (int i = 0; i < multiShardResponse.getHits().getHits().length; i++) { @@ -1746,14 +1746,14 @@ public void testCustomFormat() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").addSort(SortBuilders.fieldSort("ip")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues()); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues()); }); assertNoFailuresAndResponse( prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java index c7b934df0394f..f794d1ee27f13 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -361,7 +361,7 @@ public void testDocumentsWithNullValue() throws Exception { 
assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -372,7 +372,7 @@ public void testDocumentsWithNullValue() throws Exception { .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) .addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -390,7 +390,7 @@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -408,7 +408,7 @@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("2")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 9e0dd984c9a2a..ac394de068475 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -63,12 +63,12 @@ public void testInnerHits() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java index 55dca7810f845..f74d4699dedb5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java @@ -53,10 +53,10 @@ public void testCustomBM25Similarity() throws Exception { .get(); assertResponse(prepareSearch().setQuery(matchQuery("field1", "quick brown fox")), bm25SearchResponse -> { - assertThat(bm25SearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(bm25SearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float bm25Score = bm25SearchResponse.getHits().getHits()[0].getScore(); assertResponse(prepareSearch().setQuery(matchQuery("field2", "quick brown fox")), booleanSearchResponse -> { - assertThat(booleanSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(booleanSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float defaultScore = booleanSearchResponse.getHits().getHits()[0].getScore(); assertThat(bm25Score, not(equalTo(defaultScore))); }); diff --git a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java index 4461b71be9047..c24038f300680 100644 --- a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java @@ -53,7 +53,7 @@ SearchSortValuesAndFormats getBottomSortValues() { } synchronized void consumeTopDocs(TopFieldDocs topDocs, DocValueFormat[] sortValuesFormat) { - totalHits += topDocs.totalHits.value; + totalHits += topDocs.totalHits.value(); if (validateShardSortFields(topDocs.fields) == false) { return; } diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java index 2c4cb31584323..da422170e39ea 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -56,8 +56,8 @@ public void consumeResult(SearchPhaseResult result, Runnable next) { return; } // set the relation to the first non-equal relation - relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, result.queryResult().getTotalHits().relation); - 
totalHits.add(result.queryResult().getTotalHits().value); + relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, result.queryResult().getTotalHits().relation()); + totalHits.add(result.queryResult().getTotalHits().value()); terminatedEarly.compareAndSet(false, (result.queryResult().terminatedEarly() != null && result.queryResult().terminatedEarly())); timedOut.compareAndSet(false, result.queryResult().searchTimedOut()); next.run(); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 82c498c64e1c9..8989d001df6ad 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -877,8 +877,8 @@ TotalHits getTotalHits() { void add(TopDocsAndMaxScore topDocs, boolean timedOut, Boolean terminatedEarly) { if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { - totalHits += topDocs.topDocs.totalHits.value; - if (topDocs.topDocs.totalHits.relation == Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += topDocs.topDocs.totalHits.value(); + if (topDocs.topDocs.totalHits.relation() == Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 2cd3361a05822..9ea2f9808d4c1 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -392,8 +392,8 @@ public static ScoreDoc readScoreDoc(StreamInput in) throws IOException { private static final Class GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass(); public static void writeTotalHits(StreamOutput out, TotalHits totalHits) 
throws IOException { - out.writeVLong(totalHits.value); - out.writeEnum(totalHits.relation); + out.writeVLong(totalHits.value()); + out.writeEnum(totalHits.relation()); } public static void writeTopDocs(StreamOutput out, TopDocsAndMaxScore topDocs) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index e1bfb7a3ecbee..c591ae67cfe89 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -48,7 +48,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm deduplicated[i++] = new FieldInfo( FieldMapper.internFieldName(fi.getName()), fi.number, - fi.hasVectors(), + fi.hasTermVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index ffdaeeae00927..8d60fdbec7775 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -118,7 +118,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.parallelArray = new ParallelArray(this.searchBatchSize); this.indexVersionCreated = indexVersionCreated; final TopDocs topDocs = searchOperations(null, accessStats); - this.totalHits = Math.toIntExact(topDocs.totalHits.value); + this.totalHits = Math.toIntExact(topDocs.totalHits.value()); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java 
b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 42cf8a185ec7a..44dc4cb860447 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -148,7 +148,7 @@ private static Response wrapSearchResponse(SearchResponse response) { } hits = unmodifiableList(hits); } - long total = response.getHits().getTotalHits().value; + long total = response.getHits().getTotalHits().value(); return new Response(response.isTimedOut(), failures, total, hits, response.getScrollId()); } diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java index 350c7d91e2e4c..70f517c76bd8d 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java @@ -169,10 +169,10 @@ public static TopFieldGroups merge(Sort sort, int start, int size, TopFieldGroup final TopFieldGroups shard = shardHits[shardIDX]; // totalHits can be non-zero even if no hits were // collected, when searchAfter was used: - totalHitCount += shard.totalHits.value; + totalHitCount += shard.totalHits.value(); // If any hit count is a lower bound then the merged // total hit count is a lower bound as well - if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + if (shard.totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } if (CollectionUtils.isEmpty(shard.scoreDocs) == false) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index 4758a54e0739f..048215b462692 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -71,7 +71,7 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli return channel -> client.search(countRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(SearchResponse countResponse) throws Exception { - assert countResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert countResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; return RestTable.buildResponse(buildTable(request, countResponse), channel); } }); @@ -89,7 +89,7 @@ protected Table getTableWithHeader(final RestRequest request) { private Table buildTable(RestRequest request, SearchResponse response) { Table table = getTableWithHeader(request); table.startRow(); - table.addCell(response.getHits().getTotalHits().value); + table.addCell(response.getHits().getTotalHits().value()); table.endRow(); return table; diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index 7dfd25e70134e..24cb98b8d31b4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -93,7 +93,7 @@ public RestResponse buildResponse(SearchResponse response, XContentBuilder build if (terminateAfter != DEFAULT_TERMINATE_AFTER) { builder.field("terminated_early", response.isTerminatedEarly()); } - builder.field("count", response.getHits().getTotalHits().value); + builder.field("count", response.getHits().getTotalHits().value()); buildBroadcastShardsHeader( builder, request, diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 
b2bc3097af185..52d68befe6cfd 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -287,12 +287,12 @@ public Iterator toXContentChunked(ToXContent.Params params return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> { boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value; + long total = totalHits == null ? -1 : totalHits.value(); b.field(Fields.TOTAL, total); } else if (totalHits != null) { b.startObject(Fields.TOTAL); - b.field("value", totalHits.value); - b.field("relation", totalHits.relation == Relation.EQUAL_TO ? "eq" : "gte"); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); b.endObject(); } return b; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index 2e16298d5ab04..89bd09ceb2afd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -134,7 +134,7 @@ public InternalAggregation get() { maxScore = reduceAndFindMaxScore(aggregations, shardDocs); reducedTopDocs = TopDocs.merge(from, size, shardDocs); } - assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; + assert reducedTopDocs.totalHits.relation() == Relation.EQUAL_TO; return new InternalTopHits( getName(), @@ -261,8 +261,8 @@ public boolean equals(Object obj) { InternalTopHits other = (InternalTopHits) obj; if (from != other.from) return false; if (size != other.size) return false; - if (topDocs.topDocs.totalHits.value != other.topDocs.topDocs.totalHits.value) return false; - if 
(topDocs.topDocs.totalHits.relation != other.topDocs.topDocs.totalHits.relation) return false; + if (topDocs.topDocs.totalHits.value() != other.topDocs.topDocs.totalHits.value()) return false; + if (topDocs.topDocs.totalHits.relation() != other.topDocs.topDocs.totalHits.relation()) return false; if (topDocs.topDocs.scoreDocs.length != other.topDocs.topDocs.scoreDocs.length) return false; for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc thisDoc = topDocs.topDocs.scoreDocs[d]; @@ -286,8 +286,8 @@ public int hashCode() { int hashCode = super.hashCode(); hashCode = 31 * hashCode + Integer.hashCode(from); hashCode = 31 * hashCode + Integer.hashCode(size); - hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value); - hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation.hashCode(); + hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value()); + hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation().hashCode(); for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc doc = topDocs.topDocs.scoreDocs[d]; hashCode = 31 * hashCode + doc.doc; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java index 038888398c4c5..8989dbb6cee26 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java @@ -50,7 +50,7 @@ public static boolean hasValue(InternalTDigestPercentiles agg) { } public static boolean hasValue(InternalTopHits agg) { - return (agg.getHits().getTotalHits().value == 0 + return (agg.getHits().getTotalHits().value() == 0 && Double.isNaN(agg.getHits().getMaxScore()) && Double.isNaN(agg.getTopDocs().maxScore)) == false; } diff --git 
a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java index bf250a2f35184..62cdc1d925e83 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java @@ -33,7 +33,7 @@ public ESDiversifyingChildrenByteKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java index 59b8f26902367..fc7923d10ca24 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java @@ -33,7 +33,7 @@ public ESDiversifyingChildrenFloatKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 9808d97ec8253..1221c25673901 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -26,7 +26,7 @@ public ESKnnByteVectorQuery(String field, byte[] target, Integer k, int numCands protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index aad4005eb83ed..6b9a6bb2b9cee 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -26,7 +26,7 @@ public ESKnnFloatVectorQuery(String field, float[] target, Integer k, int numCan protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index 6b472336f08e9..ba3315c2efc2c 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -228,7 +228,7 @@ public void getSynonymSetRules(String synonymSetId, ActionListener { - long totalSynonymRules = countResponse.getHits().getTotalHits().value; + long totalSynonymRules = countResponse.getHits().getTotalHits().value(); if (totalSynonymRules > maxSynonymsSets) { logger.warn( "The number of synonym rules in the synonym set [{}] exceeds the maximum allowed." @@ -263,7 +263,7 @@ public void getSynonymSetRules(String synonymSetId, int from, int size, ActionLi .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(new DelegatingIndexNotFoundActionListener<>(synonymSetId, listener, (searchListener, searchResponse) -> { - final long totalSynonymRules = searchResponse.getHits().getTotalHits().value; + final long totalSynonymRules = searchResponse.getHits().getTotalHits().value(); // If there are no rules, check that the synonym set actually exists to return the proper error if (totalSynonymRules == 0) { checkSynonymSetExists(synonymSetId, searchListener.delegateFailure((existsListener, response) -> { @@ -381,7 +381,7 @@ public void putSynonymRule(String synonymsSetId, SynonymRule synonymRule, Action .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(l1.delegateFailureAndWrap((searchListener, searchResponse) -> { - long synonymsSetSize = searchResponse.getHits().getTotalHits().value; + long synonymsSetSize = 
searchResponse.getHits().getTotalHits().value(); if (synonymsSetSize >= maxSynonymsSets) { listener.onFailure( new IllegalArgumentException("The number of synonym rules in a synonyms set cannot exceed " + maxSynonymsSets) diff --git a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java index 4a7d0cc8208e2..bc86c47433460 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java @@ -78,8 +78,8 @@ public void testNullShardResultHandling() throws Exception { queryPhaseResultConsumer.consumeResult(querySearchResult, nextCounter::incrementAndGet); } var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertFalse(reducePhase.isEmptyResult()); assertEquals(10, nextCounter.get()); } @@ -93,8 +93,8 @@ public void testEmptyResults() throws Exception { ) ) { var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertTrue(reducePhase.isEmptyResult()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 3c698f1b790e5..f104ce001696f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -150,11 +150,11 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNotNull(responseRef.get().get(1)); assertNull(responseRef.get().get(1).fetchResult()); - assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); @@ -235,7 +235,7 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNull(responseRef.get().get(1)); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 54c98a8b72d7e..a1223848c2ef8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -115,7 +115,7 @@ public void 
testShortcutQueryAndFetchOptimization() throws Exception { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); if (numHits != 0) { assertEquals(42, searchResponse.getHits().getAt(0).docId()); } @@ -243,7 +243,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(42, searchResponse.getHits().getAt(1).docId()); assertEquals(0, searchResponse.getFailedShards()); @@ -352,7 +352,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(1, searchResponse.getFailedShards()); assertEquals(1, searchResponse.getSuccessfulShards()); @@ -467,7 +467,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); assertEquals(Math.min(numHits, resultSetSize), searchResponse.getHits().getHits().length); SearchHit[] hits = searchResponse.getHits().getHits(); for (int i = 0; i < hits.length; i++) { @@ -702,7 +702,7 @@ public void 
sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(1, searchResponse.getHits().getHits().length); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(0, searchResponse.getFailedShards()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 585e7c775da35..34ce1c53cb52a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -296,8 +296,8 @@ public void testMerge() { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } for (SearchHit hit : mergedResponse.hits().getHits()) { SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); @@ -414,8 +414,8 @@ protected boolean lessThan(RankDoc a, RankDoc b) { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } 
int rank = 1; for (SearchHit hit : mergedResponse.hits().getHits()) { @@ -521,8 +521,8 @@ private static int getTotalQueryHits(AtomicArray results) { int resultCount = 0; for (SearchPhaseResult shardResult : results.asList()) { TopDocs topDocs = shardResult.queryResult().topDocs().topDocs; - assert topDocs.totalHits.relation == Relation.EQUAL_TO; - resultCount += (int) topDocs.totalHits.value; + assert topDocs.totalHits.relation() == Relation.EQUAL_TO; + resultCount += (int) topDocs.totalHits.value(); } return resultCount; } @@ -783,7 +783,7 @@ public void testConsumerConcurrently() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -843,7 +843,7 @@ public void testConsumerOnlyAggs() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(0, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -901,7 +901,7 @@ public void testConsumerOnlyHits() throws Exception { assertAggReduction(request); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), 
reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -968,7 +968,7 @@ public void testReduceTopNWithFromOffset() throws Exception { ScoreDoc[] scoreDocs = reduce.sortedTopDocs().scoreDocs(); assertEquals(5, scoreDocs.length); assertEquals(100.f, reduce.maxScore(), 0.0f); - assertEquals(12, reduce.totalHits().value); + assertEquals(12, reduce.totalHits().value()); assertEquals(95.0f, scoreDocs[0].score, 0.0f); assertEquals(94.0f, scoreDocs[1].score, 0.0f); assertEquals(93.0f, scoreDocs[2].score, 0.0f); @@ -1021,7 +1021,7 @@ public void testConsumerSortByField() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertTrue(reduce.sortedTopDocs().isSortedByField()); assertEquals(1, reduce.sortedTopDocs().sortFields().length); @@ -1078,7 +1078,7 @@ public void testConsumerFieldCollapsing() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(3, reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(a, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertEquals(b, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[1]).fields[0]); assertEquals(c, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[2]).fields[0]); @@ -1198,7 +1198,7 @@ public void testConsumerSuggestions() throws Exception { assertEquals(maxScoreCompletion, reduce.sortedTopDocs().scoreDocs()[0].score, 0f); assertEquals(0, 
reduce.sortedTopDocs().scoreDocs()[0].doc); assertNotEquals(-1, reduce.sortedTopDocs().scoreDocs()[0].shardIndex); - assertEquals(0, reduce.totalHits().value); + assertEquals(0, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -1289,7 +1289,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 06967a26cd514..815bf3ab5d0ec 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -244,11 +244,11 @@ public void run() { SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); if (withScroll) { - assertThat(phase.totalHits().value, equalTo((long) numShards)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo((long) numShards)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } else { - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, 
equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); } assertThat(phase.sortedTopDocs().scoreDocs().length, equalTo(1)); assertThat(phase.sortedTopDocs().scoreDocs()[0], instanceOf(FieldDoc.class)); @@ -534,8 +534,8 @@ public void run() { assertThat(successfulOps.get(), equalTo(2)); SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932") @@ -699,8 +699,8 @@ public void run() { assertThat(successfulOps.get(), equalTo(2)); SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null); SearchActionListener listener = new SearchActionListener(searchShardTarget, 0) { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 0070d61a2adcb..0d846897411d8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -761,11 +761,11 @@ public void testMergeSearchHits() throws InterruptedException { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } - final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value; + final int numDocs = totalHits == null || totalHits.value() >= requestedSize ? requestedSize : (int) totalHits.value(); int scoreFactor = randomIntBetween(1, numResponses); float maxScore = scoreSort ? 
numDocs * scoreFactor : Float.NaN; SearchHit[] hits = randomSearchHitArray( @@ -861,8 +861,8 @@ public void testMergeSearchHits() throws InterruptedException { assertNull(searchHits.getTotalHits()); } else { assertNotNull(searchHits.getTotalHits()); - assertEquals(expectedTotalHits.value, searchHits.getTotalHits().value); - assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation); + assertEquals(expectedTotalHits.value(), searchHits.getTotalHits().value()); + assertSame(expectedTotalHits.relation(), searchHits.getTotalHits().relation()); } if (expectedMaxScore == Float.NEGATIVE_INFINITY) { assertTrue(Float.isNaN(searchHits.getMaxScore())); @@ -909,9 +909,9 @@ public void testMergeNoResponsesAdded() { assertEquals(0, response.getNumReducePhases()); assertFalse(response.isTimedOut()); assertNotNull(response.getHits().getTotalHits()); - assertEquals(0, response.getHits().getTotalHits().value); + assertEquals(0, response.getHits().getTotalHits().value()); assertEquals(0, response.getHits().getHits().length); - assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation); + assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation()); assertNull(response.getScrollId()); assertSame(InternalAggregations.EMPTY, response.getAggregations()); assertNull(response.getSuggest()); @@ -1003,7 +1003,7 @@ public void testMergeEmptySearchHitsWithNonEmpty() { assertEquals(2, merger.numResponses()); SearchResponse mergedResponse = merger.getMergedResponse(clusters); try { - assertEquals(10, mergedResponse.getHits().getTotalHits().value); + assertEquals(10, mergedResponse.getHits().getTotalHits().value()); assertEquals(10, mergedResponse.getHits().getHits().length); assertEquals(2, mergedResponse.getTotalShards()); assertEquals(2, mergedResponse.getSuccessfulShards()); @@ -1031,8 +1031,8 @@ public void testMergeOnlyEmptyHits() { TotalHits totalHits = null; if (trackTotalHitsUpTo != 
SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } SearchResponse searchResponse = new SearchResponse( SearchHits.empty(totalHits, Float.NaN), @@ -1231,7 +1231,7 @@ public void testPartialAggsMixedWithFullResponses() { SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1 + assertThat(hits.getTotalHits().value(), equalTo(2L)); // should be 2 hits from remote1 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ { @@ -1272,7 +1272,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1413,7 +1413,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1482,7 
+1482,7 @@ public void testPartialAggsMixedWithFullResponses() { private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Index[] indices) { TotalHits totalHits = new TotalHits(2, TotalHits.Relation.EQUAL_TO); - final int numDocs = (int) totalHits.value; + final int numDocs = (int) totalHits.value(); int scoreFactor = 1; float maxScore = numDocs; int numFields = 1; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index 244906f304482..dabebe15d4b46 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -620,8 +620,8 @@ public void testSerialization() throws IOException { if (searchResponse.getHits().getTotalHits() == null) { assertNull(deserialized.getHits().getTotalHits()); } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value()); + assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation()); } assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index c674ef505c77c..787878a5a721b 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -171,10 +171,10 @@ public void 
testPruneUnreferencedFiles() throws IOException { assertEquals(3, open.maxDoc()); IndexSearcher s = newSearcher(open); - assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value, 0); + assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value(), 0); for (String file : dir.listAll()) { assertFalse("unexpected file: " + file, file.equals("segments_3") || file.startsWith("_2")); diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 7122c1465a27d..c8457adf84e62 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -53,7 +53,7 @@ public void testVectorHighlighter() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -86,7 +86,7 @@ public void testVectorHighlighterPrefixQuery() throws Exception { IndexReader reader = searcher.getIndexReader(); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + 
assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); @@ -142,7 +142,7 @@ public void testVectorHighlighterNoStore() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -169,7 +169,7 @@ public void testVectorHighlighterNoTermVector() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 07d455477b73d..8cdfc29dc2e3e 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -289,7 +289,7 @@ public void testRefreshActuallyWorks() throws Exception { // we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, search.totalHits.value); + assertEquals(1, search.totalHits.value()); } }); assertFalse(refreshTask.isClosed()); @@ -303,7 +303,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the force refresh we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new 
MatchAllDocsQuery(), 10); - assertEquals(2, search.totalHits.value); + assertEquals(2, search.totalHits.value()); } }); prepareIndex("test").setId("2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); @@ -311,7 +311,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the scheduled refresh try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(3, search.totalHits.value); + assertEquals(3, search.totalHits.value()); } }); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index faae0d90344b7..e0a4d579203a7 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -749,7 +749,7 @@ public void testTranslogRecoveryWithMultipleGenerations() throws IOException { recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), docs); - assertEquals(docs, topDocs.totalHits.value); + assertEquals(docs, topDocs.totalHits.value()); } } finally { IOUtils.close(initialEngine, recoveringEngine, store); @@ -3423,7 +3423,7 @@ public void testSkipTranslogReplay() throws IOException { engine.skipTranslogRecovery(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); - assertThat(topDocs.totalHits.value, equalTo(0L)); + assertThat(topDocs.totalHits.value(), equalTo(0L)); } } } @@ -3513,7 +3513,7 @@ public void testTranslogReplay() throws IOException { assertThat(result.getVersion(), equalTo(2L)); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { 
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } engine.close(); @@ -3522,7 +3522,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("warm_up"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } assertEquals(flush ? 1 : 2, translogHandler.appliedOperations()); engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get())); @@ -3533,7 +3533,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs); - assertThat(topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), equalTo((long) numDocs)); } } @@ -3889,7 +3889,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } operation = appendOnlyPrimary(doc, false, 1, create); retry = appendOnlyPrimary(doc, true, 1, create); @@ -3924,7 +3924,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -3982,7 +3982,7 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException 
engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } } @@ -4006,7 +4006,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } final boolean create = randomBoolean(); @@ -4046,7 +4046,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4091,12 +4091,12 @@ public void testDoubleDeliveryReplica() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { List ops = readAllOperationsInLucene(engine); @@ -4171,7 +4171,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, 
topDocs.totalHits.value()); } index = new Engine.Index( @@ -4193,7 +4193,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4263,7 +4263,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } Engine.Index secondIndexRequestReplica = new Engine.Index( @@ -4284,7 +4284,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index 8b6644b382bac..c35000d9f1684 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -112,7 +112,7 @@ public void testSingleValueAllSet() throws Exception { TopFieldDocs topDocs; SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); 
assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[0]).fields[0]), equalTo(one())); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -122,7 +122,7 @@ public void testSingleValueAllSet() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(1)); @@ -192,14 +192,14 @@ public void testMultiValueAllSet() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(2)); sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); @@ -257,7 +257,7 @@ public void testSortMultiValuesFields() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new 
MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(7)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("!08")); @@ -278,7 +278,7 @@ public void testSortMultiValuesFields() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(6)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("10")); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index 2399ee62bf251..e28d622ea7e80 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -261,7 +261,7 @@ public void testActualMissingValue(boolean reverse) throws IOException { randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField) ); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); BytesRef previousValue = reverse ? 
UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); @@ -320,7 +320,7 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { new Sort(sortField) ); - assertThat(topDocs.totalHits.value, lessThanOrEqualTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), lessThanOrEqualTo((long) numDocs)); BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 6577148d78c7b..3af69445215d4 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -46,7 +46,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + r1 -> assertThat(r1.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(0, 1); @@ -56,7 +56,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + r2 -> assertThat(r2.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(1, 1); @@ -71,7 +71,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) 
.setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r3 -> assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + r3 -> assertThat(r3.getHits().getTotalHits().value(), equalTo(5L)) ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index f3199b2afe4c3..c55e5a75a810d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -37,25 +37,25 @@ public void testDoubleIndexingSameDoc() throws Exception { }, reader -> { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = 
searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); }); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 4f7951c543909..8c3ff22a1163c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -878,7 +878,7 @@ protected TokenStreamComponents createComponents(String fieldName) { IndexSearcher searcher = newSearcher(ir); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "Prio 1"); TopDocs td = searcher.search(queryBuilder.toQuery(searchExecutionContext), 1); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); }); Exception e = expectThrows( diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 9e331e1c5c0f8..4fcbd15839d30 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -237,7 +237,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchField.totalHits.value, equalTo(3L)); + assertThat(topDocsWithMinimumShouldMatchField.totalHits.value(), equalTo(3L)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[1].doc, equalTo(3)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[2].doc, equalTo(4)); @@ -248,7 +248,7 @@ public void testDoToQuery() throws Exception { ).doToQuery(context); searcher = newSearcher(ir); 
TopDocs topDocsWithMinimumShouldMatch = searcher.search(queryWithMinimumShouldMatch, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocsWithMinimumShouldMatch.totalHits.value, equalTo(5L)); + assertThat(topDocsWithMinimumShouldMatch.totalHits.value(), equalTo(5L)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[1].doc, equalTo(2)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[2].doc, equalTo(3)); @@ -265,7 +265,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value, equalTo(1L)); + assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value(), equalTo(1L)); assertThat(topDocsWithMinimumShouldMatchNegative.scoreDocs[0].doc, equalTo(5)); } } @@ -309,7 +309,7 @@ public void testDoToQuery_msmScriptField() throws Exception { .doToQuery(context); IndexSearcher searcher = newSearcher(ir); TopDocs topDocs = searcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); assertThat(topDocs.scoreDocs[2].doc, equalTo(4)); diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 1d8633db28f16..d4f87663b329d 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -304,7 +304,7 @@ public void testConflictingOpsOnReplica() throws Exception { for (IndexShard shard : shards) { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("f", "2")), 10); - 
assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value); + assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java index 2cf619895aa90..2ad5b412ba95a 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java @@ -224,7 +224,7 @@ public void testNestedSorting() throws Exception { Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(7)); @@ -239,7 +239,7 @@ public void testNestedSorting() throws Exception { sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(13)); @@ -257,7 +257,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - 
assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(12)); @@ -272,7 +272,7 @@ public void testNestedSorting() throws Exception { sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(15)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(3)); @@ -288,7 +288,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, 127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(127)); @@ -304,7 +304,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) 
((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-127)); @@ -339,7 +339,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java index 37367a8007f22..591dbcb31bd34 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java @@ -72,7 +72,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java index 01876f5d329d3..91ef5196590d0 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java @@ -75,7 +75,7 @@ protected void 
assertAvgScoreMode( ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index a68561f1355ac..f4a81bda917e0 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -310,7 +310,7 @@ public void testNestedSorting() throws Exception { Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("a")); @@ -332,7 +332,7 @@ public void testNestedSorting() throws Exception { ); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("o")); @@ -358,7 +358,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new 
QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("m")); @@ -620,7 +620,7 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs"))); QueryBuilder queryBuilder = new MatchAllQueryBuilder(); TopFieldDocs topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(5L)); + assertThat(topFields.totalHits.value(), equalTo(5L)); StoredFields storedFields = searcher.storedFields(); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); @@ -637,25 +637,25 @@ public void testMultiLevelNestedSorting() throws IOException { { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); 
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } @@ -665,7 +665,7 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.order(SortOrder.DESC); queryBuilder = new MatchAllQueryBuilder(); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(5L)); + assertThat(topFields.totalHits.value(), equalTo(5L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); @@ -682,25 +682,25 @@ public void testMultiLevelNestedSorting() throws IOException { { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); 
queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L)); } @@ -718,7 +718,7 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); + assertThat(topFields.totalHits.value(), equalTo(2L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); @@ -731,7 +731,7 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); + assertThat(topFields.totalHits.value(), equalTo(2L)); 
assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); @@ -756,7 +756,7 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); + assertThat(topFields.totalHits.value(), equalTo(2L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); @@ -769,7 +769,7 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); + assertThat(topFields.totalHits.value(), equalTo(2L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); @@ -786,25 +786,25 @@ public void testMultiLevelNestedSorting() throws IOException { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); 
assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); + assertThat(topFields.totalHits.value(), equalTo(1L)); assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java index 8ea98acdd6806..6177200227e38 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java @@ -51,7 +51,7 @@ public void testReaderCloseListenerIsCalled() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( "field", @@ -81,7 +81,7 @@ 
public void testReaderCloseListenerIsCalled() throws IOException { } outerCount.incrementAndGet(); }); - assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); wrap.close(); assertFalse("wrapped reader is closed", wrap.getIndexReader().tryIncRef()); assertEquals(sourceRefCount, open.getRefCount()); @@ -105,7 +105,7 @@ public void testIsCacheable() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( @@ -152,7 +152,7 @@ public void testAlwaysWrapWithFieldUsageTrackingDirectoryReader() throws IOExcep writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); CheckedFunction wrapper = directoryReader -> directoryReader; try ( diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 28497dc4a8a6b..9f2cac97ef796 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ 
-2789,9 +2789,9 @@ public void testReaderWrapperIsUsed() throws IOException { } try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } CheckedFunction wrapper = reader -> new FieldMaskingReader("foo", reader); closeShards(shard); @@ -2811,9 +2811,9 @@ public void testReaderWrapperIsUsed() throws IOException { try (Engine.Searcher searcher = newShard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 0); + assertEquals(search.totalHits.value(), 0); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } try (Engine.GetResult getResult = newShard.get(new Engine.Get(false, false, "1"))) { assertTrue(getResult.exists()); diff --git a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java index 3bb6265e2c94d..fce22edca75b4 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java @@ -143,7 +143,7 @@ public double execute( 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); r.close(); @@ -235,7 +235,7 @@ public double execute( searcher.setSimilarity(sim); Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f); TopDocs 
topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(initCalled.get()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index d3fc58cec5fb9..972f1b62e6925 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -420,7 +420,7 @@ public BytesReference get() { try (BytesStreamOutput out = new BytesStreamOutput()) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); Document document = reader.storedFields().document(topDocs.scoreDocs[0].doc); out.writeString(document.get("value")); loadedFromCache = false; diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java index bec0f83f78674..8e40ab3b405f4 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java @@ -113,11 +113,11 @@ private > void assertSearchCollapse( TopFieldDocs topDocs = searcher.search(query, topFieldCollectorManager); TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(sortField.getField(), collapseTopFieldDocs.field); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); 
assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); + assertEquals(totalHits, topDocs.totalHits.value()); Object currentValue = null; int topDocsIndex = 0; diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index bb4b3f42fde85..3e9ebbe6bf31b 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -139,10 +139,10 @@ private > void assertSearchCollapse( TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(collapseField.getField(), collapseTopFieldDocs.field); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); assertEquals(totalHits, topDocs.scoreDocs.length); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(totalHits, topDocs.totalHits.value()); Set seen = new HashSet<>(); // collapse field is the last sort diff --git a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java index fd383c7726f28..c31c725f7ba65 100644 --- a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java +++ 
b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java @@ -130,7 +130,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), greaterThan(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo((long) iters + username.length)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo((long) iters + username.length)); } { // test with an unknown field and an unknown term @@ -145,7 +145,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.docFreq(), equalTo(0)); assertThat(termStates.totalTermFreq(), equalTo(0L)); } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(0L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(0L)); } { // test with an unknown field and a term that is present in only one field @@ -165,7 +165,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), equalTo(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(1L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(1L)); } reader.close(); w.close(); @@ -297,7 +297,7 @@ public void testRandomFields() throws IOException { String[] fieldNames = fields.keySet().toArray(new String[0]); Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fieldNames, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); - assertTrue(search.totalHits.value > 0); + assertTrue(search.totalHits.value() > 0); assertTrue(search.scoreDocs.length > 0); } reader.close(); diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index 8412cc241f51a..05c0d45c908ae 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ 
b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -140,7 +140,7 @@ private void assertHighlightOneDoc( IndexSearcher searcher = newSearcher(reader); iw.close(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 2eb3aeab21ff1..58ec59d6ef732 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -771,7 +771,7 @@ public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) ), (response) -> { SearchHits hits = response.getHits(); - assertEquals(hits.getTotalHits().value, numDocs); + assertEquals(hits.getTotalHits().value(), numDocs); assertEquals(hits.getHits().length, 2); int index = 0; for (SearchHit hit : hits.getHits()) { @@ -2504,7 +2504,7 @@ public void testWaitOnRefresh() throws ExecutionException, InterruptedException ); PlainActionFuture future = new PlainActionFuture<>(); service.executeQueryPhase(request, task, future.delegateFailure((l, r) -> { - assertEquals(1, r.queryResult().getTotalHits().value); + assertEquals(1, r.queryResult().getTotalHits().value()); l.onResponse(null); })); future.get(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index cfee97891aa32..3c9c6d12705fb 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -79,12 +79,12 @@ protected void indexData() throws Exception { indexRandom(true, docs); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()), resp -> { - long totalOnOne = resp.getHits().getTotalHits().value; + long totalOnOne = resp.getHits().getTotalHits().value(); assertThat(totalOnOne, is(15L)); }); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()), resp -> { assertNoFailures(resp); - long totalOnTwo = resp.getHits().getTotalHits().value; + long totalOnTwo = resp.getHits().getTotalHits().value(); assertThat(totalOnTwo, is(12L)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 5859c1c162c43..d349f12a9d3a5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -2570,19 +2570,19 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); assertEquals(2L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - 
assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); topHits = result.getBuckets().get(2).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new FieldExistsQuery("keyword")), dataset, () -> { @@ -2597,13 +2597,13 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index dff5c090f818e..0a59cd6a8232c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -413,7 +413,7 @@ public void testWithNestedScoringAggregations() throws IOException { InternalTopHits topHits = 
bucket.getAggregations().get("top_hits"); TotalHits hits = topHits.getHits().getTotalHits(); assertNotNull(hits); - assertThat(hits.value, equalTo(counter)); + assertThat(hits.value(), equalTo(counter)); assertThat(topHits.getHits().getMaxScore(), equalTo(Float.NaN)); counter += 1; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 27f0b21d2767f..21cc6a9b3b9fd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -1729,8 +1729,8 @@ private void assertNestedTopHitsScore(InternalMultiBucketAggregation terms int ptr = 9; for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getTotalHits().value, equalTo((long) ptr)); - assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation); + assertThat(topHits.getHits().getTotalHits().value(), equalTo((long) ptr)); + assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation()); if (withScore) { assertThat(topHits.getHits().getMaxScore(), equalTo(1f)); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 717d80eeabc90..2505681418753 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -219,8 +219,8 @@ protected void assertReduced(InternalTopHits reduced, List inpu TotalHits.Relation relation = TotalHits.Relation.EQUAL_TO; for (int 
input = 0; input < inputs.size(); input++) { SearchHits internalHits = inputs.get(input).getHits(); - totalHits += internalHits.getTotalHits().value; - if (internalHits.getTotalHits().relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += internalHits.getTotalHits().value(); + if (internalHits.getTotalHits().relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { relation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } maxScore = max(maxScore, internalHits.getMaxScore()); @@ -378,14 +378,14 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { case 2 -> size += between(1, 100); case 3 -> topDocs = new TopDocsAndMaxScore( new TopDocs( - new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), topDocs.topDocs.totalHits.relation), + new TotalHits(topDocs.topDocs.totalHits.value() + between(1, 100), topDocs.topDocs.totalHits.relation()), topDocs.topDocs.scoreDocs ), topDocs.maxScore + randomFloat() ); case 4 -> { TotalHits totalHits = new TotalHits( - searchHits.getTotalHits().value + between(1, 100), + searchHits.getTotalHits().value() + between(1, 100), randomFrom(TotalHits.Relation.values()) ); searchHits = SearchHits.unpooled(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 7c4194e0db873..4bf9c2d57295a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -54,7 +54,7 @@ public void testTopLevel() throws Exception { result = testCase(query, topHits("_name")); } SearchHits searchHits = ((TopHits) result).getHits(); - assertEquals(3L, searchHits.getTotalHits().value); + assertEquals(3L, searchHits.getTotalHits().value()); 
assertEquals("3", searchHits.getAt(0).getId()); assertEquals("2", searchHits.getAt(1).getId()); assertEquals("1", searchHits.getAt(2).getId()); @@ -64,7 +64,7 @@ public void testTopLevel() throws Exception { public void testNoResults() throws Exception { TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC)); SearchHits searchHits = result.getHits(); - assertEquals(0L, searchHits.getTotalHits().value); + assertEquals(0L, searchHits.getTotalHits().value()); assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } @@ -88,27 +88,27 @@ public void testInsideTerms() throws Exception { // The "a" bucket TopHits hits = (TopHits) terms.getBucketByKey("a").getAggregations().get("top"); SearchHits searchHits = (hits).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("a").getAggregations().get("top")))); // The "b" bucket searchHits = ((TopHits) terms.getBucketByKey("b").getAggregations().get("top")).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("b").getAggregations().get("top")))); // The "c" bucket searchHits = ((TopHits) terms.getBucketByKey("c").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("c").getAggregations().get("top")))); // The "d" bucket searchHits = 
((TopHits) terms.getBucketByKey("d").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top")))); } @@ -178,7 +178,7 @@ public void testSetScorer() throws Exception { .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); TopHits result = searchAndReduce(reader, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query)); - assertEquals(3, result.getHits().getTotalHits().value); + assertEquals(3, result.getHits().getTotalHits().value()); reader.close(); directory.close(); } diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 06f9d880c34d7..936d25a0b08c6 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -365,7 +365,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery()))); TopDocs topDocs = searcher.search(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("foo", "bar"))), 3f), 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3f, topDocs.scoreDocs[0].score, 0); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index fc8b9706d387a..22792c1bdfa8c 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -124,14 +124,14 @@ public void testManagerWithSearcher() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); String profileReason = "profiler_reason"; ProfileCollectorManager profileCollectorManager = new ProfileCollectorManager<>(topDocsManager, profileReason); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), profileCollectorManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); CollectorResult result = profileCollectorManager.getCollectorTree(); assertEquals("profiler_reason", result.getReason()); assertEquals("SimpleTopScoreDocCollector", result.getName()); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index 92e051aa799b4..dae9977e55120 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -119,7 +119,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topScoreDocManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -132,7 +132,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); 
assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } } @@ -149,7 +149,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -164,7 +164,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numField2Docs, result.aggs.intValue()); } } @@ -183,7 +183,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -198,7 +198,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } } @@ -217,7 +217,7 @@ public void testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -234,7 +234,7 @@ public void 
testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // post_filter is not applied to aggs assertEquals(reader.maxDoc(), result.aggs.intValue()); } @@ -250,7 +250,7 @@ public void testMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -265,7 +265,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -278,7 +278,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -291,7 +291,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -305,7 +305,7 @@ public void testMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new 
TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -321,7 +321,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // min_score is applied to aggs as well as top docs assertEquals(numField2Docs, result.aggs.intValue()); } @@ -337,7 +337,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -352,7 +352,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -369,7 +369,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -384,7 +384,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); 
assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -397,7 +397,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -410,7 +410,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -426,7 +426,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -442,7 +442,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); assertEquals(numField3Docs, result.aggs.intValue()); } { @@ -457,7 +457,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - 
assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -472,7 +472,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -622,7 +622,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -637,7 +637,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -650,7 +650,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -666,7 +666,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); assertEquals(terminateAfter, 
result.aggs.intValue()); } } @@ -682,7 +682,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -697,7 +697,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -712,7 +712,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -728,7 +728,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); // aggs see more documents because the filter is not applied to them assertThat(result.aggs, Matchers.greaterThanOrEqualTo(terminateAfter)); } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 03d525ae23333..5e11c25094ce6 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -142,7 +142,7 @@ private void countTestCase(Query query, IndexReader reader, boolean shouldCollec QueryPhase.addCollectorsAndSearch(context); ContextIndexSearcher countSearcher = shouldCollectCount ? newContextSearcher(reader) : noCollectionContextSearcher(reader); - assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -231,15 +231,15 @@ public void testPostFilterDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // 
QueryPhaseCollector does not propagate Weight#count when a post_filter is provided, hence it forces collection despite @@ -247,16 +247,16 @@ public void testPostFilterDisablesHitCountShortcut() throws Exception { context.setSize(0); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -267,8 +267,8 @@ public void testTerminateAfterWithFilter() throws Exception { context.setSize(10); context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", "bar")))); QueryPhase.addCollectorsAndSearch(context); - assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, 
context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } } @@ -278,15 +278,15 @@ public void testMinScoreDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when min_score is provided, hence it forces collection despite @@ -294,16 +294,16 @@ public void testMinScoreDisablesHitCountShortcut() throws Exception { context.setSize(0); context.minimumScore(100); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - 
assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.minimumScore(100); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -334,17 +334,17 @@ public void testInOrderScrollOptimization() throws Exception { context.setSize(size); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); context.setSearcher(earlyTerminationContextSearcher(reader, size)); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - 
assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size)); } } @@ -362,8 +362,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -373,8 +373,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); 
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { @@ -385,8 +385,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); // Given that total hit count does not require collection, PartialHitCountCollector does not early terminate. - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -405,8 +405,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.setSize(0); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -417,8 +417,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } { @@ -432,9 +432,9 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(trackTotalHits); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) trackTotalHits)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) trackTotalHits)); assertThat( - context.queryResult().topDocs().topDocs.totalHits.relation, + context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); @@ -447,8 +447,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(11, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(10L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(10L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -466,7 +466,7 @@ public void 
testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(10); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(10)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 1), new MatchAllDocsQuery())) { @@ -475,8 +475,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -487,8 +487,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); 
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), new MatchAllDocsQuery())) { @@ -498,8 +498,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } { @@ -513,8 +513,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(size)); } } @@ -533,8 +533,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -544,8 +544,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -556,8 +556,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, 6)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), 
equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -570,8 +570,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { // depending on docs distribution we may or may not be able to honor terminate_after: low scoring hits are skipped via // setMinCompetitiveScore, which bypasses terminate_after until the next leaf collector is pulled, when that happens. assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(5)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -582,9 +582,9 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(8, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); // TODO this looks off, it should probably be GREATER_THAN_OR_EQUAL_TO - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); 
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } } @@ -597,8 +597,8 @@ public void testIndexSortingEarlyTermination() throws Exception { context.setSize(1); context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -610,7 +610,7 @@ public void testIndexSortingEarlyTermination() throws Exception { context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1))); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(numDocs - 1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -621,7 +621,7 @@ public void testIndexSortingEarlyTermination() throws Exception { context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.executeQuery(context); assertNull(context.queryResult().terminatedEarly()); - 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -684,18 +684,18 @@ public void testIndexSortScrollOptimization() throws Exception { context.sort(searchSortAndFormat); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); int sizeMinus1 = context.queryResult().topDocs().topDocs.scoreDocs.length - 1; FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1]; context.setSearcher(earlyTerminationContextSearcher(reader, 10)); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) { 
@SuppressWarnings("unchecked") @@ -744,8 +744,8 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.EQUAL_TO); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.EQUAL_TO); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); } try (TestSearchContext context = createContext(newContextSearcher(reader), q)) { @@ -762,9 +762,9 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.TOP_DOCS); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } } @@ -879,8 +879,8 @@ public void testNumericSortOptimization() throws Exception { QueryPhase.addCollectorsAndSearch(searchContext); assertTrue(searchContext.sort().sort.getSort()[0].getOptimizeSortWithPoints()); assertThat(searchContext.queryResult().topDocs().topDocs.scoreDocs, arrayWithSize(0)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation, 
equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } // 7. Test that sort optimization doesn't break a case where from = 0 and size= 0 @@ -948,8 +948,8 @@ public void testMaxScoreQueryVisitor() { // assert score docs are in order and their number is as expected private static void assertSortResults(TopDocs topDocs, long totalNumDocs, boolean isDoubleSort) { - assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation); - assertThat(topDocs.totalHits.value, lessThan(totalNumDocs)); // we collected less docs than total number + assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation()); + assertThat(topDocs.totalHits.value(), lessThan(totalNumDocs)); // we collected less docs than total number long cur1, cur2; long prev1 = Long.MIN_VALUE; long prev2 = Long.MIN_VALUE; @@ -988,7 +988,7 @@ public void testMinScore() throws Exception { context.trackTotalHitsUpTo(5); QueryPhase.addCollectorsAndSearch(context); - assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value()); } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index f7a5be707f12e..09666cb06a928 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -132,7 +132,7 @@ private void scorerTimeoutTest(int size, CheckedConsumer= results.scoreDocs.length); + assertTrue(results.totalHits.value() >= results.scoreDocs.length); // verify the results are in descending score order float last = Float.MAX_VALUE; for 
(ScoreDoc scoreDoc : results.scoreDocs) { diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index c09ed24668963..931a69a497734 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -281,8 +281,8 @@ public void testScoreDocQuery() throws IOException { final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); TopDocs topDocs = searcher.search(query, 100); - assertEquals(scoreDocs.length, topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation); + assertEquals(scoreDocs.length, topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation()); Arrays.sort(topDocs.scoreDocs, Comparator.comparingInt(scoreDoc -> scoreDoc.doc)); assertEquals(scoreDocs.length, topDocs.scoreDocs.length); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java index 34930af101e63..496053846dc9c 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java @@ -61,14 +61,14 @@ public void testSimpleEuclidean() throws Exception { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 3f, 0.25f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 1f, 0.5f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + 
assertThat(docs.totalHits.value(), equalTo(4L)); } } } @@ -137,14 +137,14 @@ public void testSimpleCosine() throws IOException { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .8f, .9f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .9f, 0.95f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 8664ee08a51a2..01ad8ad262db5 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -371,7 +371,7 @@ public void testSuccessfulSnapshotAndRestore() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseListener, r -> { - assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value); + assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value()); documentCountVerified.set(true); }); @@ -815,7 +815,10 @@ public void testConcurrentSnapshotRestoreAndDeleteOther() { var response = safeResult(searchResponseListener); try { - assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + assertEquals( + documentsFirstSnapshot + documentsSecondSnapshot, + Objects.requireNonNull(response.getHits().getTotalHits()).value() + ); } finally { response.decRef(); } @@ -1176,7 +1179,7 @@ public void testSuccessfulSnapshotWithConcurrentDynamicMappingUpdates() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); 
continueOrDie(searchResponseStepListener, r -> { - final long hitCount = r.getHits().getTotalHits().value; + final long hitCount = r.getHits().getTotalHits().value(); assertThat( "Documents were restored but the restored index mapping was older than some documents and misses some of their fields", (int) hitCount, diff --git a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java index b2eca6752712b..f4a335ce57fd5 100644 --- a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java +++ b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java @@ -139,7 +139,7 @@ public void testRetrieveSnapshots() throws Exception { logger.info("--> run a search"); assertResponse(client.prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("text", "sometext")), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(COUNTS.intValue(), greaterThan(0)); }); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 2639aafae1300..5be5649464fe9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -94,7 +94,7 @@ public static TotalHits getTotalHits(SearchRequestBuilder request) { } public static 
long getTotalHitsValue(SearchRequestBuilder request) { - return getTotalHits(request).value; + return getTotalHits(request).value(); } public static SearchResponse responseAsSearchResponse(Response searchResponse) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 7fc1826952477..2f556b3628b9f 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -231,7 +231,7 @@ public void setupSuiteScopeCluster() throws Exception { .setSize(5000), response -> { assertNoFailures(response); - long totalHits = response.getHits().getTotalHits().value; + long totalHits = response.getHits().getTotalHits().value(); XContentBuilder builder = XContentFactory.jsonBuilder(); ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java index 23b17e8f3f163..f118e5790a07a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java @@ -39,7 +39,7 @@ public void testEmptyAggregation() { .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), response -> { CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(geoCentroid, notNullValue()); assertThat(geoCentroid.getName(), equalTo(aggName())); assertThat(geoCentroid.centroid(), equalTo(null)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java index 9dae49f658211..bedc81b7d1ad8 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java @@ -154,7 +154,7 @@ public void testEmptyAggregation() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(EMPTY_IDX_NAME).setQuery(matchAllQuery()).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); SpatialBounds geoBounds = response.getAggregations().get(aggName()); assertThat(geoBounds, notNullValue()); assertThat(geoBounds.getName(), equalTo(aggName())); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 6c84a9ba601cf..af3b3d00e40b6 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -103,7 +103,7 @@ public void testIndexPointsFilterRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -112,7 +112,7 @@ public void testIndexPointsFilterRectangle() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -169,7 +169,7 @@ public void testIndexPointsPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getId(), equalTo("1")); } ); @@ -208,7 +208,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -218,7 +218,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -228,7 +228,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -237,7 +237,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); @@ -263,7 +263,7 @@ public void testIndexPointsRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -318,7 +318,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java index 406625b33813f..03afdfdf7695f 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java @@ -205,7 +205,7 @@ public void testRandomGeoCollectionQuery() throws Exception { QueryBuilder intersects = queryBuilder().intersectionQuery(defaultFieldName, queryCollection); assertNoFailuresAndResponse(client().prepareSearch(defaultIndexName).setQuery(intersects), response -> { - assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value > 0); + assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value() > 0); }); } @@ -351,7 +351,7 @@ public void testEdgeCases() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, query)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("blakely")); } @@ -456,7 +456,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, "Big_Rectangle")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -465,7 +465,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, "Big_Rectangle")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java index 751a4d835610d..1fc17b21a2f68 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java @@ -67,7 +67,7 @@ public void testRectangleSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); }); @@ -111,7 +111,7 @@ public void testPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("4", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); @@ -154,7 +154,7 @@ public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = 
response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("3", searchHits.getAt(0).getId()); assertNotEquals("3", searchHits.getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java index fe5c7b8572b35..de0bc4c217728 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java @@ -101,7 +101,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -113,7 +113,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -125,7 +125,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom && left == right .setQuery(geoBoundingBoxQuery("location").setCorners(40.7143528, -74.0059731, 40.7143528, -74.0059731)), response -> 
{ - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("1")); @@ -137,7 +137,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom .setQuery(geoBoundingBoxQuery("location").setCorners(40.759011, -74.00009, 40.759011, -73.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -149,7 +149,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // left == right .setQuery(geoBoundingBoxQuery("location").setCorners(41.8, -73.9844722, 40.7, -73.9844722)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -162,7 +162,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoDistanceQuery("location").point(40.5, -73.9).distance(25, DistanceUnit.KILOMETERS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("7"), equalTo("4"))); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index 1da562da3dd0e..5d440473ea851 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -142,7 +142,7 @@ public void testEnvelopeSpanningDateline() throws Exception { } ); assertResponse(client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertNotEquals("1", response.getHits().getAt(0).getId()); assertNotEquals("1", response.getHits().getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 71628967bf266..d6497e679e036 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -897,9 +897,11 @@ public void waitNoPendingTasksOnAll() throws Exception { /** Ensures the result counts are as expected, and logs the results if different */ public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) { final TotalHits totalHits = searchResponse.getHits().getTotalHits(); - if (totalHits.value != expectedResults || totalHits.relation != TotalHits.Relation.EQUAL_TO) { + if (totalHits.value() != expectedResults || totalHits.relation() != TotalHits.Relation.EQUAL_TO) { StringBuilder sb = new StringBuilder("search result contains ["); - String value = Long.toString(totalHits.value) + (totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : ""); + String value = Long.toString(totalHits.value()) + (totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO + ? "+" + : ""); sb.append(value).append("] results. 
expected [").append(expectedResults).append("]"); String failMsg = sb.toString(); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 46c49f1c26134..872c784d2e1cd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -311,7 +311,7 @@ public static void assertHitCount(ActionFuture responseFuture, l public static void assertHitCount(SearchResponse countResponse, long expectedHitCount) { final TotalHits totalHits = countResponse.getHits().getTotalHits(); - if (totalHits.relation != TotalHits.Relation.EQUAL_TO || totalHits.value != expectedHitCount) { + if (totalHits.relation() != TotalHits.Relation.EQUAL_TO || totalHits.value() != expectedHitCount) { fail("Count is " + totalHits + " but " + expectedHitCount + " was expected. 
" + formatShardStatus(countResponse)); } } @@ -343,7 +343,7 @@ public static void assertFourthHit(SearchResponse searchResponse, Matcher matcher) { assertThat("SearchHit number must be greater than 0", number, greaterThan(0)); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo((long) number)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo((long) number)); assertThat(searchResponse.getHits().getAt(number - 1), matcher); } @@ -406,13 +406,13 @@ public static void assertScrollResponsesAndHitCount( responses.add(scrollResponse); int retrievedDocsCount = 0; try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); while (scrollResponse.getHits().getHits().length > 0) { scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()).setScroll(keepAlive).get(); responses.add(scrollResponse); - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java index 52ecc40c957b7..aee344777779b 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java +++ 
b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java @@ -314,9 +314,9 @@ private AsyncSearchResponse doNext() throws Exception { assertThat(newResponse.getSearchResponse().getShardFailures().length, equalTo(numFailures)); assertNull(newResponse.getSearchResponse().getAggregations()); assertNotNull(newResponse.getSearchResponse().getHits().getTotalHits()); - assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value, equalTo(0L)); + assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value(), equalTo(0L)); assertThat( - newResponse.getSearchResponse().getHits().getTotalHits().relation, + newResponse.getSearchResponse().getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); } else { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java index 302bb68af6c61..fd4463df07a73 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java @@ -62,7 +62,7 @@ public void testFetchFailuresAllShards() throws Exception { assertEquals(10, searchResponse.getSuccessfulShards()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(0, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); @@ -106,7 +106,7 @@ public void testFetchFailuresOnlySomeShards() throws Exception { assertEquals(10, searchResponse.getTotalShards()); 
assertEquals(5, searchResponse.getSuccessfulShards()); assertEquals(5, searchResponse.getFailedShards()); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(5, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 1b7875e4a36b4..618489abd687e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -703,7 +703,7 @@ protected void atLeastDocsIndexed(Client client, String index, long numDocsRepli request.source(new SearchSourceBuilder().size(0)); assertResponse(client.search(request), response -> { assertNotNull(response.getHits().getTotalHits()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(numDocsReplicated)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(numDocsReplicated)); }); }, 60, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java index 0fea3c0d3b74f..1bf52b663b30f 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java @@ -274,7 +274,7 @@ private void assertHits(String index, int numDocsExpected, boolean sourceHadDele }; assertResponse(prepareSearch(index).addSort(SeqNoFieldMapper.NAME, 
SortOrder.ASC).setSize(numDocsExpected), searchResponse -> { assertConsumer.accept(searchResponse, sourceHadDeletions); - assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value()); }); SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC) .setScroll(TimeValue.timeValueMinutes(1)) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java index dea158b425071..d315f09ebda88 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java @@ -105,7 +105,7 @@ protected void searchResources(AbstractGetResourcesRequest request, TaskId paren listener.delegateFailure((l, response) -> { List docs = new ArrayList<>(); Set foundResourceIds = new HashSet<>(); - long totalHitCount = response.getHits().getTotalHits().value; + long totalHitCount = response.getHits().getTotalHits().value(); for (SearchHit hit : response.getHits().getHits()) { try ( XContentParser parser = XContentHelper.createParserNotCompressed( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java index de43f744c307b..4e5f97acacf64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java @@ -145,7 +145,7 @@ default SearchSourceBuilder buildSearch(EvaluationParameters parameters, QueryBu */ default 
void process(SearchResponse searchResponse) { Objects.requireNonNull(searchResponse); - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { String requiredFieldsString = String.join(", ", getRequiredFields()); throw ExceptionsHelper.badRequestException("No documents found containing all the required fields [{}]", requiredFieldsString); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index 3154fe5999b8e..129619f6976e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -79,19 +79,19 @@ public void onResponse(SearchResponse resp) { } } - if (results.size() > resp.getHits().getTotalHits().value) { + if (results.size() > resp.getHits().getTotalHits().value()) { clearScroll.accept(lastResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + results.size() + "] than expected [" - + resp.getHits().getTotalHits().value + + resp.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." 
) ); - } else if (results.size() == resp.getHits().getTotalHits().value) { + } else if (results.size() == resp.getHits().getTotalHits().value()) { clearScroll.accept(resp); // Finally, return the list of the entity listener.onResponse(Collections.unmodifiableList(results)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java index 0061870c73cc9..32b12c834dd9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java @@ -55,8 +55,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("took", tookInMillis); builder.startObject("total"); { - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? 
"eq" : "gte"); } builder.endObject(); builder.startArray("profiles"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index e4a53d0a34d6a..54390365c62af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -401,14 +401,14 @@ public void onFailure(Exception e) { try (Engine.Searcher searcher = restoredShard.acquireSearcher("test")) { assertEquals(searcher.getIndexReader().maxDoc(), seqNoStats.getLocalCheckpoint()); TopDocs search = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); search = searcher.search( new MatchAllDocsQuery(), Integer.MAX_VALUE, new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)), false ); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); long previous = -1; for (ScoreDoc doc : search.scoreDocs) { FieldDoc fieldDoc = (FieldDoc) doc; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java index 46b4384af0914..8433f38e40a0c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java @@ -198,7 +198,7 @@ public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, } 
IndexSearcher searcher = newSearcher(snapReader); TopDocs id = searcher.search(new TermQuery(new Term("id", "1")), 10); - assertEquals(0, id.totalHits.value); + assertEquals(0, id.totalHits.value()); } targetDir = newDirectory(targetDir); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java index c40dd00e0e350..6fe271d1b05e3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java @@ -96,25 +96,25 @@ public void testSearch() throws Exception { ); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); TopDocs result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(0)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value2")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(1)); // this doc has been marked as deleted: indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value3")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(0L)); + assertThat(result.totalHits.value(), equalTo(0L)); indexSearcher = 
newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value4")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(3)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 692946f11fd2d..dbabc891cec6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -215,7 +215,7 @@ public void testKnnVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getFloatVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -249,7 +249,7 @@ public void testKnnByteVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getByteVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java 
index d3dcd7ae36f59..65d53d3adabe7 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -125,7 +125,7 @@ public void testRunner() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = searchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -176,7 +176,7 @@ public void testRunner() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -202,7 +202,7 @@ public void testRunnerGeoMatchType() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("location"), is(equalTo("POINT(10.0 10.0)"))); @@ -244,7 +244,7 @@ public void testRunnerGeoMatchType() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -286,7 +286,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("range"), is(equalTo(Map.of("lt", 10, "gt", 1)))); @@ -330,7 +330,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -376,7 +376,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { new SearchRequest(sourceIndexName).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("subnet"), is(equalTo("10.0.0.0/8"))); @@ -421,7 +421,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { ), enrichSearchResponse -> { - 
assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -460,7 +460,7 @@ public void testRunnerMultiSource() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -522,7 +522,7 @@ public void testRunnerMultiSource() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -564,7 +564,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -633,7 
+633,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -688,7 +688,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -749,7 +749,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -943,7 +943,7 @@ public void testRunnerObjectSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - 
assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -993,7 +993,7 @@ public void testRunnerObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1051,7 +1051,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1100,7 +1100,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1158,7 +1158,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception assertResponse( client().search(new 
SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1209,7 +1209,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1273,7 +1273,7 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1329,7 +1329,7 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1395,7 +1395,7 @@ public void 
testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1450,7 +1450,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1519,7 +1519,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1580,7 +1580,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-19T14:00:00Z")) ) ), - enrichSearchResponse -> 
assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(0L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); assertResponse( @@ -1590,7 +1590,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E ) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1614,7 +1614,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021/08/20 at 14:00")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); // Validate segments @@ -1657,7 +1657,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("data.field1"), is(equalTo("value1"))); @@ -1704,7 +1704,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), 
equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -1736,7 +1736,7 @@ public void testRunnerWithForceMergeRetry() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -1868,7 +1868,7 @@ protected void afterRefreshEnrichIndex(ActionListener listener) { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -1901,7 +1901,7 @@ public void testRunnerWithEmptySegmentsResponse() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2007,7 +2007,7 @@ public void testRunnerWithShardFailuresInSegmentResponse() throws Exception { assertResponse( client().search(new 
SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2405,7 +2405,7 @@ public void testRunnerValidatesIndexIntegrity() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java index 00f22aca2cb92..8dbc9b0f4f43a 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -64,7 +64,7 @@ public void testExecute() throws Exception { assertThat(response.getResponses().length, equalTo(numSearches)); for (int i = 0; i < numSearches; i++) { assertThat(response.getResponses()[i].isFailure(), is(false)); - assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), 
equalTo(1L)); assertThat(response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); assertThat( response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index cd98b43adc159..5e1fde0dfb942 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -1073,7 +1073,7 @@ private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnecto final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) .toList(); - return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value); + return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value()); } private static ConnectorSearchResult hitToConnector(SearchHit searchHit) { @@ -1115,7 +1115,7 @@ private void isDataIndexNameAlreadyInUse(String indexName, String connectorId, A client.search(searchRequest, new ActionListener<>() { @Override public void onResponse(SearchResponse searchResponse) { - boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value > 0L; + boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value() > 0L; listener.onResponse(indexNameIsInUse); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 
9ef895a3a5786..ce6f7f0dbf2b2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -417,7 +417,7 @@ private ConnectorSyncJobsResult mapSearchResponseToConnectorSyncJobsList(SearchR .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) .toList(); - return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value); + return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value()); } private static ConnectorSyncJobSearchResult hitToConnectorSyncJob(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 2eec155ae8ea2..8bf4bbd5716b7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -436,7 +436,7 @@ private static QueryRulesetResult mapSearchResponseToQueryRulesetList(SearchResp final List rulesetResults = Arrays.stream(response.getHits().getHits()) .map(QueryRulesIndexService::hitToQueryRulesetListItem) .toList(); - return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value); + return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value()); } private static QueryRulesetListItem hitToQueryRulesetListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 9e8a8f750b764..30d533aeb9ae5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -416,7 +416,7 @@ private static SearchApplicationResult mapSearchResponse(SearchResponse response final List apps = Arrays.stream(response.getHits().getHits()) .map(SearchApplicationIndexService::hitToSearchApplicationListItem) .toList(); - return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value); + return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value()); } private static SearchApplicationListItem hitToSearchApplicationListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index be1d4c0871ca7..2b7b8b074fa71 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -582,8 +582,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(Fields.HITS); if (totalHits != null) { builder.startObject(Fields.TOTAL); - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? 
"eq" : "gte"); builder.endObject(); } if (events != null) { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 73c0f6d4c7685..54d83af8f5d95 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -135,7 +135,7 @@ public void testBulkOperations() throws Exception { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index bb0691c691176..6347386798975 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -418,7 +418,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, 
topDocs.scoreDocs[0].doc); } { @@ -426,7 +426,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a", "b")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -434,7 +434,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("d")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -442,7 +442,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("z")), 10 ); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } }); } diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java index 081a170aac9f1..bb4464542a422 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java @@ -87,7 +87,7 @@ protected void doExecute(Task task, GetPipelineRequest request, ActionListener { - final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value); + final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value()); final Map pipelineSources = Maps.newMapWithExpectedSize(numHits); final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { @@ -148,14 +148,14 @@ private void handleFilteringSearchResponse( 
ActionListener listener ) { int numberOfHitsSeenSoFar = numberOfHitsSeenPreviously + searchResponse.getHits().getHits().length; - if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + numberOfHitsSeenSoFar + "] than expected [" - + searchResponse.getHits().getTotalHits().value + + searchResponse.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." ) @@ -179,7 +179,7 @@ private void handleFilteringSearchResponse( } } - if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onResponse(new GetPipelineResponse(pipelineSources)); } else { diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java index 94d8a144b0bd6..c89d1f8493b6b 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java @@ -117,7 +117,7 @@ public void testPrefixQuery() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A").caseInsensitive(true)), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[0].getSourceAsMap().get("version")); } ); @@ -134,7 +134,7 @@ public void testSort() throws IOException { assertResponse( 
client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.DESC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("1.3.567#12", hits[0].getSortValues()[0]); assertEquals("1.2.3alpha", hits[1].getSortValues()[0]); @@ -150,7 +150,7 @@ public void testSort() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); var hits = response.getHits().getHits(); assertEquals("1.0.0", hits[0].getSortValues()[0]); assertEquals("1.3.0+build.1234567", hits[1].getSortValues()[0]); @@ -179,7 +179,7 @@ public void testRegexQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "2.*0")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -187,21 +187,21 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "<0-10>.<0-10>.*al.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } ); 
assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "1.[0-9].[0-9].*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("1.3.0+build.1234567", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); // test case sensitivity / insensitivity assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*alpha.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -211,7 +211,7 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -234,7 +234,7 @@ public void testFuzzyQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.fuzzyQuery("version", "2.3.0")), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", 
response.getHits().getHits()[1].getSourceAsMap().get("version")); assertEquals("2.a3.0", response.getHits().getHits()[2].getSourceAsMap().get("version")); @@ -288,7 +288,7 @@ public void testWildcardQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0-alpha.2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -297,7 +297,7 @@ public void testWildcardQuery() throws Exception { private void checkWildcardQuery(String indexName, String query, String... expectedResults) { assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", query)), response -> { - assertEquals(expectedResults.length, response.getHits().getTotalHits().value); + assertEquals(expectedResults.length, response.getHits().getTotalHits().value()); for (int i = 0; i < expectedResults.length; i++) { String expected = expectedResults[i]; Object actual = response.getHits().getHits()[i].getSourceAsMap().get("version"); @@ -321,7 +321,7 @@ public void testStoreMalformed() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addDocValueField("version"), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("1.invalid.0", response.getHits().getAt(0).field("version").getValue()); @@ -359,7 +359,7 @@ public void testStoreMalformed() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", 
SortOrder.ASC), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("2.2.0", hits[0].getSortValues()[0]); assertEquals("", hits[1].getSortValues()[0]); @@ -437,36 +437,36 @@ public void testMultiValues() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addSort("version", SortOrder.ASC), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("2", response.getHits().getAt(1).getId()); assertEquals("3", response.getHits().getAt(2).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "3.0.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "4.alpha.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2", response.getHits().getAt(0).getId()); }); // range assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.5.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.5.0")), - response -> assertEquals(3, response.getHits().getTotalHits().value) + response -> assertEquals(3, response.getHits().getTotalHits().value()) ); assertResponse( 
client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("5.0.0").to("6.0.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 8c245a4543abe..39519dc7931d0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -345,7 +345,7 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw assertResponse( prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false).setTrackTotalHits(true).setSize(10000), stateDocsResponse -> { - assertThat(stateDocsResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(5L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(5L)); int nonExistingJobDocsCount = 0; List nonExistingJobExampleIds = new ArrayList<>(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 2e096f3262cb6..9864c88d1405c 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -372,7 +372,7 @@ protected long countForecastDocs(String jobId, String forecastId) { .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) .filter(QueryBuilders.termQuery(Forecast.FORECAST_ID.getPreferredName(), forecastId)) ), - searchResponse -> count.set(searchResponse.getHits().getTotalHits().value) + searchResponse -> count.set(searchResponse.getHits().getTotalHits().value()) ); return count.get(); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java index 94bc3150cb12e..5f82d996c87fa 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java @@ -77,7 +77,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t ++numStateRecords; } } - assertThat(stateDocsResponse1.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse1.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } @@ -117,7 +117,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t } } - assertThat(stateDocsResponse2.getHits().getTotalHits().value, equalTo(3L)); + assertThat(stateDocsResponse2.getHits().getTotalHits().value(), equalTo(3L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(2)); @@ -154,7 +154,7 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndTimeAdvanced() throws ++numStateRecords; } } - 
assertThat(stateDocsResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index f5d0b23b437f3..8a6499ec3bb6a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -164,7 +164,7 @@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws + testDocsWithEmptyFeatureImportance + "] test docs with empty feature importance" + " from " - + sourceData.getHits().getTotalHits().value + + sourceData.getHits().getTotalHits().value() + " hits.\n" + badDocuments, trainingDocsWithEmptyFeatureImportance + testDocsWithEmptyFeatureImportance, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index 260a5dea0a3c1..388583f6f8656 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -295,7 +295,7 @@ private Quantiles getQuantiles(String jobId) throws Exception { prepareSearch(".ml-state*").setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))).setSize(1), response -> 
{ SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); try ( XContentParser parser = JsonXContent.jsonXContent.createParser( XContentParserConfiguration.EMPTY, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 8fbad7ccd3877..1505d374dfa08 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -396,7 +396,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti } assertResponse(prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value == docCount) { + if (searchResponse.getHits().getTotalHits().value() == docCount) { long seenCount = SearchResponseUtils.getTotalHitsValue( prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) @@ -404,7 +404,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti logger.debug("We stopped during analysis: [{}] < [{}]", seenCount, docCount); assertThat(seenCount, lessThan((long) docCount)); } else { - logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); + logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value(), docCount); } }); diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java index c15750de3b336..edc851def4468 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java @@ -77,7 +77,7 @@ public void testCountCorrelation() { .setSize(0) .setTrackTotalHits(true), percentilesSearch -> { - long totalHits = percentilesSearch.getHits().getTotalHits().value; + long totalHits = percentilesSearch.getHits().getTotalHits().value(); Percentiles percentiles = percentilesSearch.getAggregations().get("percentiles"); Tuple aggs = buildRangeAggAndSetExpectations( percentiles, diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java index 8fddfa47c377c..139d1b074c7b2 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java @@ -192,7 +192,7 @@ private boolean doesLocalAuditMessageExist(String message) { .setQuery(new MatchPhraseQueryBuilder("message", message)) .get(); try { - return response.getHits().getTotalHits().value > 0; + return response.getHits().getTotalHits().value() > 0; } finally { response.decRef(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 17fe20c5115ff..dfb960794537b 
100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -767,7 +767,7 @@ private static DataCounts getDataCountsFromIndex(String jobId) throws IOExceptio prepareSearch().setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId))), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { setOnce.set(new DataCounts(jobId)); return; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index a47b67e490851..210973f2601d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -189,7 +189,7 @@ private void initChunkedBucketSearcher( ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); if (totalHits > 0) { InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 6aaa1e50f2e8a..d676e6cc9d065 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -415,7 +415,7 @@ private void checkModelIdAgainstTags(String modelId, ActionListener listen ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_MODEL_ID_AND_TAGS_UNIQUE, modelId)) ); @@ -443,7 +443,7 @@ private void checkTagsAgainstModelIds(List tags, ActionListener li ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_TAGS_AND_MODEL_IDS_UNIQUE, tags)) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 9db8a72f0bb14..4161006c438b1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -433,7 +433,7 @@ private static void checkDestIndexIsEmptyIfExists( TransportSearchAction.TYPE, destEmptySearch, ActionListener.wrap(searchResponse -> { - if (searchResponse.getHits().getTotalHits().value > 0) { + if (searchResponse.getHits().getTotalHits().value() > 0) { listener.onFailure(ExceptionsHelper.badRequestException("dest index [{}] must be empty", destIndex)); } else { listener.onResponse(startContext); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java index f0e03a1e94973..7c41dbd463413 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java @@ -62,7 +62,7 @@ public static DataExtractor.DataSummary getDataSummary(SearchResponse searchResp } else { Long earliestTime = toLongIfFinite((aggregations.get(EARLIEST_TIME)).value()); Long latestTime = toLongIfFinite((aggregations.get(LATEST_TIME)).value()); - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); return new DataExtractor.DataSummary(earliestTime, latestTime, totalHits); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 20da61a3d6910..7829adb395675 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -226,7 +226,7 @@ public void findDatafeedIdsForJobIds(Collection jobIds, ActionListenerdelegateFailureAndWrap((delegate, response) -> { Set datafeedIds = new HashSet<>(); // There cannot be more than one datafeed per job - assert response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { @@ -259,7 +259,7 @@ public void findDatafeedsByJobIds( listener.delegateFailureAndWrap((delegate, response) -> { Map datafeedsByJobId = new HashMap<>(); // There cannot be more than one datafeed per job - assert 
response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { DatafeedConfig.Builder builder = parseLenientlyFromSource(hit.getSourceRef()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index c890ab599c380..315d2249d00cb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -377,7 +377,7 @@ public DataSummary collectDataSummary() { SearchRequestBuilder searchRequestBuilder = buildDataSummarySearchRequestBuilder(); SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); try { - long rows = searchResponse.getHits().getTotalHits().value; + long rows = searchResponse.getHits().getTotalHits().value(); LOGGER.debug(() -> format("[%s] Data summary rows [%s]", context.jobId, rows)); return new DataSummary(rows, organicFeatures.length + processedFeatures.length); } finally { @@ -396,7 +396,7 @@ public void collectDataSummaryAsync(ActionListener dataSummaryActio TransportSearchAction.TYPE, searchRequestBuilder.request(), dataSummaryActionListener.delegateFailureAndWrap( - (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)) + (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value(), numberOfFields)) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index dfcc12d98be41..64cf493028ad1 100644 
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -169,7 +169,7 @@ private InferenceState restoreInferenceState() { ); try { Max maxIncrementalIdAgg = searchResponse.getAggregations().get(DestinationIndex.INCREMENTAL_ID); - long processedTestDocCount = searchResponse.getHits().getTotalHits().value; + long processedTestDocCount = searchResponse.getHits().getTotalHits().value(); Long lastIncrementalId = processedTestDocCount == 0 ? null : (long) maxIncrementalIdAgg.value(); if (lastIncrementalId != null) { LOGGER.debug( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index 482e82f9ec303..fdd4bdd120f6a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -115,7 +115,7 @@ private void searchIfTestDocsExist(ActionListener listener) { ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, - listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value > 0)) + listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value() > 0)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index 3ef2affa5d399..0b3dd573deaae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -70,7 +70,7 @@ private TrainTestSplitter createSingleClassSplitter(Regression regression) { regression.getDependentVariable(), regression.getTrainingPercent(), regression.getRandomizeSeed(), - searchResponse.getHits().getTotalHits().value + searchResponse.getHits().getTotalHits().value() ); } finally { searchResponse.decRef(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index f56c589aea19a..c4396c4f9d2c8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -196,7 +196,7 @@ private void doSearch( numDocsWritten += searchResponse.getHits().getHits().length; boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize - || searchResponse.getHits().getTotalHits().value == numDocsWritten; + || searchResponse.getHits().getTotalHits().value() == numDocsWritten; if (endOfSearch) { successConsumer.accept(Boolean.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index f493c735d87ea..ff5f37427b18f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -1008,7 +1008,7 @@ public void expandIds( ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - long totalHitCount = 
response.getHits().getTotalHits().value + foundResourceIds.size(); + long totalHitCount = response.getHits().getTotalHits().value() + foundResourceIds.size(); Set foundFromDocs = new HashSet<>(); for (SearchHit hit : response.getHits().getHits()) { Map docSource = hit.getSourceAsMap(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 8493513f40bd6..df9a187f59616 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -683,7 +683,7 @@ public void groupExists(String groupId, ActionListener listener) { ML_ORIGIN, searchRequest, ActionListener.wrap( - response -> listener.onResponse(response.getHits().getTotalHits().value > 0), + response -> listener.onResponse(response.getHits().getTotalHits().value() > 0), listener::onFailure ), client::search diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index b9cc1902b7ab6..0f3abe3ab8c20 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -352,7 +352,7 @@ public void deleteJobDocuments( } } SearchResponse searchResponse = item.getResponse(); - if (searchResponse.getHits().getTotalHits().value > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { + if (searchResponse.getHits().getTotalHits().value() > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { needToRunDBQTemp = true; } else { indicesToDelete.add(indexNames.get()[i]); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index f9e4e62e4e3bc..51b3e0b55d75b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -870,7 +870,7 @@ public void buckets( throw QueryPage.emptyQueryPage(Bucket.RESULTS_FIELD); } - QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, Bucket.RESULTS_FIELD); + QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), Bucket.RESULTS_FIELD); if (query.isExpand()) { Iterator bucketsToExpand = buckets.results() @@ -1086,7 +1086,7 @@ public void categoryDefinitions( } QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), CategoryDefinition.RESULTS_FIELD ); handler.accept(result); @@ -1143,7 +1143,7 @@ public void records( } QueryPage queryPage = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), AnomalyRecord.RESULTS_FIELD ); handler.accept(queryPage); @@ -1207,7 +1207,7 @@ public void influencers( } QueryPage result = new QueryPage<>( influencers, - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Influencer.RESULTS_FIELD ); handler.accept(result); @@ -1375,7 +1375,7 @@ private void modelSnapshots( QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), ModelSnapshot.RESULTS_FIELD ); handler.accept(result); @@ -1411,7 +1411,7 @@ public QueryPage modelPlot(String jobId, int from, int size) { } } - return new QueryPage<>(results, 
searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1444,7 +1444,7 @@ public QueryPage categorizerStats(String jobId, int from, int } } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1700,7 +1700,7 @@ public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener(events, response.getHits().getTotalHits().value, ScheduledEvent.RESULTS_FIELD)); + handler.onResponse(new QueryPage<>(events, response.getHits().getTotalHits().value(), ScheduledEvent.RESULTS_FIELD)); } catch (Exception e) { handler.onFailure(e); } @@ -1901,7 +1901,7 @@ public void calendars(CalendarQueryBuilder queryBuilder, ActionListener(calendars, response.getHits().getTotalHits().value, Calendar.RESULTS_FIELD)); + listener.onResponse(new QueryPage<>(calendars, response.getHits().getTotalHits().value(), Calendar.RESULTS_FIELD)); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 886c19a65a4d0..194759c026a30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -168,7 +168,7 @@ private List findForecastsToDelete(SearchResponse searchResponse) List forecastsToDelete = new ArrayList<>(); SearchHits hits = searchResponse.getHits(); - if (hits.getTotalHits().value > MAX_FORECASTS) { + if (hits.getTotalHits().value() > 
MAX_FORECASTS) { LOGGER.info("More than [{}] forecasts were found. This run will only delete [{}] of them", MAX_FORECASTS, MAX_FORECASTS); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java index 86488a647baa1..ef6087f021e9d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java @@ -111,7 +111,7 @@ private SearchResponse initScroll() { ); SearchResponse searchResponse = client.search(searchRequest).actionGet(); - totalHits = searchResponse.getHits().getTotalHits().value; + totalHits = searchResponse.getHits().getTotalHits().value(); scrollId = searchResponse.getScrollId(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java index f63f6e0549179..802bcaf3b342e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java @@ -110,7 +110,7 @@ public Deque next() { SearchResponse searchResponse = doSearch(searchAfterFields()); try { if (trackTotalHits && totalHits.get() == 0) { - totalHits.set(searchResponse.getHits().getTotalHits().value); + totalHits.set(searchResponse.getHits().getTotalHits().value()); } return mapHits(searchResponse); } finally { diff --git a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java 
b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index daea70abd29e3..7ddaa53a59914 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -149,7 +149,7 @@ public void testMonitoringBulk() throws Exception { assertResponse(client().prepareSearch(".monitoring-" + system.getSystem() + "-" + TEMPLATE_VERSION + "-*"), response -> { // exactly 3 results are expected - assertThat("No monitoring documents yet", response.getHits().getTotalHits().value, equalTo(3L)); + assertThat("No monitoring documents yet", response.getHits().getTotalHits().value(), equalTo(3L)); final List> sources = Arrays.stream(response.getHits().getHits()) .map(SearchHit::getSourceAsMap) @@ -165,7 +165,7 @@ public void testMonitoringBulk() throws Exception { assertCheckedResponse(client().prepareSearch(monitoringIndex), response -> { final SearchHits hits = response.getHits(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat( "Monitoring documents must have the same timestamp", Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(), diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 93e055b58ddc3..d68395ef7656f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -113,7 
+113,7 @@ public void testExport() throws Exception { assertResponse( prepareSearch(".monitoring-*"), - response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value)) + response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value())) ); }); @@ -260,7 +260,7 @@ private void checkMonitoringDocs() { DateFormatter dateFormatter = DateFormatter.forPattern(customTimeFormat).withZone(ZoneOffset.UTC); assertResponse(prepareSearch(".monitoring-*").setSize(100), rsp -> { - assertThat(rsp.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(rsp.getHits().getTotalHits().value(), greaterThan(0L)); for (SearchHit hit : rsp.getHits().getHits()) { final Map source = hit.getSourceAsMap(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index d6e15ea25c8e1..d382905c1c9c2 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -293,12 +293,14 @@ private void assertNoWatchesExist() { .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { List invalidWatches = new ArrayList<>(); for (SearchHit hit : response.getHits().getHits()) { invalidWatches.add(ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap())); } - fail("Found [" + response.getHits().getTotalHits().value + "] invalid watches when none were expected: " + invalidWatches); + fail( + "Found [" + 
response.getHits().getTotalHits().value() + "] invalid watches when none were expected: " + invalidWatches + ); } }); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java index 48673d2002170..f447f67b4cdd2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java @@ -179,7 +179,7 @@ private void searchProfilingEvents( .setQuery(request.getQuery()) .setTrackTotalHits(true) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); EventsIndex resampledIndex = mediumDownsampled.getResampledIndex(request.getSampleSize(), sampleCount); log.debug( "User requested [{}] samples, [{}] samples matched in [{}]. 
Picking [{}]", @@ -220,7 +220,7 @@ private void searchGenericEvents( .setPreference(String.valueOf(request.hashCode())) .setQuery(request.getQuery()) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); int requestedSampleCount = request.getSampleSize(); // random sampler aggregation does not support sampling rates between 0.5 and 1.0 -> clamp to 1.0 if (sampleCount <= requestedSampleCount * 2L) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index 9dd46e778fb9a..dbb4cf4dc6856 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -180,7 +180,7 @@ private void execute(ClusterState state, ActionListener { - boolean hasData = searchResponse.getHits().getTotalHits().value > 0; + boolean hasData = searchResponse.getHits().getTotalHits().value() > 0; listener.onResponse( new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated, anyPre891Data, hasData) ); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java index b501967524a6b..29c471296b5d1 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java @@ -218,7 +218,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") 
.setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -355,7 +355,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -483,7 +483,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java index 7269d9c3e5e7f..ed26aa50ffa62 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java @@ -217,7 +217,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -356,7 +356,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) 
.addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -486,7 +486,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java index 2a17a4a1152cf..8df4e3a8dbea5 100644 --- a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java +++ b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java @@ -120,7 +120,7 @@ private void assertPinnedPromotions(PinnedQueryBuilder pqb, LinkedHashSet { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, lessThanOrEqualTo((long) numRelevantDocs + pins.size())); // Check pins are sorted by increasing score, (unlike organic, there are no duplicate scores) @@ -193,7 +193,7 @@ public void testExhaustiveScoring() throws Exception { private void assertExhaustiveScoring(PinnedQueryBuilder pqb) { assertResponse(prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH), response -> { - long numHits = 
response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, equalTo(2L)); }); } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index faf41e7e655a8..eab73fbe5ad04 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -305,7 +305,7 @@ public void testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQuerying assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards)); assertThat(newSearchResponse.getFailedShards(), equalTo(0)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo((long) numDocsWithinRange)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsWithinRange)); }); // test with SearchShardsAPI @@ -655,7 +655,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() assertThat(searchResponse.getFailedShards(), equalTo(indexOutsideSearchRangeShardCount)); assertThat(searchResponse.getSkippedShards(), equalTo(searchableSnapshotShardCount)); assertThat(searchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -736,7 +736,7 @@ public void 
testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() // a shard that's available in order to construct the search response assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards - 1)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); }); @@ -850,7 +850,7 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo SearchResponse response = client().search(request).actionGet(); logger.info( "[TEST DEBUG INFO] Search hits: {} Successful shards: {}, failed shards: {}, skipped shards: {}, total shards: {}", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), response.getSuccessfulShards(), response.getFailedShards(), response.getSkippedShards(), diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java index 7615723860cff..9888afdd16499 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java @@ -79,7 +79,7 @@ public void testSearchableSnapshotRelocationDoNotUseSnapshotBasedRecoveries() th ensureGreen(restoredIndexName); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mockLog.assertAllExpectationsMatched(); } 
diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java index a3da932398fb1..1e76477378da2 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java @@ -66,7 +66,7 @@ public void testRepositoryUsedBySearchableSnapshotCanBeUpdatedButNotUnregistered Storage storage = randomFrom(Storage.values()); String restoredIndexName = (storage == Storage.FULL_COPY ? "fully-mounted-" : "partially-mounted-") + indexName + '-' + i; mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, Settings.EMPTY, storage); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mountedIndices[i] = restoredIndexName; } @@ -183,7 +183,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); final String mountedAgain = randomValueOtherThan(mounted, () -> randomAlphaOfLength(10).toLowerCase(Locale.ROOT)); final SnapshotRestoreException exception = expectThrows( @@ -208,7 +208,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? 
equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value()); assertAcked(indicesAdmin().prepareDelete(mountedAgain)); assertAcked(indicesAdmin().prepareDelete(mounted)); @@ -240,7 +240,7 @@ public void testDeletionOfSnapshotSettingCannotBeUpdated() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose(mounted)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 40b7e08936fa3..7eaf5d8f060c6 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -179,7 +179,7 @@ public void testCleanUpAfterIndicesAreDeleted() throws Exception { ) .setSize(0), res -> { - final long remainingEntriesInCache = res.getHits().getTotalHits().value; + final long remainingEntriesInCache = res.getHits().getTotalHits().value(); if (indicesToDelete.contains(mountedIndex)) { assertThat(remainingEntriesInCache, equalTo(0L)); } else if (snapshotId.equals(SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings))) { diff --git 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index a21e3e6beabce..21e67212f1f51 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -549,7 +549,7 @@ public void run() { try (listeners) { executeSearch(new SearchRequest().source(getSearchSourceBuilder().trackTotalHits(true)), (searchResponse, refs) -> { assert total.get() == 0L; - total.set(searchResponse.getHits().getTotalHits().value); + total.set(searchResponse.getHits().getTotalHits().value()); handleSearchResponse(searchResponse, refs); }); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java index 6ffa09dc1f265..6d9110b564862 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java @@ -224,7 +224,7 @@ protected void assertSearchResponseContainsEmptyResult(Response response) { assertOK(response); SearchResponse searchResponse = SearchResponseUtils.responseAsSearchResponse(response); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + 
assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java index 767452e6fcae7..4b994ce82d92f 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java @@ -282,7 +282,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... indices) thr searchResponse = SearchResponseUtils.parseSearchResponse(parser); } try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java index d5e77c1694640..1602a097b1b08 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java @@ -362,7 +362,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr assertOK(response); final SearchResponse searchResponse = SearchResponseUtils.parseSearchResponse(responseAsParser(response)); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java index 8b18359fb8310..1345e275fab17 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java @@ -119,7 +119,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 69331fa448113..4cbd1cab21af9 100644 --- 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -419,7 +419,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java index 505b82b39b960..53c622898476a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java @@ -230,7 +230,7 @@ public void testAnonymousUserFromQueryClusterWorks() throws Exception { Arrays.stream(searchResponse5.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toList()), containsInAnyOrder(".security-7") ); - assertThat(searchResponse5.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(searchResponse5.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); } finally { searchResponse5.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java index 3871029b3b44b..6fa3ef1b4ef63 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java @@ -134,7 +134,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly final Request scrollRequest = new Request("GET", "/_search/scroll"); final String scrollId; try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics")); documentFieldValues.add(searchResponse.getHits().getHits()[0].getSourceAsMap().get("name")); scrollId = searchResponse.getScrollId(); @@ -153,7 +153,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly responseAsParser(performRequestWithRemoteMetricUser(scrollRequest)) ); try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat( Arrays.stream(scrollResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics") diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java index b1a76a4559812..9a1d653132d2d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java @@ -81,7 +81,7 @@ public void testDateMathExpressionsCanBeAuthorized() throws Exception { assertResponse( client.prepareMultiSearch().add(client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()).request()), - multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)) + multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)) ); UpdateResponse updateResponse = client.prepareUpdate(expression, response.getId()) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index b0572b265a45b..a5f827c2a4b53 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -437,7 +437,7 @@ private void assertSearchResponse(SearchRequestBuilder requestBuilder, Set assertResponse(prepareSearch("alias" + role), searchResponse2 -> { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(searchResponse2.getHits().getTotalHits().value())); for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index c0866fa7ea694..87ca7d279c709 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -474,13 +474,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -495,13 +495,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); @@ -522,7 +522,7 @@ public void testMSearch() throws Exception { ), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(2L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -531,7 +531,7 @@ public void testMSearch() throws Exception { assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(2L)); + 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -898,7 +898,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field1") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field1")); @@ -914,7 +914,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field2") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field2")); @@ -929,7 +929,7 @@ public void testKnnSearch() throws Exception { .setQuery(query) .setSize(10), response -> { - assertEquals(10, response.getHits().getTotalHits().value); + assertEquals(10, response.getHits().getTotalHits().value()); assertEquals(10, response.getHits().getHits().length); } ); @@ -1265,7 +1265,7 @@ public void testScroll() throws Exception { .get(); do { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1325,7 +1325,7 @@ public void testReaderId() throws Exception { 
.setQuery(termQuery("field1", "value1")) .get(); assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java index 34eecd57b53d5..01020a428c318 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java @@ -208,7 +208,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -231,7 +231,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), 
equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -254,7 +254,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index bffa53b1f4da6..66c8c0a5b1b52 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -956,10 +956,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } @@ -975,10 +975,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); } @@ -993,11 +993,11 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1013,9 +1013,9 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); } ); @@ -1029,12 +1029,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1051,12 +1051,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1073,12 +1073,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1095,11 +1095,11 @@ public void testMSearchApi() throws Exception { 
.add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1132,7 +1132,7 @@ public void testScroll() throws Exception { .get(); do { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1191,7 +1191,7 @@ public void testPointInTimeId() throws Exception { .setQuery(constantScoreQuery(termQuery("field1", "value1"))) .setFetchSource(true), response -> { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); 
assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1281,7 +1281,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); } else { user2SearchResponse.decRef(); @@ -1289,7 +1289,7 @@ public void testScrollWithQueryCache() { user2SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) ).prepareSearchScroll(user2SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); if (randomBoolean()) { // maybe reuse the scroll even if empty @@ -1309,7 +1309,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1319,7 +1319,7 @@ public void testScrollWithQueryCache() { user1SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) 
).prepareSearchScroll(user1SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); if (scrolledDocsUser1 < numDocs) { assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); @@ -2042,7 +2042,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -2061,7 +2061,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index d4375d15e6a6d..7d99d5817bdc0 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -103,20 +103,20 @@ public void testSearchAndMSearch() throws Exception { indexRandom(true, prepareIndex(index).setSource(field, "bar")); 
assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), response -> { - final long hits = response.getHits().getTotalHits().value; + final long hits = response.getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); assertResponse( client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), - response2 -> assertEquals(response2.getHits().getTotalHits().value, hits) + response2 -> assertEquals(response2.getHits().getTotalHits().value(), hits) ); final long multiHits; MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) .get(); try { - multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); } finally { multiSearchResponse.decRef(); @@ -125,7 +125,7 @@ public void testSearchAndMSearch() throws Exception { singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); try { - assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); + assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), multiHits); } finally { multiSearchResponse.decRef(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index af54f71779f08..6f8ea0f103a56 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -312,7 +312,7 @@ public void testMultiNamesWorkCorrectly() { assertResponse( userAClient.prepareSearch("alias1").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); final ElasticsearchSecurityException e1 = expectThrows( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 78146e58e91e2..e178f4bf3eb6c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -343,7 +343,7 @@ private void testAddUserAndRoleThenAuth(String username, String roleName) { String token = basicAuthHeaderValue(username, new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); assertClusterHealthOnlyAuthorizesWhenAnonymousRoleActive(token); @@ -366,7 +366,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - 
searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); preparePutUser("joe", "s3krit-password2", hasher, SecuritySettingsSource.TEST_ROLE).get(); @@ -382,7 +382,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { token = basicAuthHeaderValue("joe", new SecureString("s3krit-password2")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); } @@ -403,7 +403,7 @@ public void testCreateDeleteAuthenticate() { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); DeleteUserResponse response = new DeleteUserRequestBuilder(client()).username("joe").get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index f34983f7f125c..0acc281dd8440 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -213,7 +213,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - 
assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -231,7 +231,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -249,7 +249,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -267,7 +267,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, 
greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -317,7 +317,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); } @@ -336,7 +336,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java index 1b62c79236a9c..83d47a657ac8a 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java @@ -48,13 +48,13 @@ public void testScrollIsPerUser() throws Exception { indexRandom(true, docs); assertResponse(prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1), response -> { - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); if (randomBoolean()) { assertResponse( client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)), response2 -> { - assertEquals(numDocs, response2.getHits().getTotalHits().value); + assertEquals(numDocs, response2.getHits().getTotalHits().value()); assertEquals(1, response2.getHits().getHits().length); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index 4b8fbfd41acdf..437fb76351176 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -856,7 +856,7 @@ private SuggestProfilesResponse.ProfileHit[] doSuggest(Set dataKeys, Str final SuggestProfilesRequest suggestProfilesRequest = new SuggestProfilesRequest(dataKeys, name, 10, hint); final SuggestProfilesResponse suggestProfilesResponse = client().execute(SuggestProfilesAction.INSTANCE, suggestProfilesRequest) .actionGet(); - assertThat(suggestProfilesResponse.getTotalHits().relation, is(TotalHits.Relation.EQUAL_TO)); + assertThat(suggestProfilesResponse.getTotalHits().relation(), is(TotalHits.Relation.EQUAL_TO)); return suggestProfilesResponse.getProfileHits(); 
} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 90566e25b4ea5..0a8947c49c606 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -2254,7 +2254,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No api keys found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryApiKeysResult.EMPTY); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 698cda1683a20..bf09dff315c2d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -179,7 +179,7 @@ public void queryUsers(SearchRequest searchRequest, ActionListener { - final long total = searchResponse.getHits().getTotalHits().value; + final long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No users found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryUserResults.EMPTY); @@ -214,7 +214,7 @@ void getUserCount(final ActionListener listener) { .setSize(0) .setTrackTotalHits(true) .request(), - listener.safeMap(response -> response.getHits().getTotalHits().value), + listener.safeMap(response -> 
response.getHits().getTotalHits().value()), client::search ) ); @@ -706,7 +706,7 @@ void getAllReservedUserInfo(ActionListener> listen @Override public void onResponse(SearchResponse searchResponse) { Map userInfos = new HashMap<>(); - assert searchResponse.getHits().getTotalHits().value <= 10 + assert searchResponse.getHits().getTotalHits().value() <= 10 : "there are more than 10 reserved users we need to change this to retrieve them all!"; for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map sourceMap = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 9ddda193dba39..52b39e2aae694 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -278,7 +278,7 @@ public void queryRoleDescriptors(SearchSourceBuilder searchSourceBuilder, Action TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No roles found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryRoleResult.EMPTY); @@ -730,28 +730,28 @@ public void onResponse(MultiSearchResponse items) { if (responses[0].isFailure()) { usageStats.put("size", 0); } else { - usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value); + usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value()); } if (responses[1].isFailure()) { usageStats.put("fls", false); } else { - usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("fls", 
responses[1].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[2].isFailure()) { usageStats.put("dls", false); } else { - usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[3].isFailure()) { usageStats.put("remote_indices", 0); } else { - usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value()); } if (responses[4].isFailure()) { usageStats.put("remote_cluster", 0); } else { - usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value()); } delegate.onResponse(usageStats); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index dd2377ec773c4..5461311196617 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -414,19 +414,19 @@ public void usageStats(ActionListener> listener) { logger.debug("error on counting total profiles", items[0].getFailure()); usage.put("total", 0L); } else { - usage.put("total", items[0].getResponse().getHits().getTotalHits().value); + usage.put("total", items[0].getResponse().getHits().getTotalHits().value()); } if (items[1].isFailure()) { logger.debug("error on counting enabled profiles", items[0].getFailure()); usage.put("enabled", 0L); } else { - usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value); + usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value()); } 
if (items[2].isFailure()) { logger.debug("error on counting recent profiles", items[0].getFailure()); usage.put("recent", 0L); } else { - usage.put("recent", items[2].getResponse().getHits().getTotalHits().value); + usage.put("recent", items[2].getResponse().getHits().getTotalHits().value()); } listener.onResponse(usage); }, listener::onFailure) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index 5ec76a8dc3d01..5cd8cba763d3d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -77,8 +77,8 @@ public void migrate(SecurityIndexManager indexManager, Client client, ActionList client.search(countRequest, ActionListener.wrap(response -> { // If there are no roles, skip migration - if (response.getHits().getTotalHits().value > 0) { - logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); + if (response.getHits().getTotalHits().value() > 0) { + logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value() + "] roles"); updateRolesByQuery(indexManager, client, filterQuery, listener); } else { listener.onResponse(null); diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index d42d45e430627..e5171a7c51650 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -281,7 +281,7 @@ public void 
testRetentionWhileSnapshotInProgress() throws Exception { completedSnapshotName, Strings.arrayToCommaDelimitedString(resp.getHits().getHits()) ); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); } ); }); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index d1eaff1bef1b2..9b67fa0a893d2 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -1595,7 +1595,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { int docIdToMatch = randomIntBetween(0, docCount - 1); assertResponse(searchRequestBuilder.setQuery(QueryBuilders.termQuery("field", docIdToMatch)), searchResponse -> { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = searchResponse.getHits().getAt(0); Map source = searchHit.getSourceAsMap(); assertThat(source, is(notNullValue())); @@ -1613,7 +1613,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { private void assertSearchResponseContainsAllIndexedDocs(SearchResponse searchResponse, long docCount) { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docCount)); + 
assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docCount)); for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { SearchHit searchHit = searchResponse.getHits().getAt(i); Map source = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java index 3c64d140e2b56..e7b9156d5fb66 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java @@ -129,7 +129,7 @@ public void testKnownIssueWithCellLeftOfDatelineTouchingPolygonOnRightOfDateline client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), innerResponse -> assertThat( "Bucket " + bucket.getKeyAsString(), - innerResponse.getHits().getTotalHits().value, + innerResponse.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); @@ -320,7 +320,7 @@ private void assertQuery(List buckets, BiFunction assertThat( "Expected hits at precision " + precision + " for H3 cell " + bucket.getKeyAsString(), - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java index b4a3a07502abf..b4d7a472591bd 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java +++ 
b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java @@ -175,7 +175,7 @@ public void testStorePolygonDateLine() throws Exception { indexRandom(true, prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); assertNoFailuresAndResponse(client().prepareSearch("test").setFetchSource(false).addStoredField("shape"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.field("shape").getValue(), instanceOf(BytesRef.class)); BytesRef bytesRef = searchHit.field("shape").getValue(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java index 1c013aba52261..4f23b6de4c37d 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java @@ -247,7 +247,7 @@ public void testFieldAlias() { assertResponse( client().prepareSearch(INDEX).setQuery(new ShapeQueryBuilder("alias", queryGeometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertTrue(response.getHits().getTotalHits().value > 0); + assertTrue(response.getHits().getTotalHits().value() > 0); } ); } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java index 1ac6bf3b6fd31..e26066cd89c50 100644 --- 
a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java @@ -112,7 +112,7 @@ public void testIndexPointsFilterRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -123,7 +123,7 @@ public void testIndexPointsFilterRectangle() { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -138,7 +138,7 @@ public void testIndexPointsCircle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -154,7 +154,7 @@ 
public void testIndexPointsPolygon() { .setQuery(new ShapeQueryBuilder(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(searchHits.getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); } @@ -175,7 +175,7 @@ public void testIndexPointsMultiPolygon() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, mp).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("3"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("3"))); @@ -191,7 +191,7 @@ public void testIndexPointsRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -232,7 +232,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } diff --git 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java index 66f5597be543e..2713afc149e05 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java @@ -230,8 +230,8 @@ public void testGeoShapeQueryAcrossDateline() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value(), equalTo(1L)); } } } @@ -261,8 +261,8 @@ public void testShapeQuery() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(centerPointQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(centerPointQuery, 1).totalHits.value(), equalTo(1L)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 9cf60ec3bb2e4..d30bc8f99cc34 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -224,7 +224,7 @@ protected static void logSearchResponse(SearchResponse response, Logger logger) } var 
totalHits = response.getHits().getTotalHits(); - var hits = totalHits != null ? "hits " + totalHits.relation + " " + totalHits.value + ", " : ""; + var hits = totalHits != null ? "hits " + totalHits.relation() + " " + totalHits.value() + ", " : ""; logger.trace( "Got search response [{}{} aggregations: [{}], {} failed shards, {} skipped shards, " + "{} successful shards, {} total shards, took {}, timed out [{}]]", @@ -549,7 +549,7 @@ protected List initBucketExtractors(SearchResponse response) { List exts = new ArrayList<>(refs.size()); TotalHits totalHits = response.getHits().getTotalHits(); - ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? -1L : totalHits.value); + ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? -1L : totalHits.value()); for (QueryContainer.FieldInfo ref : refs) { exts.add(createExtractor(ref.extraction(), totalCount)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java index 78976ea7e83c0..cf52a5f5d7126 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java @@ -76,7 +76,7 @@ public Object extract(Bucket bucket) { throw new SqlIllegalArgumentException("Cannot find an aggregation named {}", name); } - if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value == 0) { + if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value() == 0) { return null; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index 
8ee23e38f9ffe..0ba29fef8e06d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -123,7 +123,7 @@ protected void masterOperation( Arrays.toString(transformCountSuccess.getShardFailures()) ); } - long totalTransforms = transformCountSuccess.getHits().getTotalHits().value; + long totalTransforms = transformCountSuccess.getHits().getTotalHits().value(); if (totalTransforms == 0) { var usage = new TransformFeatureSetUsage(transformsCountByState, Collections.emptyMap(), new TransformIndexerStats()); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java index f49d5fc96f3ab..cd06a4cadaa37 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java @@ -84,7 +84,7 @@ public void sourceHasChanged(TransformCheckpoint lastCheckpoint, ActionListener< client, TransportSearchAction.TYPE, searchRequest, - ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value > 0L), listener::onFailure) + ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value() > 0L), listener::onFailure) ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index ffc4b48f9cc30..9d5175922c892 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -509,7 +509,7 @@ public void expandTransformIds( final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, allowNoMatch); executeAsyncWithOrigin(request, foundConfigsListener.delegateFailureAndWrap((l, searchResponse) -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); // important: preserve order Set ids = Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); Set configs = Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); @@ -589,7 +589,7 @@ public void resetTransform(String transformId, ActionListener listener) .trackTotalHitsUpTo(1) ); executeAsyncWithOrigin(TransportSearchAction.TYPE, searchRequest, deleteListener.delegateFailureAndWrap((l, searchResponse) -> { - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { listener.onFailure( new ResourceNotFoundException(TransformMessages.getMessage(TransformMessages.REST_UNKNOWN_TRANSFORM, transformId)) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java index 23bab56de5ec9..2de810b2b902d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java @@ -207,7 +207,7 @@ private SearchRequest buildSearchRequestForValidation(String 
logId, SourceConfig @Override public void getInitialProgressFromResponse(SearchResponse response, ActionListener progressListener) { - progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value, 0L, 0L)); + progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value(), 0L, 0L)); } } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java index 706337768a299..5f7c6490e51f1 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java @@ -50,7 +50,7 @@ public void testCanUseAnyConcreteIndexName() throws Exception { assertBusy(() -> { assertResponse( prepareSearch(watchResultsIndex).setTrackTotalHits(true), - searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value, greaterThan(0)) + searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value(), greaterThan(0)) ); }); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java index f1ad29607b5b8..7fa5365afa0ab 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java @@ -120,7 +120,7 @@ private void assertTotalHistoryEntries(String id, long expectedCount) throws Exc assertResponse( prepareSearch(HistoryStoreField.DATA_STREAM + 
"*").setSize(0) .setSource(new SearchSourceBuilder().query(QueryBuilders.boolQuery().must(termQuery("watch_id", id)))), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(oneOf(expectedCount, expectedCount + 1))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(oneOf(expectedCount, expectedCount + 1))) ); }); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 60867ba5d4410..4068c534013b9 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -122,7 +122,7 @@ public void testActionConditionWithHardFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -176,7 +176,7 @@ public void testActionConditionWithFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -236,7 +236,7 @@ public void testActionCondition() 
throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 5b7ea39079f28..dac87eaa6f034 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -105,7 +105,7 @@ public void testEmailFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 97347de1ea23e..ffac36846414e 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -102,7 
+102,7 @@ public void testHttpFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 7dde279fb90db..8dec5287ae607 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -54,7 +54,7 @@ public void testIndexActionFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 567d4acfa45e5..b268caa45f471 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -72,7 +72,7 @@ public void testHttpFields() 
throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 5dc537fc259d9..5eaf27e7b2670 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -377,7 +377,7 @@ protected void assertWatchWithMinimumPerformedActionsCount( lastResponse.set(searchResponse); assertThat( "could not find executed watch record for watch " + watchName, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(minimumExpectedWatchActionsWithActionPerformed) ); if (assertConditionMet) { @@ -396,7 +396,7 @@ protected void assertWatchWithMinimumPerformedActionsCount( } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -452,7 +452,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long searchResponse -> { 
lastResponse.set(searchResponse); assertThat( - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(expectedWatchActionsWithNoActionNeeded) ); } @@ -461,7 +461,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -497,7 +497,7 @@ protected void assertWatchWithMinimumActionsCount(final String watchName, final searchResponse -> { assertThat( "could not find executed watch record", - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(recordCount) ); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java index 99640d1ebc3ea..03f1e6cb57eb8 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java @@ -296,8 +296,8 @@ private void assertSingleExecutionAndCompleteWatchHistory(final long numberOfWat AtomicLong successfulWatchExecutions = new AtomicLong(); refresh(); assertResponse(prepareSearch("output"), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(numberOfWatches))); - 
successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value); + assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(numberOfWatches))); + successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value()); }); // the watch history should contain entries for each triggered watch, which a few have been marked as not executed @@ -378,7 +378,7 @@ public void testWatchRecordSavedTwice() throws Exception { // the actual documents are in the output index refresh(); assertResponse(prepareSearch(HistoryStoreField.DATA_STREAM).setSize(numRecords), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo((long) numRecords)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo((long) numRecords)); for (int i = 0; i < numRecords; i++) { assertThat(searchResponse.getHits().getAt(i).getSourceAsMap().get("state"), is(ExecutionState.EXECUTED.id())); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java index e5f4091ca89eb..f3648580691cb 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -55,7 +55,7 @@ public void testHistoryOnRejection() throws Exception { assertBusy(() -> { flushAndRefresh(".watcher-history-*"); assertResponse(prepareSearch(".watcher-history-*"), searchResponse -> { - assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(2L)); + assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(2L)); 
assertThat( "Did not find watcher history for rejected watch", Arrays.stream(searchResponse.getHits().getHits()) diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index 7ff293ed9b150..fbb1996a4cf42 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -69,7 +69,7 @@ public void testThatLoadingWithNonExistingIndexWorks() throws Exception { assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); assertResponse( prepareSearch(".watcher-history*").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(1L))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(1L))) ); }, 30, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 4298f641cbdd2..e12805f3ace09 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -79,7 +79,7 @@ public void testWatchMetadata() throws Exception { } assertNotNull(searchResponse); try { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); } finally { 
searchResponse.decRef(); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 92ac91a63e097..2ec6541275d04 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -139,13 +139,13 @@ public void testScriptTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); @@ -184,12 +184,12 @@ public void testSearchTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - 
assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); } @@ -223,13 +223,13 @@ public void testChainTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 5389f34212270..0ea9b432d3b0f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -340,7 +340,7 @@ private Collection loadWatches(ClusterState clusterState) { throw new ElasticsearchException("Partial response while loading watches"); } - if (response.getHits().getTotalHits().value == 0) { + if (response.getHits().getTotalHits().value() == 0) { return Collections.emptyList(); } diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 6775dca424bf1..dfa0c47493ed7 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -156,7 +156,7 @@ public Collection findTriggeredWatches(Collection watches SearchResponse response = null; try { response = client.search(searchRequest).actionGet(defaultSearchTimeout); - logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value); + logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value()); while (response.getHits().getHits().length != 0) { for (SearchHit hit : response.getHits()) { Wid wid = new Wid(hit.getId()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 9d6186e9c1c48..e6bd1b0efb95d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -91,7 +91,7 @@ SearchInput.Result doExecute(WatchExecutionContext ctx, WatcherSearchTemplateReq try { if (logger.isDebugEnabled()) { - logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value); + logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value()); } final Payload payload; diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java index 97ae29a26e68c..358a839e60ea5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java @@ -98,11 +98,11 @@ SearchRequest createSearchRequest(QueryWatchesAction.Request request) { } void transformResponse(SearchResponse searchResponse, ActionListener listener) { - assert searchResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert searchResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; List items = Arrays.stream(searchResponse.getHits().getHits()) .map(this::transformSearchHit) .toList(); - listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value, items)); + listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value(), items)); } QueryWatchesAction.Response.Item transformSearchHit(SearchHit searchHit) { diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 3406aa7ee27c4..d3f8fa89a7705 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -184,7 +184,7 @@ public void testTooBigKeywordField() throws IOException { Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("*a*", null, null); TopDocs wildcardFieldTopDocs = 
searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(1L)); reader.close(); dir.close(); @@ -231,12 +231,12 @@ public void testTooBigQueryField() throws IOException { String queryString = randomABString((IndexSearcher.getMaxClauseCount() * 2) + 1); Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(queryString, null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); // Test regexp query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); reader.close(); dir.close(); @@ -273,13 +273,13 @@ public void testTermAndPrefixQueryIgnoreWildcardSyntax() throws IOException { private void expectTermMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().termQuery(term, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } private void expectPrefixMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().prefixQuery(term, null, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } public void testSearchResultsVersusKeywordField() throws IOException { @@ -392,8 +392,8 @@ public void testSearchResultsVersusKeywordField() throws 
IOException { TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, values.size() + 1, Sort.RELEVANCE); assertThat( keywordFieldQuery + "\n" + wildcardFieldQuery, - wildcardFieldTopDocs.totalHits.value, - equalTo(kwTopDocs.totalHits.value) + wildcardFieldTopDocs.totalHits.value(), + equalTo(kwTopDocs.totalHits.value()) ); HashSet expectedDocs = new HashSet<>(); @@ -499,7 +499,7 @@ public void testRangeQueryVersusKeywordField() throws IOException { TopDocs kwTopDocs = searcher.search(keywordFieldQuery, 10, Sort.RELEVANCE); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.RELEVANCE); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(kwTopDocs.totalHits.value)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(kwTopDocs.totalHits.value())); HashSet expectedDocs = new HashSet<>(); for (ScoreDoc topDoc : kwTopDocs.scoreDocs) { diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index c8c72855eaf7a..435bf4f2e3503 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -381,7 +381,7 @@ private void assertDocs( try { logger.info(searchResponse); // check hit count - assertEquals(numDocs, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocs, searchResponse.getHits().getTotalHits().value()); // check that _index is properly set assertTrue(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).allMatch(index::equals)); // check that all _ids are there @@ -408,7 +408,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, 
searchResponse.getHits().getTotalHits().value()); assertEquals(id, searchResponse.getHits().getHits()[0].getId()); assertEquals(sourceForDoc(num), searchResponse.getHits().getHits()[0].getSourceAsString()); } finally { @@ -460,7 +460,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(typeCount, searchResponse.getHits().getTotalHits().value); + assertEquals(typeCount, searchResponse.getHits().getTotalHits().value()); for (SearchHit hit : searchResponse.getHits().getHits()) { DocumentField typeField = hit.field("_type"); assertNotNull(typeField); @@ -486,7 +486,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(0, searchResponse.getHits().getTotalHits().value); + assertEquals(0, searchResponse.getHits().getTotalHits().value()); assertEquals(numberOfShards, searchResponse.getSuccessfulShards()); // When all shards are skipped, at least one of them is queried in order to provide a proper search response. assertEquals(numberOfShards - 1, searchResponse.getSkippedShards()); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index dddba9b7b0fba..02dc679152bf4 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -445,7 +445,7 @@ private List getAllTokenIds() throws IOException { final SearchHits searchHits = response.getHits(); assertThat( "Search request used with size parameter that was too small to fetch all tokens.", - searchHits.getTotalHits().value, + searchHits.getTotalHits().value(), lessThanOrEqualTo(searchSize) ); final List tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> { From 88001073cb855e8867e5fb7881f9d1f44c2b3e12 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 12 Sep 2024 15:53:34 +0200 Subject: [PATCH 280/417] spotless --- .../elasticsearch/analysis/common/SynonymsAnalysisTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java index 7f14320954f2d..044aea31158c7 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java @@ -338,7 +338,7 @@ public void testShingleFilters() { .put( IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) - ) + ) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonyms.type", "synonym") .putList("index.analysis.filter.synonyms.synonyms", "programmer, developer") From 84bb2b4a3edbc9eaf56a71b374ce210af5ad8bc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 12 Sep 2024 16:40:58 +0200 Subject: [PATCH 281/417] Fixing more checkstyle and spotless issues after merging main --- .../legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java | 2 -- .../cluster/metadata/MetadataCreateIndexServiceTests.java | 1 - .../java/org/elasticsearch/cluster/metadata/MetadataTests.java | 1 - .../org/elasticsearch/index/engine/InternalEngineTests.java | 1 - 4 files changed, 5 deletions(-) diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java index c3b14f7d96493..3002d25b0115d 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java +++ 
b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java @@ -53,8 +53,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -@UpdateForV9 // can we remove this entire test? -@SuppressWarnings("deprecation") @UpdateForV9 @AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeFieldMapperTests extends MapperTestCase { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index e3244a2a0a52a..9aae549ddfe2e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexNotFoundException; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index b62364c7bb9da..ac4b31cd612c8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -2019,7 +2019,6 @@ public void testSystemAliasValidationNewSystemAndRegularFails() { ); } - public void testSystemAliasValidationAll8x() { final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, 
IndexVersion.current(), true); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 9463e6815101b..9ab26fd664d82 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -95,7 +95,6 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; From e916319da3f030e4733ac0622a7ab05d7c787686 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 12 Sep 2024 14:48:35 -0400 Subject: [PATCH 282/417] Remove awaitsFix, as the issue was fixed --- .../elasticsearch/index/query/IntervalQueryBuilderTests.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 0eb18ecca40bc..37c7172623b54 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -725,9 +725,6 @@ public void testPrefixes() throws IOException { assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } - // TODO Lucene 10 upgrade, this fails because automata are not evaluated as "equal" any more, needs more investigation - // and potentially an issue / fix in Lucene - @AwaitsFix(bugUrl = "https://github.com/apache/lucene/pull/13718") public void testRegexp() throws IOException { String json = Strings.format(""" { From c54686b2ce2df579f6df34938835fc2574c63dca Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 12 Sep 2024 
22:38:27 +0200 Subject: [PATCH 283/417] Override the correct search method search(List, Weight, Collector) is deprecated and mostly unused. We need to override the other one that accepts a LeafReaderContextPartition[]. --- .../org/elasticsearch/search/internal/ContextIndexSearcher.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 09b3bd886dd05..80859caf90fb1 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -379,7 +379,7 @@ private T search(Weight weight, CollectorManager * 2) handles the ES TimeExceededException */ @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] leaves, Weight weight, Collector collector) throws IOException { boolean success = false; try { super.search(leaves, weight, collector); From 28846bebcc039c2de1bcc227ffa89e968d2c9bf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 12 Sep 2024 22:55:19 +0200 Subject: [PATCH 284/417] Follow up changes to inter-segment concurrency changes --- .../bucket/filter/QueryToFilterAdapter.java | 7 +++---- .../aggregations/bucket/global/GlobalAggregator.java | 11 +++-------- .../suggest/completion/CompletionSuggester.java | 4 ++-- .../xpack/esql/enrich/EnrichQuerySourceOperator.java | 6 ++---- 4 files changed, 10 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java index 7f33a43163027..50753f604d32c 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; @@ -214,8 +215,7 @@ long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live) // No hits in this segment. return 0; } - IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx); - scorer.score(counter, live, partition.minDocId, partition.maxDocId); + scorer.score(counter, live, 0, DocIdSetIterator.NO_MORE_DOCS); return counter.readAndReset(ctx); } @@ -228,8 +228,7 @@ void collect(LeafReaderContext ctx, LeafCollector collector, Bits live) throws I // No hits in this segment. 
return; } - IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx); - scorer.score(collector, live, partition.minDocId, partition.maxDocId); + scorer.score(collector, live, 0, DocIdSetIterator.NO_MORE_DOCS); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 8f7f299ecf8c2..5b9cb95bb39c2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -7,9 +7,8 @@ */ package org.elasticsearch.search.aggregations.bucket.global; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; -import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; @@ -41,15 +40,11 @@ public GlobalAggregator(String name, AggregatorFactories subFactories, Aggregati @Override public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) throws IOException { // Run sub-aggregations on child documents - LeafReaderContext leafReaderContext = aggCtx.getLeafReaderContext(); - BulkScorer scorer = weight.bulkScorer(leafReaderContext); + BulkScorer scorer = weight.bulkScorer(aggCtx.getLeafReaderContext()); if (scorer == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } grow(1); - IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment( - leafReaderContext - ); scorer.score(new LeafCollector() { @Override @@ -61,7 +56,7 @@ public void collect(int doc) throws IOException { public void setScorer(Scorable 
scorer) throws IOException { sub.setScorer(scorer); } - }, aggCtx.getLeafReaderContext().reader().getLiveDocs(), partition.minDocId, partition.maxDocId); + }, aggCtx.getLeafReaderContext().reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 2c65d9a4e7e86..df2c1bc064da1 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Weight; @@ -78,13 +79,12 @@ private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSu query = (CompletionQuery) query.rewrite(searcher); Weight weight = query.createWeight(searcher, collector.scoreMode(), 1f); for (LeafReaderContext context : searcher.getIndexReader().leaves()) { - IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment(context); BulkScorer scorer = weight.bulkScorer(context); if (scorer != null) { LeafCollector leafCollector = null; try { leafCollector = collector.getLeafCollector(context); - scorer.score(leafCollector, context.reader().getLiveDocs(), partition.minDocId, partition.maxDocId); + scorer.score(leafCollector, context.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff 
--git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java index 765828359da42..f70cfe1dc8a41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; @@ -89,10 +90,7 @@ public Page getOutput() { continue; } final DocCollector collector = new DocCollector(docsBuilder); - IndexSearcher.LeafReaderContextPartition partition = IndexSearcher.LeafReaderContextPartition.createForEntireSegment( - leaf - ); - scorer.score(collector, leaf.reader().getLiveDocs(), partition.minDocId, partition.maxDocId); + scorer.score(collector, leaf.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); int matches = collector.matches; if (segmentsBuilder != null) { From 07fdd8bf8803208f05dcb42c8807571af941b4ae Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 13 Sep 2024 06:11:29 +0000 Subject: [PATCH 285/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-5045d3c67b1 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 84b8896eab6a5..daef3268dd9a2 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 
10.0.0-snapshot-74e3c44063a +lucene = 10.0.0-snapshot-5045d3c67b1 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cc52035770a37..8a26015deb6c1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7dca607fb0803fef5e39c2bdae993dc639cfd362 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 13 Sep 2024 06:11:47 +0000 Subject: [PATCH 286/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-6cc4f13ab22 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a25ac5a6e4c6a..9bb553d642a46 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-ab262f917d4 +lucene = 9.12.0-snapshot-6cc4f13ab22 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index c22b238ae4ac0..41cac4ffe2e6d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + 
+ + - - - + + + - - - + + + From 8cb58a7aae354b99621bd3769f6a98e81e05a11c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 13 Sep 2024 13:33:44 +0200 Subject: [PATCH 287/417] Use RegExp.DEPRECATED_COMPLEMENT where needed Lucene 10 removed the RegExp.COMPLEMENT flag and the ~ operator but brought it back as a deprecated option in https://github.com/apache/lucene/pull/13739, so for the time being we should still be able to use it. --- .../telemetry/apm/internal/tracing/APMTracer.java | 2 +- .../java/org/elasticsearch/index/query/RegexpFlag.java | 10 ++++++---- .../elasticsearch/index/query/RegexpQueryBuilder.java | 4 +++- .../index/search/QueryStringQueryParser.java | 9 ++++++++- .../indices/AssociatedIndexDescriptor.java | 2 +- .../elasticsearch/indices/SystemIndexDescriptor.java | 4 ++-- .../aggregations/bucket/terms/IncludeExclude.java | 4 ++-- .../metadata/MetadataCreateIndexServiceTests.java | 5 +---- .../xpack/core/security/support/Automatons.java | 5 ++++- .../core/expression/predicate/regex/RLikePattern.java | 5 ++++- .../xpack/versionfield/VersionFieldWildcardQuery.java | 2 +- .../xpack/versionfield/VersionStringFieldMapper.java | 3 ++- .../ql/expression/predicate/regex/RLikePattern.java | 5 ++++- 13 files changed, 39 insertions(+), 21 deletions(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 201d967dacf0c..56d3fdccefc26 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -444,7 +444,7 @@ private static CharacterRunAutomaton buildAutomaton(List includePatterns private static Automaton patternsToAutomaton(List patterns) { final List automata = patterns.stream().map(s -> { final String regex = s.replace(".", "\\.").replace("*", 
".*"); - return new RegExp(regex).toAutomaton(); + return new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); }).toList(); if (automata.isEmpty()) { return null; diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index f32871dc899cf..c32da083d4094 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -12,6 +12,8 @@ import java.util.Locale; +import static org.apache.lucene.util.automaton.RegExp.DEPRECATED_COMPLEMENT; + /** * Regular expression syntax flags. Each flag represents optional syntax support in the regular expression: *
      @@ -37,10 +39,10 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} * TODO Lucene 10 upgrade: Lucenes RegExp class has removed the COMPLEMENT flag in https://issues.apache.org/jira/browse/LUCENE-10010 - * I'm currently not sure if it still supports the "~" operator but we need an enum constant for - * parsing our own flag syntax, so leaving a tombstone here for now + * We can use the deprecated constant for now but need a plan for deprecation / removal + * at some point before removal of this functionality in Lucene */ - COMPLEMENT(0x0002), + COMPLEMENT(DEPRECATED_COMPLEMENT), /** * Enables empty language expression: {@code #} @@ -65,7 +67,7 @@ public enum RegexpFlag { /** * Enables all available option flags */ - ALL(RegExp.ALL); + ALL(RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); final int value; diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index b46e30401a0a2..499c04c803320 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -279,7 +279,9 @@ protected Query doToQuery(SearchExecutionContext context) throws QueryShardExcep int matchFlagsValue = caseInsensitive ? 
RegExp.ASCII_CASE_INSENSITIVE : 0; Query query = null; // For BWC we mask irrelevant bits (RegExp changed ALL from 0xffff to 0xff) - int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp.ALL; + // We need to preserve the DEPRECATED_COMPLEMENT for now though + int deprecatedComplementFlag = syntaxFlagsValue & RegExp.DEPRECATED_COMPLEMENT; + int sanitisedSyntaxFlag = syntaxFlagsValue & (RegExp.ALL | deprecatedComplementFlag); MappedFieldType fieldType = context.getFieldType(fieldName); if (fieldType != null) { diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 89fc7be8d0fd2..0e8adc58e7611 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -759,7 +759,14 @@ private Query getRegexpQuerySingle(String field, String termStr) throws ParseExc setAnalyzer(forceAnalyzer); return super.getRegexpQuery(field, termStr); } - return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(), getMultiTermRewriteMethod(), context); + return currentFieldType.regexpQuery( + termStr, + RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT, + 0, + getDeterminizeWorkLimit(), + getMultiTermRewriteMethod(), + context + ); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java index 47a976a3ff481..b968dfa1f409e 100644 --- a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java @@ -93,7 +93,7 @@ static Automaton buildAutomaton(String pattern) { String output = pattern; output = output.replace(".", "\\."); output = 
output.replace("*", ".*"); - return new RegExp(output).toAutomaton(); + return new RegExp(output, RegExp.ALL | RegExp.ALL).toAutomaton(); } /** diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index aa724af1c2e74..7f5ef723c47a8 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -881,13 +881,13 @@ static Automaton buildAutomaton(String pattern, String alias) { final String patternAsRegex = patternToRegex(pattern); final String aliasAsRegex = alias == null ? null : patternToRegex(alias); - final Automaton patternAutomaton = new RegExp(patternAsRegex).toAutomaton(); + final Automaton patternAutomaton = new RegExp(patternAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); if (aliasAsRegex == null) { return patternAutomaton; } - final Automaton aliasAutomaton = new RegExp(aliasAsRegex).toAutomaton(); + final Automaton aliasAutomaton = new RegExp(aliasAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); return Operations.determinize(Operations.union(patternAutomaton, aliasAutomaton), DEFAULT_DETERMINIZE_WORK_LIMIT); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 3b6a13431535f..68ae23330ad65 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -357,8 +357,8 @@ public IncludeExclude( if (exclude != null && excludeValues != null) { throw new IllegalArgumentException(); } - this.include = include == null ? null : new RegExp(include); - this.exclude = exclude == null ? 
null : new RegExp(exclude); + this.include = include == null ? null : new RegExp(include, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); + this.exclude = exclude == null ? null : new RegExp(exclude, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); this.includeValues = includeValues; this.excludeValues = excludeValues; this.incZeroBasedPartition = 0; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 9aae549ddfe2e..ab05dc00e4830 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -601,13 +601,10 @@ public void testCalculateNumRoutingShards() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/112453") public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); - // TODO Lucene 10 upgrade - // The "~" operator in Rexeg Automata doesn't seem to work as expected any more without minimization - Automaton patternAutomaton = new RegExp("\\.test-~(one.*)").toAutomaton(); + Automaton patternAutomaton = new RegExp("\\.test-~(one.*)", RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); assertTrue( new CharacterRunAutomaton(Operations.determinize(patternAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)).run( ".test-~(one.*)" diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index 7d3367482c067..201cb4b69e472 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -225,7 +225,10 @@ private static Automaton buildAutomaton(String pattern) { ); } String regex = pattern.substring(1, pattern.length() - 1); - return Operations.determinize(new RegExp(regex).toAutomaton(), DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize( + new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + DEFAULT_DETERMINIZE_WORK_LIMIT + ); } else if (pattern.equals("*")) { return MATCH_ALL; } else { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java index 8c86ced576d88..4e559f564acb1 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java @@ -22,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return Operations.determinize(new RegExp(regexpPattern).toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java index 0370a02c709bf..1e5ecf19bdf81 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java +++ 
b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java @@ -114,7 +114,7 @@ private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive if (containsPreReleaseSeparator == false) { automata.add(Operations.optional(Automata.makeChar(VersionEncoder.NO_PRERELEASE_SEPARATOR_BYTE))); } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index b49b4500ce7b7..6bf2917c601ac 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -187,7 +187,8 @@ public Query regexpQuery( matchFlags, DEFAULT_PROVIDER, maxDeterminizedStates, - method == null ? CONSTANT_SCORE_REWRITE : method + method == null ? 
CONSTANT_SCORE_REWRITE : method, + true ) { @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java index 1e29a4a229d6c..41ae97ec5e4fd 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java @@ -22,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return Operations.determinize(new RegExp(regexpPattern).toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override From 32f5907f536d1e7724b6ee24a32d67d4a8b24de1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 13 Sep 2024 13:41:38 +0200 Subject: [PATCH 288/417] Unmute two tests that now pass --- .../xpack/core/security/support/StringMatcherTests.java | 1 - .../xpack/security/authz/store/CompositeRolesStoreTests.java | 1 - 2 files changed, 2 deletions(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java index ef0497dd224f4..2e31f760f6db2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/StringMatcherTests.java @@ -98,7 +98,6 @@ public void testSingleExactMatch() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/112453") public void testSingleRegex() throws Exception 
{ final String notStr = randomAlphaOfLengthBetween(3, 5); final StringMatcher matcher = StringMatcher.of("/~(" + notStr + ")/"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index e5c9b99c2431c..693bd9b868ede 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -2736,7 +2736,6 @@ public void testSecurityProfileUserHasAccessForOnlyProfileIndex() { assertThat(CompositeRolesStore.tryGetRoleDescriptorForInternalUser(subject).get().getClusterPrivileges(), emptyArray()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/112453") public void testXPackUserCanAccessNonRestrictedIndices() { for (String action : Arrays.asList( TransportGetAction.TYPE.name(), From c79f782700e89bf5618fec600ca53f3b8c17d3e4 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 13 Sep 2024 17:25:26 +0100 Subject: [PATCH 289/417] Unmute a couple more tests that now pass --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 2 -- .../optimizer/rules/logical/ReplaceRegexMatchTests.java | 6 ------ 2 files changed, 8 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 4a5bd6f806564..22a4b410a6d7a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1792,8 +1792,6 @@ public void testSimplifyRLikeNoWildcard() { assertTrue(filter.child() 
instanceof EsRelation); } - // TODO Lucene 10 upgrade - @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testSimplifyRLikeMatchAll() { LogicalPlan plan = optimizedPlan(""" from test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java index 0bd327c3a218e..c5e64d41be4dc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java @@ -26,8 +26,6 @@ public class ReplaceRegexMatchTests extends ESTestCase { - // TODO Lucene 10 upgrade - @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllLikeToExist() { for (String s : asList("%", "%%", "%%%")) { LikePattern pattern = new LikePattern(s, (char) 0); @@ -40,8 +38,6 @@ public void testMatchAllLikeToExist() { } } - // TODO Lucene 10 upgrade - @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllWildcardLikeToExist() { for (String s : asList("*", "**", "***")) { WildcardPattern pattern = new WildcardPattern(s); @@ -54,8 +50,6 @@ public void testMatchAllWildcardLikeToExist() { } } - // TODO Lucene 10 upgrade - @AwaitsFix(bugUrl = "https://github.com/apache/lucene/issues/13706") public void testMatchAllRLikeToExist() { RLikePattern pattern = new RLikePattern(".*"); FieldAttribute fa = getFieldAttribute(); From 014d3381362679a29e82b15b8503f720f119f2ee Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 14 Sep 2024 06:12:44 +0000 Subject: [PATCH 290/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-7c056ab88c7 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files 
changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index daef3268dd9a2..5eb4810f540e4 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-5045d3c67b1 +lucene = 10.0.0-snapshot-7c056ab88c7 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8a26015deb6c1..58046e8897a1c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 35764df0bb4cbd9714470b10027eaed6f46763f0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 14 Sep 2024 06:12:57 +0000 Subject: [PATCH 291/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-1b38d5dec85 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9bb553d642a46..64cb34b52180e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-6cc4f13ab22 +lucene = 9.12.0-snapshot-1b38d5dec85 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 41cac4ffe2e6d..5f8f4003a7bcb 100644 --- a/gradle/verification-metadata.xml +++ 
b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e76b6f6ecc8f6412392a623f9623d93aa1fdbc68 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 15 Sep 2024 06:11:15 +0000 Subject: [PATCH 292/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-568d1f3fbe7 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 5eb4810f540e4..b73d817a3fbba 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-7c056ab88c7 +lucene = 10.0.0-snapshot-568d1f3fbe7 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 58046e8897a1c..06c3a9d4a030b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 00ae8a4ad7f992a8495f52158994e13079bffe7d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 15 Sep 2024 06:11:50 +0000 Subject: [PATCH 293/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-9cd6a24be43 --- build-tools-internal/version.properties | 2 +- 
gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 64cb34b52180e..f48f02c2921ef 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-1b38d5dec85 +lucene = 9.12.0-snapshot-9cd6a24be43 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5f8f4003a7bcb..aa15fcfb28fb2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 40aa2f94155658e86f5260c52ab8629e10c7e55b Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 15 Sep 2024 15:49:47 +0100 Subject: [PATCH 294/417] Fix getDiscountOverlaps in LegacyBM25Similarity --- .../lucene/similarity/LegacyBM25Similarity.java | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java index 7421579d643e4..114a4d7f4a2d2 100644 --- a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java +++ b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java @@ -43,7 +43,12 @@ public final class LegacyBM25Similarity extends Similarity { *
    */ public LegacyBM25Similarity() { - this.bm25Similarity = new BM25Similarity(); + this(new BM25Similarity()); + } + + private LegacyBM25Similarity(BM25Similarity bm25Similarity) { + super(bm25Similarity.getDiscountOverlaps()); + this.bm25Similarity = bm25Similarity; } /** @@ -81,13 +86,6 @@ public float getB() { return bm25Similarity.getB(); } - /** - * Returns true if overlap tokens are discounted from the document's length. - */ - public boolean getDiscountOverlaps() { - return bm25Similarity.getDiscountOverlaps(); - } - @Override public String toString() { return bm25Similarity.toString(); From b05b5f3fbbf1fa5b8bf927fc1d8d8b7ffabc9220 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 15 Sep 2024 16:01:11 +0100 Subject: [PATCH 295/417] Fix AggregatorTestCase with LeafReaderContextPartition --- .../search/aggregations/AggregatorTestCase.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 6ca513516d90e..b111ba1a1bef4 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -958,11 +958,11 @@ protected DirectoryReader wrapDirectoryReader(DirectoryReader reader) throws IOE } private static class ShardSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -971,7 +971,7 @@ public void search(Weight weight, Collector collector) 
throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } From 7d27f53c4b3ecd5254d6aa11bc52c16c68c4ef35 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 15 Sep 2024 16:07:20 +0100 Subject: [PATCH 296/417] More LeafReaderContextPartition refactoring fixes --- .../lucene/grouping/SinglePassGroupingCollectorTests.java | 6 +++--- .../org/elasticsearch/search/query/QueryPhaseTests.java | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index 3e9ebbe6bf31b..2bce4636d7834 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -48,11 +48,11 @@ public class SinglePassGroupingCollectorTests extends ESTestCase { private static class SegmentSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; SegmentSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -61,7 +61,7 @@ public void search(Weight weight, Collector collector) throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 5e11c25094ce6..fbcd13c76e289 100644 --- 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -1118,7 +1118,7 @@ private static ContextIndexSearcher earlyTerminationContextSearcher(IndexReader ) { @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] partitions, Weight weight, Collector collector) throws IOException { final Collector in = new FilterCollector(collector) { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { @@ -1135,7 +1135,7 @@ public void collect(int doc) throws IOException { }; } }; - super.search(leaves, weight, in); + super.search(partitions, weight, in); } }; } From 70a5dbab3c173122d7f5ba8e9b986fe521cd323e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 16 Sep 2024 06:12:27 +0000 Subject: [PATCH 297/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-3801d859783 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b73d817a3fbba..39861b9791755 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-568d1f3fbe7 +lucene = 10.0.0-snapshot-3801d859783 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 06c3a9d4a030b..70b60aa7d7368 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@
    - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 4b2f5f1a0bf26a369254344a7270b6960bcf8c22 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 16 Sep 2024 08:18:46 +0100 Subject: [PATCH 298/417] lucene_snapshot: Fix constructor chaining in LegacyBM25Similarity --- .../lucene/similarity/LegacyBM25Similarity.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java index 114a4d7f4a2d2..d420e519a30e7 100644 --- a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java +++ b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java @@ -46,11 +46,6 @@ public LegacyBM25Similarity() { this(new BM25Similarity()); } - private LegacyBM25Similarity(BM25Similarity bm25Similarity) { - super(bm25Similarity.getDiscountOverlaps()); - this.bm25Similarity = bm25Similarity; - } - /** * BM25 with the supplied parameter values. * @param k1 Controls non-linear term frequency normalization (saturation). 
@@ -59,7 +54,12 @@ private LegacyBM25Similarity(BM25Similarity bm25Similarity) { * not within the range {@code [0..1]} */ public LegacyBM25Similarity(float k1, float b, boolean discountOverlaps) { - this.bm25Similarity = new BM25Similarity(k1, b, discountOverlaps); + this(new BM25Similarity(k1, b, discountOverlaps)); + } + + private LegacyBM25Similarity(BM25Similarity bm25Similarity) { + super(bm25Similarity.getDiscountOverlaps()); + this.bm25Similarity = bm25Similarity; } @Override From 762ec3ef15d9f68df6613748b379774d5a831b75 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 16 Sep 2024 08:27:48 +0100 Subject: [PATCH 299/417] lucene_snapshot_10: fix license headers --- .../elasticsearch/index/codec/Elasticsearch816Codec.java | 9 +++++---- ...ES87TSDBDocValuesFormatVariableSkipIntervalTests.java | 9 +++++---- .../index/query/RangeIntervalsSourceProviderTests.java | 9 +++++---- .../index/query/RegexpIntervalsSourceProviderTests.java | 9 +++++---- 4 files changed, 20 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 1b08b87e46762..00711c7ecc306 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -1,9 +1,10 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ package org.elasticsearch.index.codec; diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java index b8257db1b7a1a..099b59808ef4a 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java @@ -1,9 +1,10 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
*/ package org.elasticsearch.index.codec.tsdb; diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java index 73b4be4ec6154..e170faf8043be 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java @@ -1,9 +1,10 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ package org.elasticsearch.index.query; diff --git a/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java index b226a1394c75e..ace7350d8d796 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java @@ -1,9 +1,10 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ package org.elasticsearch.index.query; From b3f371d08713c66d881d045f857bb6fe39ce59f3 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 16 Sep 2024 11:49:54 +0200 Subject: [PATCH 300/417] Move AggregatorTestCase to search(Query, CollectorManager) search(Query, Collector) is deprecated and no longer performs post collection --- .../aggregations/AggregatorTestCase.java | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 2660692c39fd9..5f64d123c1bed 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -33,6 +33,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -158,6 +159,7 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import 
java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -831,11 +833,8 @@ protected void debugTestCase( QueryCachingPolicy queryCachingPolicy, MappedFieldType... fieldTypes ) throws IOException { - // Don't use searchAndReduce because we only want a single aggregator. - IndexSearcher searcher = newIndexSearcher( - reader, - aggregationBuilder.supportsParallelCollection(field -> getCardinality(reader, field)) - ); + // Don't use searchAndReduce because we only want a single aggregator, disable parallel collection too. + IndexSearcher searcher = newIndexSearcher(reader, false); if (queryCachingPolicy != null) { searcher.setQueryCachingPolicy(queryCachingPolicy); } @@ -854,7 +853,21 @@ protected void debugTestCase( try { Aggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); - searcher.search(context.query(), aggregator.asCollector()); + searcher.search(context.query(), new CollectorManager() { + boolean called = false; + + @Override + public Collector newCollector() { + assert called == false : "newCollector called multiple times"; + called = true; + return aggregator.asCollector(); + } + + @Override + public Void reduce(Collection collectors) { + return null; + } + }); InternalAggregation r = aggregator.buildTopLevel(); r = doReduce( List.of(r), From 6a580742ab767460a1909e095ca8648a2adfad23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 16 Sep 2024 15:23:30 +0200 Subject: [PATCH 301/417] Don't randomize LuceneTestCase concurrency when using "newSearcher" LuceneTestCase#newSearcher(reader) now randomizes which concurrency type is being used. For some parts we don't support INTRA_SEGMENT concurrency, so we override this widely used utility method so we use INTER_SEGMENT concurrency for now. 
--- .../src/main/java/org/elasticsearch/test/ESTestCase.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 00cfedb257187..a7eb113b80c09 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -33,6 +33,8 @@ import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.tests.util.TestRuleMarkFailure; @@ -2570,4 +2572,10 @@ public static void ensureAllContextsReleased(SearchService searchService) { throw new AssertionError("Failed to verify search contexts", e); } } + + // lucene 10 upgrade: this overwrites LuceneTestCase#newSearcher so we never get random INTRA_SEGMENT + // concurrency. 
We want to slowly migrate of stuff out of using this method after we got a working branch + public static IndexSearcher newSearcher(IndexReader r) { + return newSearcher(r, true, true, Concurrency.INTER_SEGMENT); + } } From 6fc5930a1ff3b352728553a08b7bdf8904356605 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 16 Sep 2024 17:31:24 +0200 Subject: [PATCH 302/417] Fix docs according to changes in Lovins token filter, Pathhierarchy Analyzer and Noritoken filter --- docs/plugins/analysis-nori.asciidoc | 12 +++++----- .../snowball-tokenfilter.asciidoc | 2 +- .../pathhierarchy-tokenizer.asciidoc | 24 +++++++++---------- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc index 1a3153fa3bea5..435844127f406 100644 --- a/docs/plugins/analysis-nori.asciidoc +++ b/docs/plugins/analysis-nori.asciidoc @@ -238,11 +238,11 @@ Which responds with: "end_offset": 3, "type": "word", "position": 1, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JKS(Subject case marker)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JKS(Subject case marker)" }, { "token": "깊", @@ -262,11 +262,11 @@ Which responds with: "end_offset": 6, "type": "word", "position": 3, - "leftPOS": "E(Verbal endings)", + "leftPOS": "ETM(Adnominal form transformative ending)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "E(Verbal endings)" + "rightPOS": "ETM(Adnominal form transformative ending)" }, { "token": "나무", @@ -286,11 +286,11 @@ Which responds with: "end_offset": 10, "type": "word", "position": 5, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JX(Auxiliary postpositional particle)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JX(Auxiliary postpositional particle)" } ] }, diff --git 
a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc index 57e402988cc5a..bd708096aac48 100644 --- a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc @@ -28,7 +28,7 @@ PUT /my-index-000001 "filter": { "my_snow": { "type": "snowball", - "language": "Lovins" + "language": "Porter" } } } diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index 2cf01b77d57ab..5f98807387280 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -40,14 +40,14 @@ POST _analyze "start_offset": 0, "end_offset": 8, "type": "word", - "position": 0 + "position": 1 }, { "token": "/one/two/three", "start_offset": 0, "end_offset": 14, "type": "word", - "position": 0 + "position": 2 } ] } @@ -144,14 +144,14 @@ POST my-index-000001/_analyze "start_offset": 7, "end_offset": 18, "type": "word", - "position": 0 + "position": 1 }, { "token": "/three/four/five", "start_offset": 7, "end_offset": 23, "type": "word", - "position": 0 + "position": 2 } ] } @@ -178,14 +178,14 @@ If we were to set `reverse` to `true`, it would produce the following: [[analysis-pathhierarchy-tokenizer-detailed-examples]] === Detailed examples -A common use-case for the `path_hierarchy` tokenizer is filtering results by -file paths. If indexing a file path along with the data, the use of the -`path_hierarchy` tokenizer to analyze the path allows filtering the results +A common use-case for the `path_hierarchy` tokenizer is filtering results by +file paths. If indexing a file path along with the data, the use of the +`path_hierarchy` tokenizer to analyze the path allows filtering the results by different parts of the file path string. 
This example configures an index to have two custom analyzers and applies -those analyzers to multifields of the `file_path` text field that will +those analyzers to multifields of the `file_path` text field that will store filenames. One of the two analyzers uses reverse tokenization. Some sample documents are then indexed to represent some file paths for photos inside photo folders of two different users. @@ -264,8 +264,8 @@ POST file-path-test/_doc/5 -------------------------------------------------- -A search for a particular file path string against the text field matches all -the example documents, with Bob's documents ranking highest due to `bob` also +A search for a particular file path string against the text field matches all +the example documents, with Bob's documents ranking highest due to `bob` also being one of the terms created by the standard analyzer boosting relevance for Bob's documents. @@ -301,7 +301,7 @@ GET file-path-test/_search With the reverse parameter for this tokenizer, it's also possible to match from the other end of the file path, such as individual file names or a deep level subdirectory. The following example shows a search for all files named -`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field +`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field configured to use the reverse parameter in the mapping. @@ -342,7 +342,7 @@ POST file-path-test/_analyze It's also useful to be able to filter with file paths when combined with other -types of searches, such as this example looking for any files paths with `16` +types of searches, such as this example looking for any files paths with `16` that also must be in Alice's photo directory. 
[source,console] From 2d1a6146c2a65ae67c5cc175d09aa631cd491b22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 16 Sep 2024 22:58:51 +0200 Subject: [PATCH 303/417] Fix geo_shape related docs tests --- .../spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 868167a22da0f..5c194b5080fb3 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -130,7 +130,7 @@ public Builder( this.geoFormatterFactory = geoFormatterFactory; this.ignoreMalformed = ignoreMalformedParam(m -> builder(m).ignoreMalformed.get(), ignoreMalformedByDefault); this.coerce = coerceParam(m -> builder(m).coerce.get(), coerceByDefault); - this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), false); + this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), true); addScriptValidation(script, indexed, hasDocValues); } From 52bf71d65a6c17895e6eb1020b493260ebba5822 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 17 Sep 2024 06:11:55 +0000 Subject: [PATCH 304/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-f4ebed2404e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 39861b9791755..21063d99a8d26 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-3801d859783 +lucene = 10.0.0-snapshot-f4ebed2404e bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 70b60aa7d7368..6d26a9f759776 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 592642b1cc1ce62783216f7498d882a597a48771 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Sep 2024 13:19:40 +0200 Subject: [PATCH 305/417] Fix expected output of romanian analyzer --- .../rest-api-spec/test/analysis-common/20_analyzers.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index c03bdb3111050..7674b95af4851 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -948,7 +948,7 @@ text: absenţa analyzer: romanian - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } - do: indices.analyze: @@ -957,7 +957,7 @@ text: absenţa analyzer: my_analyzer - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } --- "russian": From 4f7df526d73200d0d0a7d80a95ad7ceffba53b35 Mon Sep 17 00:00:00 
2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Sep 2024 13:46:35 +0200 Subject: [PATCH 306/417] Fix QueryTranslatorSpecTests due to changes in regex syntax flags --- .../plugin/eql/src/test/resources/querytranslator_tests.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt index b04d28654f1d5..00c08096fd084 100644 --- a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt +++ b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt @@ -769,7 +769,7 @@ process where command_line regex "^.*?net.exe" regexSingleArgInsensitive process where command_line regex~ "^.*?net.exe" ; -"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":65791,"case_insensitive":true ; regexMultiArg @@ -781,7 +781,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe") regexMultiArgInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":65791,"case_insensitive":true ; regexMultiMultiArgVariant @@ -793,7 +793,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\ regexMultiMultiArgVariantInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\system32\\\\net1\\s+") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":65791,"case_insensitive":true ; regexMultiArgWithScript From 4ebfae1399cce8248c88b2389021466e03e48abe Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Sep 2024 14:50:31 +0200 Subject: [PATCH 307/417] Fix 370_profile yaml test for yamlRestCompatTest --- rest-api-spec/build.gradle | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 015c9c4b812c6..138744e74085a 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -53,3 +53,9 @@ tasks.register('enforceYamlTestConvention').configure { tasks.named("precommit").configure { dependsOn 'enforceYamlTestConvention' } + +tasks.named("yamlRestCompatTestTransform").configure { task -> + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") + +} From a6f6d21df82d73ac2534becb99b657c927c2183c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Sep 2024 15:35:44 +0200 Subject: [PATCH 308/417] Fix romanian analyzer restBwc test --- modules/analysis-common/build.gradle | 4 ++++ rest-api-spec/build.gradle | 1 - 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index b16c6eaaaa1d1..f4f7e787d2b7b 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -33,3 +33,7 @@ dependencies { artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } + +tasks.named("yamlRestCompatTestTransform").configure { task -> + task.replaceValueInMatch("tokens.0.token", "absenț", "romanian") +} diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 138744e74085a..80deb8252e1e5 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,5 +57,4 @@ 
tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure { task -> task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") - } From 309abb07535fd3ad40c07d771448ed106a3bac50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Sep 2024 16:35:47 +0200 Subject: [PATCH 309/417] Remove v7.17.13 bwc tasks in CI --- .buildkite/pipelines/lucene-snapshot/run-tests.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index 49c3396488d82..a3ae332736fa9 100644 --- a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -62,7 +62,6 @@ steps: matrix: setup: BWC_VERSION: - - 7.17.13 - 8.9.1 - 8.10.0 agents: From 98e560091a4892654c0e36e03e0d083022542ceb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Sep 2024 21:00:04 +0200 Subject: [PATCH 310/417] Determinize automaton produced by IncludeExclude --- .../search/aggregations/bucket/terms/IncludeExclude.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 7c8752b904eb0..4bcbe08ed227c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -529,7 +529,7 @@ private Automaton toAutomaton() { if (exclude != null) { a = Operations.minus(a, 
exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - return a; + return Operations.determinize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public StringFilter convertToStringFilter(DocValueFormat format) { From 455980df9f949073a3d0de7be10d7cc1c53c055a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Sep 2024 21:30:13 +0200 Subject: [PATCH 311/417] Fix persian language analyzer doc by adding stemmer --- docs/reference/analysis/analyzers/lang-analyzer.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 5273537389e3d..881970787f5a6 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -1430,7 +1430,8 @@ PUT /persian_example "decimal_digit", "arabic_normalization", "persian_normalization", - "persian_stop" + "persian_stop", + "persian_stem" ] } } From 01127d10de7235ecfaefdde9b65900301dd8eda5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 18 Sep 2024 06:12:45 +0000 Subject: [PATCH 312/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-b59a357e586 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 21063d99a8d26..461f75b9132cf 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-f4ebed2404e +lucene = 10.0.0-snapshot-b59a357e586 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 6d26a9f759776..1811debc0ffb1 100644 --- 
a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 69580861c0c22f7b55b70b6704f13b97cb5c7520 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 18 Sep 2024 10:42:11 +0200 Subject: [PATCH 313/417] Fix compile errors after L10 snapshot merge --- .../index/codec/DeduplicatingFieldInfosFormat.java | 2 +- .../index/codec/tsdb/ES87TSDBDocValuesConsumer.java | 9 +++++---- .../index/codec/tsdb/ES87TSDBDocValuesProducer.java | 3 ++- .../index/engine/TranslogDirectoryReader.java | 7 ++++--- .../elasticsearch/index/mapper/DocumentLeafReader.java | 3 ++- .../elasticsearch/index/mapper/FieldTypeTestCase.java | 3 ++- .../snapshots/sourceonly/SourceOnlySnapshot.java | 2 +- .../elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java | 2 +- .../bwc/codecs/lucene50/Lucene50FieldInfosFormat.java | 3 ++- 9 files changed, 20 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index f5eefc4fdf7a7..00614140e237a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -54,7 +54,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), - fi.hasDocValuesSkipIndex(), + fi.docValuesSkipIndexType(), fi.getDocValuesGen(), internStringStringMap(fi.attributes()), fi.getPointDimensionCount(), diff --git 
a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 05c39286ab0f4..3df7edeeb6df6 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -15,6 +15,7 @@ import org.apache.lucene.codecs.lucene90.IndexedDISI; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; @@ -106,7 +107,7 @@ public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOExcepti return DocValues.singleton(valuesProducer.getNumeric(field)); } }; - if (field.hasDocValuesSkipIndex()) { + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { writeSkipIndex(field, producer); } @@ -322,7 +323,7 @@ public long cost() { return DocValues.singleton(sortedOrds); } }; - if (field.hasDocValuesSkipIndex()) { + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { writeSkipIndex(field, producer); } if (addTypeByte) { @@ -483,7 +484,7 @@ public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProdu } private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { - if (field.hasDocValuesSkipIndex()) { + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { writeSkipIndex(field, valuesProducer); } if (maxOrd > -1) { @@ -685,7 +686,7 @@ public static SkipAccumulator merge(List list, int index, int l } private void writeSkipIndex(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - assert field.hasDocValuesSkipIndex(); + assert field.docValuesSkipIndexType() != 
DocValuesSkipIndexType.NONE; final long start = data.getFilePointer(); final SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); long globalMaxValue = Long.MIN_VALUE; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index 0c00fdac150f2..edf94cc91ebe7 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -831,7 +832,7 @@ private void readFields(IndexInput meta, FieldInfos infos) throws IOException { throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); } byte type = meta.readByte(); - if (info.hasDocValuesSkipIndex()) { + if (info.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { skippers.put(info.name, readDocValueSkipperMeta(meta)); } if (type == ES87TSDBDocValuesFormat.NUMERIC) { diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index 556b0aadeeb69..0f772b49bf92b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipIndexType; import 
org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; @@ -152,7 +153,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, - false, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -172,7 +173,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, - false, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -192,7 +193,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.DOCS, DocValuesType.NONE, - false, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index de01536709da3..d37f6c51d288d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; @@ -279,7 +280,7 @@ private static FieldInfo fieldInfo(String name) { false, IndexOptions.NONE, DocValuesType.NONE, - false, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index 18682fc252b8c..1c4cfa4ec7ff9 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ 
-8,6 +8,7 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -139,7 +140,7 @@ public FieldInfo getFieldInfoWithName(String name) { randomBoolean(), IndexOptions.NONE, DocValuesType.NONE, - false, + DocValuesSkipIndexType.NONE, -1, new HashMap<>(), 1, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index c9883a8337aae..421a306babf29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -254,7 +254,7 @@ private SegmentCommitInfo syncSegment( false, IndexOptions.NONE, DocValuesType.NONE, - fieldInfo.hasDocValuesSkipIndex(), + fieldInfo.docValuesSkipIndexType(), -1, fieldInfo.attributes(), 0, diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 13f6140587e4a..3ed8fc26ac937 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -99,7 +99,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { false, fieldInfo.getIndexOptions(), fieldInfo.getDocValuesType(), - fieldInfo.hasDocValuesSkipIndex(), + fieldInfo.docValuesSkipIndexType(), fieldInfo.getDocValuesGen(), fieldInfo.attributes(), fieldInfo.getPointDimensionCount(), diff --git 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java index 0c71ea2df6932..06002d2d10dee 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java @@ -23,6 +23,7 @@ import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -103,7 +104,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm storePayloads, indexOptions, docValuesType, - false, + DocValuesSkipIndexType.NONE, dvGen, attributes, 0, From 9af0d8e7100488f934368701084abfac0fc41c2e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 16 Sep 2024 06:12:23 +0000 Subject: [PATCH 314/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a774a998be1 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f48f02c2921ef..62fd08bb49505 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-9cd6a24be43 +lucene = 9.12.0-snapshot-a774a998be1 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git 
a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index aa15fcfb28fb2..3b2d0f5ea89d3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6294ad2bc8c31134dd71408685fc629f45df8eb2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 16 Sep 2024 08:18:46 +0100 Subject: [PATCH 315/417] lucene_snapshot: Fix constructor chaining in LegacyBM25Similarity --- .../lucene/similarity/LegacyBM25Similarity.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java index 7421579d643e4..18be0b01814f0 100644 --- a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java +++ b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java @@ -43,7 +43,7 @@ public final class LegacyBM25Similarity extends Similarity { * */ public LegacyBM25Similarity() { - this.bm25Similarity = new BM25Similarity(); + this(new BM25Similarity()); } /** @@ -54,7 +54,12 @@ public LegacyBM25Similarity() { * not within the range {@code [0..1]} */ public LegacyBM25Similarity(float k1, float b, boolean discountOverlaps) { - this.bm25Similarity = new BM25Similarity(k1, b, discountOverlaps); + this(new BM25Similarity(k1, b, discountOverlaps)); + } + + private LegacyBM25Similarity(BM25Similarity bm25Similarity) { + super(bm25Similarity.getDiscountOverlaps()); + this.bm25Similarity = bm25Similarity; } @Override From f5ce091e7ab115be331e695d13546fc59af87232 Mon Sep 17 00:00:00 2001 From: 
elasticsearchmachine Date: Tue, 17 Sep 2024 06:12:01 +0000 Subject: [PATCH 316/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-cd7a74cb4d4 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 62fd08bb49505..4d5db5288fd23 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-a774a998be1 +lucene = 9.12.0-snapshot-cd7a74cb4d4 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3b2d0f5ea89d3..87aa14efc1b68 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 75fcbe0a0d7b5f57b9139521923da7f15b537aa4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 18 Sep 2024 06:11:47 +0000 Subject: [PATCH 317/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-71ca6b4bb16 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4d5db5288fd23..307139aad6bc7 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.16.0 -lucene = 9.12.0-snapshot-cd7a74cb4d4 +lucene = 
9.12.0-snapshot-71ca6b4bb16 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 87aa14efc1b68..55843c33bdb3a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6e401251de4358a6955040477036bf1dd402b1ab Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 18 Sep 2024 09:04:13 +0100 Subject: [PATCH 318/417] lucene_snapshot: fix another instance of IOContext.READONCE --- .../src/main/java/org/elasticsearch/common/lucene/Lucene.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 2b8db7fe8b7fd..2cd3361a05822 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -241,7 +241,7 @@ public static void checkSegmentInfoIntegrity(final Directory directory) throws I @Override protected Object doBody(String segmentFileName) throws IOException { - try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) { + try (IndexInput input = directory.openInput(segmentFileName, IOContext.READONCE)) { CodecUtil.checksumEntireFile(input); } return null; From ff74c908150731ae5347fca6dff73ce4aa1548f7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 15 Sep 2024 17:28:33 +0100 Subject: [PATCH 319/417] lucene_snapshot: fix license headers --- .../elasticsearch/index/codec/Elasticsearch816Codec.java | 9 +++++---- 
.../index/query/RangeIntervalsSourceProviderTests.java | 9 +++++---- .../index/query/RegexpIntervalsSourceProviderTests.java | 9 +++++---- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 1b08b87e46762..00711c7ecc306 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -1,9 +1,10 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ package org.elasticsearch.index.codec; diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java index 73b4be4ec6154..e170faf8043be 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java @@ -1,9 +1,10 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". */ package org.elasticsearch.index.query; diff --git a/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java index b226a1394c75e..ace7350d8d796 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java @@ -1,9 +1,10 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
*/ package org.elasticsearch.index.query; From 26b6513bcd5bb4baf899a78aeb5050c9286b546d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 19 Sep 2024 06:11:45 +0000 Subject: [PATCH 320/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-6d987e1ce1c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 461f75b9132cf..da35190f85890 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-b59a357e586 +lucene = 10.0.0-snapshot-6d987e1ce1c bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1811debc0ffb1..2de376504e277 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From fb44c6359460a199b949fd5217a4bd41dca7b87c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 19 Sep 2024 06:11:50 +0000 Subject: [PATCH 321/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-b467a2bb66d --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6397531397f83..7f6f7167dec1c 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.12.0-snapshot-71ca6b4bb16 +lucene = 9.12.0-snapshot-b467a2bb66d bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 55843c33bdb3a..3250d73eeaf5f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 9eec2c4422b2a73b3357056795aae7de8dd48fe0 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Thu, 19 Sep 2024 14:32:08 +0100 Subject: [PATCH 322/417] Add a capability and transport version for new regex and range interval query support (#113128) This commit adds a capability and transport version for new regex and range interval query support. 
--- qa/ccs-common-rest/build.gradle | 2 +- .../test/search/230_interval_query.yml | 16 +++++++++--- .../org/elasticsearch/TransportVersions.java | 1 + .../index/query/IntervalsSourceProvider.java | 16 ++++++++++-- .../rest/action/search/RestSearchAction.java | 5 ++++ .../action/search/SearchCapabilities.java | 25 +++++++++++++++++++ x-pack/qa/runtime-fields/build.gradle | 2 +- 7 files changed, 59 insertions(+), 8 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java diff --git a/qa/ccs-common-rest/build.gradle b/qa/ccs-common-rest/build.gradle index e5e8c5a489d5b..6121f7dcd4f82 100644 --- a/qa/ccs-common-rest/build.gradle +++ b/qa/ccs-common-rest/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' restResources { restApi { - include '_common', 'bulk', 'count', 'cluster', 'field_caps', 'get', 'knn_search', 'index', 'indices', 'msearch', + include 'capabilities', '_common', 'bulk', 'count', 'cluster', 'field_caps', 'get', 'knn_search', 'index', 'indices', 'msearch', 'search', 'async_search', 'graph', '*_point_in_time', 'info', 'scroll', 'clear_scroll', 'search_mvt', 'eql', 'sql' } restTests { diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml index e828c9ce8d8a8..6a5f34b5207ce 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml @@ -479,8 +479,12 @@ setup: --- "Test regexp": - requires: - cluster_features: "gte_v8.16.0" - reason: "Implemented in 8.16" + capabilities: + - method: POST + path: /_search + capabilities: [ range_regexp_interval_queries ] + test_runner_features: capabilities + reason: "Support for range and regexp interval queries capability required" - do: search: index: test @@ 
-500,8 +504,12 @@ setup: --- "Test range": - requires: - cluster_features: "gte_v8.16.0" - reason: "Implemented in 8.16" + capabilities: + - method: POST + path: /_search + capabilities: [ range_regexp_interval_queries ] + test_runner_features: capabilities + reason: "Support for range and regexp interval queries capability required" - do: search: index: test diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index cef4bd14d992b..92b871c771f70 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -219,6 +219,7 @@ static TransportVersion def(int id) { public static final TransportVersion SIMULATE_COMPONENT_TEMPLATES_SUBSTITUTIONS = def(8_743_00_0); public static final TransportVersion ML_INFERENCE_IBM_WATSONX_EMBEDDINGS_ADDED = def(8_744_00_0); public static final TransportVersion BULK_INCREMENTAL_STATE = def(8_745_00_0); + public static final TransportVersion REGEX_AND_RANGE_INTERVAL_QUERIES = def(8_746_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java index 2fcae3c954bf7..647e45d1beda1 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -14,11 +14,13 @@ import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -753,7 +755,7 @@ String getUseField() { } } - public static class Regexp extends IntervalsSourceProvider { + public static class Regexp extends IntervalsSourceProvider implements VersionedNamedWriteable { public static final String NAME = "regexp"; @@ -821,6 +823,11 @@ public String getWriteableName() { return NAME; } + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.REGEX_AND_RANGE_INTERVAL_QUERIES; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(pattern); @@ -1032,7 +1039,7 @@ String getUseField() { } } - public static class Range extends IntervalsSourceProvider { + public static class Range extends IntervalsSourceProvider implements VersionedNamedWriteable { public static final String NAME = "range"; @@ -1120,6 +1127,11 @@ public String getWriteableName() { return NAME; } + @Override + 
public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.REGEX_AND_RANGE_INTERVAL_QUERIES; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(lowerTerm); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 28330c7c45479..38157efd8a370 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -95,6 +95,11 @@ public List routes() { ); } + @Override + public Set supportedCapabilities() { + return SearchCapabilities.CAPABILITIES; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java new file mode 100644 index 0000000000000..45fd6afe4fca6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.rest.action.search; + +import java.util.Set; + +/** + * A {@link Set} of "capabilities" supported by the {@link RestSearchAction}. 
+ */ +public final class SearchCapabilities { + + private SearchCapabilities() {} + + /** Support regex and range match rules in interval queries. */ + private static final String RANGE_REGEX_INTERVAL_QUERY_CAPABILITY = "range_regexp_interval_queries"; + + public static final Set CAPABILITIES = Set.of(RANGE_REGEX_INTERVAL_QUERY_CAPABILITY); +} diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index 5add595d64e3f..43d6d9463e0d1 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -29,7 +29,7 @@ subprojects { restResources { restApi { - include '_common', 'bulk', 'count', 'cluster', 'index', 'indices', 'field_caps', 'msearch', + include 'capabilities', '_common', 'bulk', 'count', 'cluster', 'index', 'indices', 'field_caps', 'msearch', 'search', 'async_search', 'graph', '*_point_in_time', 'put_script', 'scripts_painless_execute' } restTests { From 71507291544f90ed79f5a7f2971149e1cc379711 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 19 Sep 2024 11:40:38 -0400 Subject: [PATCH 323/417] Multi term intervals: increase max_expansions (#112826) Currently multi term interval queries (prefix, wildcard, fuzzy, regexp and range) can expand maximum to 128 terms. To reach parity with span queries that we want to deprecate, set max expansions to indices.query.bool.max_clause_count which is used in span queries. 
Relates to #110491 --- docs/changelog/112826.yaml | 6 ++ .../query-dsl/intervals-query.asciidoc | 21 ++-- .../extras/MatchOnlyTextFieldMapper.java | 17 ++-- .../extras/MatchOnlyTextFieldTypeTests.java | 39 ++++++-- .../index/mapper/TextFieldMapper.java | 24 +++-- .../ConstantScoreTextFieldTypeTests.java | 24 ++++- .../index/mapper/TextFieldTypeTests.java | 28 ++++-- .../query/IntervalQueryBuilderTests.java | 95 ++++++++++++++++--- 8 files changed, 205 insertions(+), 49 deletions(-) create mode 100644 docs/changelog/112826.yaml diff --git a/docs/changelog/112826.yaml b/docs/changelog/112826.yaml new file mode 100644 index 0000000000000..65c05b4d6035a --- /dev/null +++ b/docs/changelog/112826.yaml @@ -0,0 +1,6 @@ +pr: 112826 +summary: "Multi term intervals: increase max_expansions" +area: Search +type: enhancement +issues: + - 110491 diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc index 84869838fe1e6..97b6be7d76ab6 100644 --- a/docs/reference/query-dsl/intervals-query.asciidoc +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -124,8 +124,9 @@ unstemmed ones. ==== `prefix` rule parameters The `prefix` rule matches terms that start with a specified set of characters. -This prefix can expand to match at most 128 terms. If the prefix matches more -than 128 terms, {es} returns an error. You can use the +This prefix can expand to match at most `indices.query.bool.max_clause_count` +<> terms. If the prefix matches more terms, +{es} returns an error. You can use the <> option in the field mapping to avoid this limit. @@ -151,7 +152,8 @@ separate `analyzer` is specified. ==== `wildcard` rule parameters The `wildcard` rule matches terms using a wildcard pattern. This pattern can -expand to match at most 128 terms. If the pattern matches more than 128 terms, +expand to match at most `indices.query.bool.max_clause_count` +<> terms. If the pattern matches more terms, {es} returns an error. 
`pattern`:: @@ -184,8 +186,9 @@ The `pattern` is normalized using the search analyzer from this field, unless ==== `regexp` rule parameters The `regexp` rule matches terms using a regular expression pattern. -This pattern can expand to match at most 128 terms. -If the pattern matches more than 128 terms,{es} returns an error. +This pattern can expand to match at most `indices.query.bool.max_clause_count` +<> terms. +If the pattern matches more terms,{es} returns an error. `pattern`:: (Required, string) Regexp pattern used to find matching terms. @@ -215,7 +218,8 @@ The `pattern` is normalized using the search analyzer from this field, unless The `fuzzy` rule matches terms that are similar to the provided term, within an edit distance defined by <>. If the fuzzy expansion matches more than -128 terms, {es} returns an error. +`indices.query.bool.max_clause_count` +<> terms, {es} returns an error. `term`:: (Required, string) The term to match @@ -250,8 +254,9 @@ The `term` is normalized using the search analyzer from this field, unless ==== `range` rule parameters The `range` rule matches terms contained within a provided range. -This range can expand to match at most 128 terms. -If the range matches more than 128 terms,{es} returns an error. +This range can expand to match at most `indices.query.bool.max_clause_count` +<> terms. +If the range matches more terms,{es} returns an error. `gt`:: (Optional, string) Greater than: match terms greater than the provided term. 
diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 778824d34b6fa..5904169308fab 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -21,6 +21,7 @@ import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; @@ -270,7 +271,11 @@ public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext conte @Override public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext context) { - return toIntervalsSource(Intervals.prefix(term), new PrefixQuery(new Term(name(), term)), context); + return toIntervalsSource( + Intervals.prefix(term, IndexSearcher.getMaxClauseCount()), + new PrefixQuery(new Term(name(), term)), + context + ); } @Override @@ -285,18 +290,18 @@ public IntervalsSource fuzzyIntervals( new Term(name(), term), maxDistance, prefixLength, - 128, + IndexSearcher.getMaxClauseCount(), transpositions, MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE ); - IntervalsSource fuzzyIntervals = Intervals.multiterm(fuzzyQuery.getAutomata(), term); + IntervalsSource fuzzyIntervals = Intervals.multiterm(fuzzyQuery.getAutomata(), IndexSearcher.getMaxClauseCount(), term); return toIntervalsSource(fuzzyIntervals, fuzzyQuery, context); } @Override public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { return toIntervalsSource( - Intervals.wildcard(pattern), + Intervals.wildcard(pattern, 
IndexSearcher.getMaxClauseCount()), new MatchAllDocsQuery(), // wildcard queries can be expensive, what should the approximation be? context ); @@ -305,7 +310,7 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex @Override public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { return toIntervalsSource( - Intervals.regexp(pattern), + Intervals.regexp(pattern, IndexSearcher.getMaxClauseCount()), new MatchAllDocsQuery(), // regexp queries can be expensive, what should the approximation be? context ); @@ -320,7 +325,7 @@ public IntervalsSource rangeIntervals( SearchExecutionContext context ) { return toIntervalsSource( - Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper), + Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper, IndexSearcher.getMaxClauseCount()), new MatchAllDocsQuery(), // range queries can be expensive, what should the approximation be? context ); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java index 4c20802a45058..6970dd6739ecf 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; @@ -152,30 +153,56 @@ public void testPhrasePrefixQuery() throws IOException { assertNotEquals(new MatchAllDocsQuery(), 
SourceConfirmedTextQuery.approximate(delegate)); } - public void testTermIntervals() throws IOException { + public void testTermIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource termIntervals = ft.termIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(termIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); assertEquals(Intervals.term(new BytesRef("foo")), ((SourceIntervalsSource) termIntervals).getIntervalsSource()); } - public void testPrefixIntervals() throws IOException { + public void testPrefixIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(prefixIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); - assertEquals(Intervals.prefix(new BytesRef("foo")), ((SourceIntervalsSource) prefixIntervals).getIntervalsSource()); + assertEquals( + Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) prefixIntervals).getIntervalsSource() + ); } - public void testWildcardIntervals() throws IOException { + public void testWildcardIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(wildcardIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); - assertEquals(Intervals.wildcard(new BytesRef("foo")), ((SourceIntervalsSource) wildcardIntervals).getIntervalsSource()); + assertEquals( + Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) wildcardIntervals).getIntervalsSource() + ); + } + + public void testRegexpIntervals() { + MappedFieldType ft = new MatchOnlyTextFieldType("field"); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertThat(regexpIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); + assertEquals( + 
Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) regexpIntervals).getIntervalsSource() + ); } - public void testFuzzyIntervals() throws IOException { + public void testFuzzyIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); assertThat(fuzzyIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); } + + public void testRangeIntervals() { + MappedFieldType ft = new MatchOnlyTextFieldType("field"); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertThat(rangeIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) rangeIntervals).getIntervalsSource() + ); + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index a3e8b3128d013..2c55fc35db57d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PhraseQuery; @@ -620,7 +621,10 @@ public IntervalsSource intervals(BytesRef term) { return Intervals.fixField(name(), Intervals.term(term)); } String wildcardTerm = term.utf8ToString() + "?".repeat(Math.max(0, minChars - term.length)); - return Intervals.or(Intervals.fixField(name(), Intervals.wildcard(new BytesRef(wildcardTerm))), 
Intervals.term(term)); + return Intervals.or( + Intervals.fixField(name(), Intervals.wildcard(new BytesRef(wildcardTerm), IndexSearcher.getMaxClauseCount())), + Intervals.term(term) + ); } @Override @@ -822,7 +826,7 @@ public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext con if (prefixFieldType != null) { return prefixFieldType.intervals(term); } - return Intervals.prefix(term); + return Intervals.prefix(term, IndexSearcher.getMaxClauseCount()); } @Override @@ -836,8 +840,14 @@ public IntervalsSource fuzzyIntervals( if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - FuzzyQuery fq = new FuzzyQuery(new Term(name(), term), maxDistance, prefixLength, 128, transpositions); - return Intervals.multiterm(fq.getAutomata(), term); + FuzzyQuery fq = new FuzzyQuery( + new Term(name(), term), + maxDistance, + prefixLength, + IndexSearcher.getMaxClauseCount(), + transpositions + ); + return Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), term); } @Override @@ -845,7 +855,7 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - return Intervals.wildcard(pattern); + return Intervals.wildcard(pattern, IndexSearcher.getMaxClauseCount()); } @Override @@ -853,7 +863,7 @@ public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - return Intervals.regexp(pattern); + return Intervals.regexp(pattern, IndexSearcher.getMaxClauseCount()); } @Override @@ -867,7 +877,7 @@ public IntervalsSource rangeIntervals( if 
(getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - return Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper); + return Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper, IndexSearcher.getMaxClauseCount()); } private void checkForPositions() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java index 2627ae9a39839..e454a4ffa0c8d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; @@ -231,20 +232,26 @@ public void testTermIntervals() throws IOException { public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); + assertEquals(Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), prefixIntervals); } public void testWildcardIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void 
testRegexpIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertEquals(Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), regexpIntervals); } public void testFuzzyIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); - IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), "foo"); + IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), "foo"); assertEquals(expectedIntervals, fuzzyIntervals); } @@ -259,6 +266,15 @@ public void testWildcardIntervalsWithIndexedPrefixes() { ConstantScoreTextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRangeIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + rangeIntervals + ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java index d73e8546a726a..4d246d3c557a6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java @@ -16,6 +16,7 @@ import 
org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -243,20 +244,26 @@ public void testTermIntervals() throws IOException { public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); + assertEquals(Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), prefixIntervals); } - public void testWildcardIntervals() throws IOException { + public void testWildcardIntervals() { MappedFieldType ft = createFieldType(); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); } - public void testFuzzyIntervals() throws IOException { + public void testRegexpIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertEquals(Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), regexpIntervals); + } + + public void testFuzzyIntervals() { MappedFieldType ft = createFieldType(); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); - IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), "foo"); + IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), "foo"); assertEquals(expectedIntervals, 
fuzzyIntervals); } @@ -271,6 +278,15 @@ public void testWildcardIntervalsWithIndexedPrefixes() { TextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRangeIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + rangeIntervals + ); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 0da9d5ee178fd..aad8275f4749d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -9,14 +9,22 @@ package org.elasticsearch.index.query; +import org.apache.lucene.analysis.core.KeywordAnalyzer; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.IntervalQuery; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.store.Directory; +import 
org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -34,7 +42,9 @@ import java.util.Collections; import java.util.List; +import static java.util.Collections.singleton; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -606,7 +616,7 @@ public void testPrefixes() throws IOException { } }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.prefix(new BytesRef("term"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.prefix(new BytesRef("term"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String no_positions_json = Strings.format(""" @@ -667,7 +677,13 @@ public void testPrefixes() throws IOException { builder = (IntervalQueryBuilder) parseQuery(short_prefix_json); expected = new IntervalQuery( PREFIXED_FIELD, - Intervals.or(Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.wildcard(new BytesRef("t?"))), Intervals.term("t")) + Intervals.or( + Intervals.fixField( + PREFIXED_FIELD + "._index_prefix", + Intervals.wildcard(new BytesRef("t?"), IndexSearcher.getMaxClauseCount()) + ), + Intervals.term("t") + ) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -739,7 +755,7 @@ public void testRegexp() throws IOException { }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.regexp(new BytesRef("te.*m"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.regexp(new BytesRef("te.*m"), IndexSearcher.getMaxClauseCount())); 
assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String no_positions_json = Strings.format(""" @@ -771,7 +787,10 @@ public void testRegexp() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.regexp(new BytesRef("te.*m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.regexp(new BytesRef("te.*m"), IndexSearcher.getMaxClauseCount())) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json_no_positions = Strings.format(""" @@ -791,6 +810,40 @@ public void testRegexp() throws IOException { }); } + public void testMaxExpansionExceptionFailure() throws Exception { + IntervalsSourceProvider provider1 = new IntervalsSourceProvider.Prefix("bar", "keyword", null); + IntervalsSourceProvider provider2 = new IntervalsSourceProvider.Wildcard("bar*", "keyword", null); + IntervalsSourceProvider provider3 = new IntervalsSourceProvider.Fuzzy("bar", 0, true, Fuzziness.fromEdits(1), "keyword", null); + IntervalsSourceProvider provider4 = new IntervalsSourceProvider.Regexp("bar.*", "keyword", null); + IntervalsSourceProvider provider5 = new IntervalsSourceProvider.Range("bar", "bar2", true, true, "keyword", null); + IntervalsSourceProvider provider = randomFrom(provider1, provider2, provider3, provider4, provider5); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory, new KeywordAnalyzer())) { + for (int i = 0; i < 3; i++) { + iw.addDocument(singleton(new TextField(TEXT_FIELD_NAME, "bar" + i, Field.Store.NO))); + } + try (IndexReader reader = iw.getReader()) { + int origBoolMaxClauseCount = IndexSearcher.getMaxClauseCount(); + IndexSearcher.setMaxClauseCount(1); + try { + + IntervalQueryBuilder queryBuilder = new IntervalQueryBuilder(TEXT_FIELD_NAME, 
provider); + IndexSearcher searcher = newSearcher(reader); + Query query = queryBuilder.toQuery(createSearchExecutionContext(searcher)); + RuntimeException exc = expectThrows( + RuntimeException.class, + () -> query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f).scorer(searcher.getLeafContexts().get(0)) + ); + assertThat(exc.getMessage(), containsString("expanded to too many terms (limit 1)")); + } finally { + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); + } + } + } + } + } + public void testWildcard() throws IOException { String json = Strings.format(""" { @@ -804,7 +857,7 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("te?m"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("te?m"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String no_positions_json = Strings.format(""" @@ -836,7 +889,7 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(keyword_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("Te?m"))); + expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("Te?m"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json = Strings.format(""" @@ -852,7 +905,10 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("te?m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("te?m"), 
IndexSearcher.getMaxClauseCount())) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json_no_positions = Strings.format(""" @@ -885,13 +941,22 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_analyzer_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("Te?m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("Te?m"), IndexSearcher.getMaxClauseCount())) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } private static IntervalsSource buildFuzzySource(String term, String label, int prefixLength, boolean transpositions, int editDistance) { - FuzzyQuery fq = new FuzzyQuery(new Term("field", term), editDistance, prefixLength, 128, transpositions); - return Intervals.multiterm(fq.getAutomata(), label); + FuzzyQuery fq = new FuzzyQuery( + new Term("field", term), + editDistance, + prefixLength, + IndexSearcher.getMaxClauseCount(), + transpositions + ); + return Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), label); } public void testFuzzy() throws IOException { @@ -1011,7 +1076,10 @@ public void testRange() throws IOException { } }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), true, true)); + Query expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), true, true, IndexSearcher.getMaxClauseCount()) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); json = Strings.format(""" @@ -1026,7 +1094,10 @@ public void testRange() throws IOException { } }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(json); 
- expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), false, false)); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), false, false, IndexSearcher.getMaxClauseCount()) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String incomplete_range = Strings.format(""" From 008591172ae76b201ff9c7679a32dc98d98e262e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 20 Sep 2024 06:12:21 +0000 Subject: [PATCH 324/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-e4ac57746eb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index da35190f85890..9fccc3c11a062 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-6d987e1ce1c +lucene = 10.0.0-snapshot-e4ac57746eb bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2de376504e277..aca87d9491ecc 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 1e3d353b82184ed7c163995525e0f0715fb880cb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 20 Sep 2024 06:12:21 +0000 Subject: [PATCH 325/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-a7ce3466d7c --- 
build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7f6f7167dec1c..9014f042b21d5 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.12.0-snapshot-b467a2bb66d +lucene = 9.12.0-snapshot-a7ce3466d7c bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3250d73eeaf5f..a200468c428b4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3bfd004d22fce3c370d306bc032ee09902224848 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Sep 2024 08:53:06 +0200 Subject: [PATCH 326/417] Adapt QueryAnalyzer to use TermInSetQuery#getBytesRefIterator --- .../elasticsearch/percolator/QueryAnalyzer.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index cd166f869ddc7..0e9aa6de3a0c0 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -25,13 +25,15 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; 
+import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.query.DateRangeIncludingNowQuery; -import org.elasticsearch.index.search.NestedHelper; import org.elasticsearch.lucene.queries.BlendedTermQuery; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -198,10 +200,16 @@ public void consumeTerms(Query query, Term... termsToConsume) { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - // TODO Lucene 10 upgrade: this is a workaround that only gets one term - Term term = NestedHelper.getTermInSetTerm(q); + BytesRefIterator bytesRefIterator = q.getBytesRefIterator(); + BytesRef term; Set qe = new HashSet<>(); - qe.add(new QueryExtraction(term)); + try { + while ((term = bytesRefIterator.next()) != null) { + qe.add(new QueryExtraction(new Term(field, term))); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } this.terms.add(new Result(true, qe, 1)); } else { super.consumeTermsMatching(query, field, automaton); From bbac749e507f238f97845def3283eda88beea433 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Sep 2024 09:21:47 +0200 Subject: [PATCH 327/417] restore ngram tokenizer removed due to a bad merge --- .../org/elasticsearch/analysis/common/CommonAnalysisPlugin.java | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 42c6f829215b4..35dafbec48b19 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -293,6 +293,7 @@ public Map> getTokenizers() { tokenizers.put("simple_pattern", SimplePatternTokenizerFactory::new); tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new); tokenizers.put("thai", ThaiTokenizerFactory::new); + tokenizers.put("ngram", NGramTokenizerFactory::new); tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new); tokenizers.put("char_group", CharGroupTokenizerFactory::new); tokenizers.put("classic", ClassicTokenizerFactory::new); From e5f4ef9d53e36cdf68840b9b2b391443a61512b8 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Sep 2024 09:53:54 +0200 Subject: [PATCH 328/417] Address norwegian stemmer creation issues --- .../analysis/common/StemmerTokenFilterFactory.java | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 26b96cde755bc..3c70ff5b18615 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -38,7 +38,9 @@ import org.apache.lucene.analysis.lv.LatvianStemFilter; import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream; import org.apache.lucene.analysis.no.NorwegianLightStemFilter; +import org.apache.lucene.analysis.no.NorwegianLightStemFilterFactory; import org.apache.lucene.analysis.no.NorwegianMinimalStemFilter; +import org.apache.lucene.analysis.no.NorwegianMinimalStemFilterFactory; import org.apache.lucene.analysis.pt.PortugueseLightStemFilter; import org.apache.lucene.analysis.pt.PortugueseMinimalStemFilter; import org.apache.lucene.analysis.pt.PortugueseStemFilter; @@ -74,6 
+76,7 @@ import org.tartarus.snowball.ext.TurkishStemmer; import java.io.IOException; +import java.util.Collections; public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { @@ -211,12 +214,13 @@ public TokenStream create(TokenStream tokenStream) { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { - // TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants - return new NorwegianLightStemFilter(tokenStream, 2); + NorwegianLightStemFilterFactory factory = new NorwegianLightStemFilterFactory(Collections.singletonMap("variant", "nn")); + return factory.create(tokenStream); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { - // TODO Lucene 10 upgrade: NorwegianLightStemmer is now package private, we no longer have access to the flags constants - return new NorwegianMinimalStemFilter(tokenStream, 2); - + NorwegianMinimalStemFilterFactory factory = new NorwegianMinimalStemFilterFactory( + Collections.singletonMap("variant", "nn") + ); + return factory.create(tokenStream); // Persian stemmers } else if ("persian".equalsIgnoreCase(language)) { return new PersianStemFilter(tokenStream); From c03d83d573022d45ab8574286464192a86ba28b4 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Sep 2024 10:13:02 +0200 Subject: [PATCH 329/417] extend ESTestCase#newSearcher methods and add javadocs --- .../org/elasticsearch/test/ESTestCase.java | 40 +++++++++++++++++-- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index a7eb113b80c09..061b954cd39dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -208,6 
+208,7 @@ import java.util.stream.LongStream; import java.util.stream.Stream; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.anyOf; @@ -2573,9 +2574,42 @@ public static void ensureAllContextsReleased(SearchService searchService) { } } - // lucene 10 upgrade: this overwrites LuceneTestCase#newSearcher so we never get random INTRA_SEGMENT - // concurrency. We want to slowly migrate of stuff out of using this method after we got a working branch + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ public static IndexSearcher newSearcher(IndexReader r) { - return newSearcher(r, true, true, Concurrency.INTER_SEGMENT); + return newSearcher(r, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) { + return newSearcher(r, maybeWrap, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. 
+ */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions) { + return newSearcher(r, maybeWrap, wrapWithAssertions, randomBoolean()); + } + + /** + * Create a new searcher over the reader. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions, boolean useThreads) { + if (useThreads) { + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.INTER_SEGMENT); + } + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.NONE); } } From 2318da103a34415a3554f868bd10162b782971df Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Sep 2024 10:33:32 +0200 Subject: [PATCH 330/417] Rephrase comment in OldCodecsAvailableTests This isn't a blocker for the lucene 10 upgrade, it is for the release of stack 9. It's already annotated @UpdateForV9 so we are good. --- .../xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java index 42b5ba83a0828..1288ae30caa72 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java @@ -18,7 +18,7 @@ public class OldCodecsAvailableTests extends ESTestCase { * to the next major Lucene version. 
*/ @UpdateForV9 - @AwaitsFix(bugUrl = "muted until we add bwc codecs as part of lucene 10 upgrade") + @AwaitsFix(bugUrl = "muted until we add bwc codecs to support 7.x indices in Elasticsearch 9.0") public void testLuceneBWCCodecsAvailable() { assertEquals("Add Lucene BWC codecs for Elasticsearch version 7", 8, Version.CURRENT.major); } From 46249a06624585fe11b04e4f76af146683819388 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Sep 2024 11:12:20 +0200 Subject: [PATCH 331/417] clarify comment in BWCLucene70Codec Added update for V9 annotation so we don't forget, not a blocker for lucene 10 upgrade --- .../xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java index 9b226306edf62..8084819198fb3 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java @@ -22,6 +22,7 @@ import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec; import org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60MetadataOnlyPointsFormat; @@ -32,8 +33,9 @@ public class BWCLucene70Codec extends BWCCodec { private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); private final StoredFieldsFormat storedFieldsFormat; - // TODO lucene 10 upgrade: resolve 
below hack that gets us past missing Lucene70 codec for now - private final DocValuesFormat defaultDVFormat = null; + @UpdateForV9 + // this needs addressing to support 7.x indices in 9.x + private final DocValuesFormat defaultDVFormat = null; // DocValuesFormat.forName("Lucene70"); private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { From 266979f9318d977a5f8ff4cc6f5aeb350ecfde27 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 20 Sep 2024 11:48:52 +0100 Subject: [PATCH 332/417] Fix READONCE IOContext usage --- .../index/store/FsDirectoryFactory.java | 2 ++ .../org/elasticsearch/index/store/Store.java | 27 ++++++++++++++++--- .../SearchableSnapshotDirectoryTests.java | 10 +++---- 3 files changed, 31 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index 1b8bb4215bdf1..bc94db13074db 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -123,6 +123,8 @@ public IndexInput openInput(String name, IOContext context) throws IOException { // we need to do these checks on the outer directory since the inner doesn't know about pending deletes ensureOpen(); ensureCanRead(name); + // we switch the context here since mmap checks for the READONCE context by identity + context = context == Store.READONCE_CHECKSUM ? IOContext.READONCE : context; // we only use the mmap to open inputs. Everything else is managed by the NIOFSDirectory otherwise // we might run into trouble with files that are pendingDelete in one directory but still // listed in listAll() from the other. 
We on the other hand don't want to list files from both dirs diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 8333c4e3236c3..3de266eabd9c8 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -28,11 +28,13 @@ import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; @@ -147,8 +149,15 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. 
*/ - // TODO Lucene 10 upgrade - public static final IOContext READONCE_CHECKSUM = IOContext.READONCE; + public static final IOContext READONCE_CHECKSUM = createReadOnceContext(); + + // while equivalent, these different read once contexts are checked by identity in directory implementations + private static IOContext createReadOnceContext() { + var context = IOContext.READONCE.withReadAdvice(ReadAdvice.SEQUENTIAL); + assert context != IOContext.READONCE; + assert context.equals(IOContext.READONCE); + return context; + } private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; @@ -911,6 +920,18 @@ public static boolean isReadAsHash(String file) { return SEGMENT_INFO_EXTENSION.equals(IndexFileNames.getExtension(file)) || file.startsWith(IndexFileNames.SEGMENTS + "_"); } + // We select the read once context carefully here since these constants, while equivalent are + // checked by identity in the different directory implementations. + private static IOContext contextForDirectory(String filename, Directory directory) { + if (directory instanceof StoreDirectory && filename.startsWith(IndexFileNames.SEGMENTS) == false) { + return READONCE_CHECKSUM; + } + if (FilterDirectory.unwrap(directory) instanceof FsDirectoryFactory.HybridDirectory) { + return READONCE_CHECKSUM; + } + return IOContext.READONCE; + } + private static void checksumFromLuceneFile( Directory directory, String file, @@ -920,7 +941,7 @@ private static void checksumFromLuceneFile( boolean readFileAsHash, BytesRef writerUuid ) throws IOException { - try (IndexInput in = directory.openInput(file, READONCE_CHECKSUM)) { + try (IndexInput in = directory.openInput(file, contextForDirectory(file, directory))) { final long length = in.length(); if (length < CodecUtil.footerLength()) { // If the file isn't long enough to contain the footer then verifying it triggers an IAE, but really it's corrupted diff --git 
a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index e65c4a60f89d5..98df96eca7772 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -401,9 +401,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { false, // no prewarming in this test because we want to ensure that files are accessed on purpose (directory, snapshotDirectory) -> { for (String fileName : randomSubsetOf(Arrays.asList(snapshotDirectory.listAll()))) { - final long checksum; - try (IndexInput input = directory.openInput(fileName, Store.READONCE_CHECKSUM)) { - checksum = CodecUtil.checksumEntireFile(input); + final long expectedChecksum; + try (IndexInput input = directory.openInput(fileName, IOContext.READONCE)) { + expectedChecksum = CodecUtil.checksumEntireFile(input); } final long snapshotChecksum; @@ -418,9 +418,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { } assertThat( - "Expected checksum [" + checksum + "] but got [" + snapshotChecksum + ']', + "Expected checksum [" + expectedChecksum + "] but got [" + snapshotChecksum + ']', snapshotChecksum, - equalTo(checksum) + equalTo(expectedChecksum) ); assertThat( "File [" + fileName + "] should have been read from heap", From c746906845591aedd66ddf7df30b1b26e50b9851 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Sep 2024 13:23:25 +0200 Subject: [PATCH 333/417] fix TransportSimulateBulkActionIT compilation --- .../action/bulk/TransportSimulateBulkActionIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index 75b34be3e6a72..339567f409c02 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -135,7 +135,7 @@ public void testMappingValidationIndexExistsWithComponentTemplate() throws IOExc // Now make sure nothing was actually changed: indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); From 451216712e6cefc96efbee8d8b7f91912d13b212 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 20 Sep 2024 16:43:02 +0100 Subject: [PATCH 334/417] another attempt to fix READONCE IOContext usage --- .../org/elasticsearch/index/store/Store.java | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 3de266eabd9c8..ed4c3d667ad22 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -920,18 +920,6 @@ public static boolean isReadAsHash(String file) { return SEGMENT_INFO_EXTENSION.equals(IndexFileNames.getExtension(file)) || file.startsWith(IndexFileNames.SEGMENTS 
+ "_"); } - // We select the read once context carefully here since these constants, while equivalent are - // checked by identity in the different directory implementations. - private static IOContext contextForDirectory(String filename, Directory directory) { - if (directory instanceof StoreDirectory && filename.startsWith(IndexFileNames.SEGMENTS) == false) { - return READONCE_CHECKSUM; - } - if (FilterDirectory.unwrap(directory) instanceof FsDirectoryFactory.HybridDirectory) { - return READONCE_CHECKSUM; - } - return IOContext.READONCE; - } - private static void checksumFromLuceneFile( Directory directory, String file, @@ -941,7 +929,10 @@ private static void checksumFromLuceneFile( boolean readFileAsHash, BytesRef writerUuid ) throws IOException { - try (IndexInput in = directory.openInput(file, contextForDirectory(file, directory))) { + // We select the read once context carefully here since these constants, while equivalent are + // checked by identity in the different directory implementations. + var context = file.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : READONCE_CHECKSUM; + try (IndexInput in = directory.openInput(file, context)) { final long length = in.length(); if (length < CodecUtil.footerLength()) { // If the file isn't long enough to contain the footer then verifying it triggers an IAE, but really it's corrupted From c0b6794f0ffe50534be763f06613045b18980214 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 20 Sep 2024 17:04:18 +0100 Subject: [PATCH 335/417] spotless --- server/src/main/java/org/elasticsearch/index/store/Store.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index ed4c3d667ad22..c3d21b23d6a49 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -28,7 +28,6 @@ import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; From b7574b522480e5cd8bb2903afc6f92016a5af5ec Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Fri, 20 Sep 2024 20:47:56 +0100 Subject: [PATCH 336/417] Update docs/changelog/113018.yaml --- docs/changelog/113018.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 docs/changelog/113018.yaml diff --git a/docs/changelog/113018.yaml b/docs/changelog/113018.yaml new file mode 100644 index 0000000000000..781009e7b70aa --- /dev/null +++ b/docs/changelog/113018.yaml @@ -0,0 +1,5 @@ +pr: 113018 +summary: Upgrade to Lucene 9.12 +area: Search +type: upgrade +issues: [] From aaf1bbc048e2d9e6dcd2c09d704c5f96b57b38b0 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Fri, 20 Sep 2024 20:53:09 +0100 Subject: [PATCH 337/417] Use 
the RC build --- build-tools-internal/version.properties | 2 +- build.gradle | 5 + gradle/verification-metadata.xml | 150 ++++++++++++------------ 3 files changed, 81 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9014f042b21d5..ac75a3a968ed1 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.12.0-snapshot-a7ce3466d7c +lucene = 9.12.0 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/build.gradle b/build.gradle index 746f964cb6158..82780b5a52e4f 100644 --- a/build.gradle +++ b/build.gradle @@ -241,6 +241,11 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' + + repositories { + // TODO: Temporary for Lucene RC builds. REMOVE + maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.12.0-RC1-rev-f9cb943157870156af5f31cce315b95c08295c03/lucene/maven" } + } } allprojects { diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a200468c428b4..a927bf6b5c090 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 89c4af7811f715dee9292647ee8406e90a4fc4fd Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Sat, 21 Sep 2024 00:17:03 +0200 Subject: [PATCH 338/417] Address test failures in old-lucene-versions --- .../bwc/codecs/index/LegacyDocValuesIterables.java | 12 +++++++++--- .../lucene54/Lucene54DocValuesFormatTests.java | 4 ++-- 2 files changed, 11 
insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java index 7ce51b8ade09c..c7abed7d69a59 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java @@ -222,6 +222,7 @@ public Iterator iterator() { return new Iterator() { private boolean nextIsSet; + private int currentIndex = 0; private long nextOrd; private void setNext() { @@ -229,17 +230,22 @@ private void setNext() { if (nextIsSet == false) { if (values.docID() == -1) { values.nextDoc(); + currentIndex = 0; } while (true) { if (values.docID() == DocIdSetIterator.NO_MORE_DOCS) { nextOrd = -1; break; } - nextOrd = values.nextOrd(); - if (nextOrd != -1) { - break; + if (currentIndex < values.docValueCount()) { + nextOrd = values.nextOrd(); + currentIndex++; + if (nextOrd != -1) { + break; + } } values.nextDoc(); + currentIndex = 0; } nextIsSet = true; } diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java index c819dca3ec6ff..1a2aca0d63bde 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java @@ -10,12 +10,12 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import 
org.apache.lucene.codecs.Codec; -import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; import org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.GraalVMThreadsFilter; @ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) -public class Lucene54DocValuesFormatTests extends BaseDocValuesFormatTestCase { +public class Lucene54DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene54DocValuesFormat()); From 989e48ae0528a678284be1b18ef3703c5ef78029 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Sat, 21 Sep 2024 00:22:30 +0200 Subject: [PATCH 339/417] Address test failure in BlockPostingsFormat3Tests --- .../bwc/codecs/lucene50/BlockPostingsFormat3Tests.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java index 304f7b0c934fb..59f5e5de1eff7 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java @@ -48,7 +48,9 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.test.ESTestCase; @@ -187,7 +189,11 @@ public void assertTerms(Terms leftTerms, Terms 
rightTerms, boolean deep) throws int numIntersections = atLeast(3); for (int i = 0; i < numIntersections; i++) { String re = AutomatonTestUtil.randomRegexp(random()); - CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton()); + Automaton determinized = Operations.determinize( + new RegExp(re, RegExp.NONE).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); + CompiledAutomaton automaton = new CompiledAutomaton(determinized); if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) { // TODO: test start term too TermsEnum leftIntersection = leftTerms.intersect(automaton, null); From 08a78e946df7d7b6c33f2c1ee173900250534606 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 21 Sep 2024 06:11:26 +0000 Subject: [PATCH 340/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-53d1c2bd2fb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9fccc3c11a062..bfbb3fd4e9b97 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-e4ac57746eb +lucene = 10.0.0-snapshot-53d1c2bd2fb bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index aca87d9491ecc..124225effd8a6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 
6a3210611cc358163ca3a6a64eefc82bdefcce18 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 21 Sep 2024 06:11:51 +0000 Subject: [PATCH 341/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-11c4f071a7a --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ac75a3a968ed1..fe980c81546e4 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.12.0 +lucene = 9.12.0-snapshot-11c4f071a7a bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a927bf6b5c090..61b5a414a666d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From b594455b8074bb815dc086714b648c97138e916a Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Sat, 21 Sep 2024 09:58:50 +0100 Subject: [PATCH 342/417] Update docs/changelog/113333.yaml --- docs/changelog/113333.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 docs/changelog/113333.yaml diff --git a/docs/changelog/113333.yaml b/docs/changelog/113333.yaml new file mode 100644 index 0000000000000..c6a3584845729 --- /dev/null +++ b/docs/changelog/113333.yaml @@ -0,0 +1,5 @@ +pr: 113333 +summary: Upgrade to Lucene 9.12 +area: Search +type: upgrade +issues: [] From cf56c9b2f94e1bbc274ae5d85c968c0b01f80345 
Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sat, 21 Sep 2024 11:39:40 +0100 Subject: [PATCH 343/417] remove erroneous changelog --- docs/changelog/113018.yaml | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 docs/changelog/113018.yaml diff --git a/docs/changelog/113018.yaml b/docs/changelog/113018.yaml deleted file mode 100644 index 781009e7b70aa..0000000000000 --- a/docs/changelog/113018.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113018 -summary: Upgrade to Lucene 9.12 -area: Search -type: upgrade -issues: [] From ceaf86e6f07be01da1ed0411683ab2056cdd9724 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Sat, 21 Sep 2024 15:16:23 +0200 Subject: [PATCH 344/417] Address WildcardFieldMapperTests failure --- .../xpack/wildcard/mapper/WildcardFieldMapperTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index a11a5f75fc330..a01fd9ca90995 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -1018,7 +1018,7 @@ protected String convertToRandomRegex(String randomValue) { // Assert our randomly generated regex actually matches the provided raw input. 
RegExp regex = new RegExp(result.toString()); - Automaton automaton = regex.toAutomaton(); + Automaton automaton = Operations.determinize(regex.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton); BytesRef br = new BytesRef(randomValue); assertTrue( From 303f22dd559ed92bc3c5e40ab14234c557e46cd4 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 19 Sep 2024 11:40:38 -0400 Subject: [PATCH 345/417] Multi term intervals: increase max_expansions (#112826) Currently multi term interval queries (prefix, wildcard, fuzzy, regexp and range) can expand maximum to 128 terms. To reach parity with span queries that we want to deprecate, set max expansions to indices.query.bool.max_clause_count which is used in span queries. Relates to #110491 --- docs/changelog/112826.yaml | 6 ++ .../query-dsl/intervals-query.asciidoc | 21 ++-- .../extras/MatchOnlyTextFieldMapper.java | 17 ++-- .../extras/MatchOnlyTextFieldTypeTests.java | 39 ++++++-- .../index/mapper/TextFieldMapper.java | 24 +++-- .../ConstantScoreTextFieldTypeTests.java | 24 ++++- .../index/mapper/TextFieldTypeTests.java | 28 ++++-- .../query/IntervalQueryBuilderTests.java | 95 ++++++++++++++++--- 8 files changed, 205 insertions(+), 49 deletions(-) create mode 100644 docs/changelog/112826.yaml diff --git a/docs/changelog/112826.yaml b/docs/changelog/112826.yaml new file mode 100644 index 0000000000000..65c05b4d6035a --- /dev/null +++ b/docs/changelog/112826.yaml @@ -0,0 +1,6 @@ +pr: 112826 +summary: "Multi term intervals: increase max_expansions" +area: Search +type: enhancement +issues: + - 110491 diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc index 84869838fe1e6..97b6be7d76ab6 100644 --- a/docs/reference/query-dsl/intervals-query.asciidoc +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -124,8 +124,9 @@ unstemmed ones. 
==== `prefix` rule parameters The `prefix` rule matches terms that start with a specified set of characters. -This prefix can expand to match at most 128 terms. If the prefix matches more -than 128 terms, {es} returns an error. You can use the +This prefix can expand to match at most `indices.query.bool.max_clause_count` +<> terms. If the prefix matches more terms, +{es} returns an error. You can use the <> option in the field mapping to avoid this limit. @@ -151,7 +152,8 @@ separate `analyzer` is specified. ==== `wildcard` rule parameters The `wildcard` rule matches terms using a wildcard pattern. This pattern can -expand to match at most 128 terms. If the pattern matches more than 128 terms, +expand to match at most `indices.query.bool.max_clause_count` +<> terms. If the pattern matches more terms, {es} returns an error. `pattern`:: @@ -184,8 +186,9 @@ The `pattern` is normalized using the search analyzer from this field, unless ==== `regexp` rule parameters The `regexp` rule matches terms using a regular expression pattern. -This pattern can expand to match at most 128 terms. -If the pattern matches more than 128 terms,{es} returns an error. +This pattern can expand to match at most `indices.query.bool.max_clause_count` +<> terms. +If the pattern matches more terms,{es} returns an error. `pattern`:: (Required, string) Regexp pattern used to find matching terms. @@ -215,7 +218,8 @@ The `pattern` is normalized using the search analyzer from this field, unless The `fuzzy` rule matches terms that are similar to the provided term, within an edit distance defined by <>. If the fuzzy expansion matches more than -128 terms, {es} returns an error. +`indices.query.bool.max_clause_count` +<> terms, {es} returns an error. `term`:: (Required, string) The term to match @@ -250,8 +254,9 @@ The `term` is normalized using the search analyzer from this field, unless ==== `range` rule parameters The `range` rule matches terms contained within a provided range. 
-This range can expand to match at most 128 terms. -If the range matches more than 128 terms,{es} returns an error. +This range can expand to match at most `indices.query.bool.max_clause_count` +<> terms. +If the range matches more terms,{es} returns an error. `gt`:: (Optional, string) Greater than: match terms greater than the provided term. diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 778824d34b6fa..5904169308fab 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -21,6 +21,7 @@ import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; @@ -270,7 +271,11 @@ public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext conte @Override public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext context) { - return toIntervalsSource(Intervals.prefix(term), new PrefixQuery(new Term(name(), term)), context); + return toIntervalsSource( + Intervals.prefix(term, IndexSearcher.getMaxClauseCount()), + new PrefixQuery(new Term(name(), term)), + context + ); } @Override @@ -285,18 +290,18 @@ public IntervalsSource fuzzyIntervals( new Term(name(), term), maxDistance, prefixLength, - 128, + IndexSearcher.getMaxClauseCount(), transpositions, MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE ); - IntervalsSource fuzzyIntervals = Intervals.multiterm(fuzzyQuery.getAutomata(), term); + IntervalsSource fuzzyIntervals 
= Intervals.multiterm(fuzzyQuery.getAutomata(), IndexSearcher.getMaxClauseCount(), term); return toIntervalsSource(fuzzyIntervals, fuzzyQuery, context); } @Override public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { return toIntervalsSource( - Intervals.wildcard(pattern), + Intervals.wildcard(pattern, IndexSearcher.getMaxClauseCount()), new MatchAllDocsQuery(), // wildcard queries can be expensive, what should the approximation be? context ); @@ -305,7 +310,7 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex @Override public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { return toIntervalsSource( - Intervals.regexp(pattern), + Intervals.regexp(pattern, IndexSearcher.getMaxClauseCount()), new MatchAllDocsQuery(), // regexp queries can be expensive, what should the approximation be? context ); @@ -320,7 +325,7 @@ public IntervalsSource rangeIntervals( SearchExecutionContext context ) { return toIntervalsSource( - Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper), + Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper, IndexSearcher.getMaxClauseCount()), new MatchAllDocsQuery(), // range queries can be expensive, what should the approximation be? 
context ); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java index 4c20802a45058..6970dd6739ecf 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; @@ -152,30 +153,56 @@ public void testPhrasePrefixQuery() throws IOException { assertNotEquals(new MatchAllDocsQuery(), SourceConfirmedTextQuery.approximate(delegate)); } - public void testTermIntervals() throws IOException { + public void testTermIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource termIntervals = ft.termIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(termIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); assertEquals(Intervals.term(new BytesRef("foo")), ((SourceIntervalsSource) termIntervals).getIntervalsSource()); } - public void testPrefixIntervals() throws IOException { + public void testPrefixIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(prefixIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); - assertEquals(Intervals.prefix(new BytesRef("foo")), ((SourceIntervalsSource) prefixIntervals).getIntervalsSource()); + assertEquals( + Intervals.prefix(new BytesRef("foo"), 
IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) prefixIntervals).getIntervalsSource() + ); } - public void testWildcardIntervals() throws IOException { + public void testWildcardIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(wildcardIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); - assertEquals(Intervals.wildcard(new BytesRef("foo")), ((SourceIntervalsSource) wildcardIntervals).getIntervalsSource()); + assertEquals( + Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) wildcardIntervals).getIntervalsSource() + ); + } + + public void testRegexpIntervals() { + MappedFieldType ft = new MatchOnlyTextFieldType("field"); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertThat(regexpIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); + assertEquals( + Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) regexpIntervals).getIntervalsSource() + ); } - public void testFuzzyIntervals() throws IOException { + public void testFuzzyIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); assertThat(fuzzyIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); } + + public void testRangeIntervals() { + MappedFieldType ft = new MatchOnlyTextFieldType("field"); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertThat(rangeIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) rangeIntervals).getIntervalsSource() + ); + } } diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 5ac9a644d8ba6..7a181f2ad223b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PhraseQuery; @@ -621,7 +622,10 @@ public IntervalsSource intervals(BytesRef term) { return Intervals.fixField(name(), Intervals.term(term)); } String wildcardTerm = term.utf8ToString() + "?".repeat(Math.max(0, minChars - term.length)); - return Intervals.or(Intervals.fixField(name(), Intervals.wildcard(new BytesRef(wildcardTerm))), Intervals.term(term)); + return Intervals.or( + Intervals.fixField(name(), Intervals.wildcard(new BytesRef(wildcardTerm), IndexSearcher.getMaxClauseCount())), + Intervals.term(term) + ); } @Override @@ -823,7 +827,7 @@ public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext con if (prefixFieldType != null) { return prefixFieldType.intervals(term); } - return Intervals.prefix(term); + return Intervals.prefix(term, IndexSearcher.getMaxClauseCount()); } @Override @@ -837,8 +841,14 @@ public IntervalsSource fuzzyIntervals( if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - FuzzyQuery fq = new FuzzyQuery(new Term(name(), term), maxDistance, prefixLength, 128, transpositions); - return Intervals.multiterm(fq.getAutomata(), term); + FuzzyQuery fq = new FuzzyQuery( + new Term(name(), term), + maxDistance, + prefixLength, + 
IndexSearcher.getMaxClauseCount(), + transpositions + ); + return Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), term); } @Override @@ -846,7 +856,7 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - return Intervals.wildcard(pattern); + return Intervals.wildcard(pattern, IndexSearcher.getMaxClauseCount()); } @Override @@ -854,7 +864,7 @@ public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - return Intervals.regexp(pattern); + return Intervals.regexp(pattern, IndexSearcher.getMaxClauseCount()); } @Override @@ -868,7 +878,7 @@ public IntervalsSource rangeIntervals( if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - return Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper); + return Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper, IndexSearcher.getMaxClauseCount()); } private void checkForPositions() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java index 2627ae9a39839..e454a4ffa0c8d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import 
org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; @@ -231,20 +232,26 @@ public void testTermIntervals() throws IOException { public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); + assertEquals(Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), prefixIntervals); } public void testWildcardIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRegexpIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertEquals(Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), regexpIntervals); } public void testFuzzyIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); - IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), "foo"); + IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), "foo"); assertEquals(expectedIntervals, fuzzyIntervals); } @@ -259,6 +266,15 @@ public void testWildcardIntervalsWithIndexedPrefixes() { ConstantScoreTextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new 
BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRangeIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + rangeIntervals + ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java index d73e8546a726a..4d246d3c557a6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -243,20 +244,26 @@ public void testTermIntervals() throws IOException { public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); + assertEquals(Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), prefixIntervals); } - public void testWildcardIntervals() throws IOException { + public void testWildcardIntervals() { MappedFieldType ft = createFieldType(); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - 
assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); } - public void testFuzzyIntervals() throws IOException { + public void testRegexpIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertEquals(Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), regexpIntervals); + } + + public void testFuzzyIntervals() { MappedFieldType ft = createFieldType(); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); - IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), "foo"); + IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), "foo"); assertEquals(expectedIntervals, fuzzyIntervals); } @@ -271,6 +278,15 @@ public void testWildcardIntervalsWithIndexedPrefixes() { TextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRangeIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + rangeIntervals + ); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 
0da9d5ee178fd..aad8275f4749d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -9,14 +9,22 @@ package org.elasticsearch.index.query; +import org.apache.lucene.analysis.core.KeywordAnalyzer; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.IntervalQuery; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -34,7 +42,9 @@ import java.util.Collections; import java.util.List; +import static java.util.Collections.singleton; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -606,7 +616,7 @@ public void testPrefixes() throws IOException { } }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.prefix(new BytesRef("term"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.prefix(new BytesRef("term"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String no_positions_json = 
Strings.format(""" @@ -667,7 +677,13 @@ public void testPrefixes() throws IOException { builder = (IntervalQueryBuilder) parseQuery(short_prefix_json); expected = new IntervalQuery( PREFIXED_FIELD, - Intervals.or(Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.wildcard(new BytesRef("t?"))), Intervals.term("t")) + Intervals.or( + Intervals.fixField( + PREFIXED_FIELD + "._index_prefix", + Intervals.wildcard(new BytesRef("t?"), IndexSearcher.getMaxClauseCount()) + ), + Intervals.term("t") + ) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -739,7 +755,7 @@ public void testRegexp() throws IOException { }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.regexp(new BytesRef("te.*m"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.regexp(new BytesRef("te.*m"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String no_positions_json = Strings.format(""" @@ -771,7 +787,10 @@ public void testRegexp() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.regexp(new BytesRef("te.*m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.regexp(new BytesRef("te.*m"), IndexSearcher.getMaxClauseCount())) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json_no_positions = Strings.format(""" @@ -791,6 +810,40 @@ public void testRegexp() throws IOException { }); } + public void testMaxExpansionExceptionFailure() throws Exception { + IntervalsSourceProvider provider1 = new IntervalsSourceProvider.Prefix("bar", "keyword", null); + IntervalsSourceProvider provider2 = new IntervalsSourceProvider.Wildcard("bar*", 
"keyword", null); + IntervalsSourceProvider provider3 = new IntervalsSourceProvider.Fuzzy("bar", 0, true, Fuzziness.fromEdits(1), "keyword", null); + IntervalsSourceProvider provider4 = new IntervalsSourceProvider.Regexp("bar.*", "keyword", null); + IntervalsSourceProvider provider5 = new IntervalsSourceProvider.Range("bar", "bar2", true, true, "keyword", null); + IntervalsSourceProvider provider = randomFrom(provider1, provider2, provider3, provider4, provider5); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory, new KeywordAnalyzer())) { + for (int i = 0; i < 3; i++) { + iw.addDocument(singleton(new TextField(TEXT_FIELD_NAME, "bar" + i, Field.Store.NO))); + } + try (IndexReader reader = iw.getReader()) { + int origBoolMaxClauseCount = IndexSearcher.getMaxClauseCount(); + IndexSearcher.setMaxClauseCount(1); + try { + + IntervalQueryBuilder queryBuilder = new IntervalQueryBuilder(TEXT_FIELD_NAME, provider); + IndexSearcher searcher = newSearcher(reader); + Query query = queryBuilder.toQuery(createSearchExecutionContext(searcher)); + RuntimeException exc = expectThrows( + RuntimeException.class, + () -> query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f).scorer(searcher.getLeafContexts().get(0)) + ); + assertThat(exc.getMessage(), containsString("expanded to too many terms (limit 1)")); + } finally { + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); + } + } + } + } + } + public void testWildcard() throws IOException { String json = Strings.format(""" { @@ -804,7 +857,7 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("te?m"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("te?m"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, 
builder.toQuery(createSearchExecutionContext())); String no_positions_json = Strings.format(""" @@ -836,7 +889,7 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(keyword_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("Te?m"))); + expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("Te?m"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json = Strings.format(""" @@ -852,7 +905,10 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("te?m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("te?m"), IndexSearcher.getMaxClauseCount())) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json_no_positions = Strings.format(""" @@ -885,13 +941,22 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_analyzer_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("Te?m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("Te?m"), IndexSearcher.getMaxClauseCount())) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } private static IntervalsSource buildFuzzySource(String term, String label, int prefixLength, boolean transpositions, int editDistance) { - FuzzyQuery fq = new FuzzyQuery(new Term("field", term), editDistance, prefixLength, 128, transpositions); - return Intervals.multiterm(fq.getAutomata(), label); + 
FuzzyQuery fq = new FuzzyQuery( + new Term("field", term), + editDistance, + prefixLength, + IndexSearcher.getMaxClauseCount(), + transpositions + ); + return Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), label); } public void testFuzzy() throws IOException { @@ -1011,7 +1076,10 @@ public void testRange() throws IOException { } }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), true, true)); + Query expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), true, true, IndexSearcher.getMaxClauseCount()) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); json = Strings.format(""" @@ -1026,7 +1094,10 @@ public void testRange() throws IOException { } }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), false, false)); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), false, false, IndexSearcher.getMaxClauseCount()) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String incomplete_range = Strings.format(""" From 14451f2451e18c58e8857d10e7bb26a2efd08ef2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 22 Sep 2024 06:11:33 +0000 Subject: [PATCH 346/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-11c4f071a7a --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 61b5a414a666d..aca0968c3e031 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From e1dcf11a1213d8c28a32ae6ee01837da2859a370 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 22 Sep 2024 06:11:40 +0000 Subject: [PATCH 347/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-53d1c2bd2fb --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 124225effd8a6..64fdda8987b44 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From e3c24b205ed4b40d421b9277dd85a3ec0e1d221a Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 22 Sep 2024 11:15:49 +0100 Subject: [PATCH 348/417] fix WildcardFieldMapperTests to include --- .../xpack/wildcard/mapper/WildcardFieldMapperTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index a01fd9ca90995..8b834722b2209 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -1017,7 +1017,8 @@ protected String convertToRandomRegex(String randomValue) { } // Assert our randomly generated regex actually matches the provided raw input. 
- RegExp regex = new RegExp(result.toString()); + int includeDeprecatedComplement = RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT; + RegExp regex = new RegExp(result.toString(), includeDeprecatedComplement); Automaton automaton = Operations.determinize(regex.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton); BytesRef br = new BytesRef(randomValue); From c74d361b025cb70fa27d109612c1c1284cf7c9ef Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 22 Sep 2024 11:34:08 +0100 Subject: [PATCH 349/417] Fix docs build --- docs/reference/query-dsl/intervals-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc index 97b6be7d76ab6..069021dddb69f 100644 --- a/docs/reference/query-dsl/intervals-query.asciidoc +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -202,7 +202,7 @@ performance. `analyzer`:: (Optional, string) <> used to normalize the `pattern`. Defaults to the top-level ``'s analyzer. - +-- `use_field`:: + -- From 30d23b201704b17d506d63ccf71d9fd523627f57 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Sun, 22 Sep 2024 20:30:36 +0100 Subject: [PATCH 350/417] revert --- build.gradle | 5 ----- 1 file changed, 5 deletions(-) diff --git a/build.gradle b/build.gradle index 82780b5a52e4f..746f964cb6158 100644 --- a/build.gradle +++ b/build.gradle @@ -241,11 +241,6 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' - - repositories { - // TODO: Temporary for Lucene RC builds. 
REMOVE - maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.12.0-RC1-rev-f9cb943157870156af5f31cce315b95c08295c03/lucene/maven" } - } } allprojects { From c9cb409576680ae06326bd4939b93b84f3faf909 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 23 Sep 2024 06:11:28 +0000 Subject: [PATCH 351/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-53d1c2bd2fb --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 64fdda8987b44..d4cbf3560831b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 2d25cbcb610e3fb112fd903d3a3952a288c648d5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 23 Sep 2024 06:11:29 +0000 Subject: [PATCH 352/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-11c4f071a7a --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index aca0968c3e031..9681431e6b210 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 381132cba390b8282e1066c4c41d0921095c4fcc Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 23 Sep 2024 12:27:32 +0200 Subject: [PATCH 353/417] Restore index versions 7 in lucene_snapshot_10 (#113317) --- .../analysis/common/CommonAnalysisPlugin.java | 5 +- .../common/SynonymsAnalysisTests.java | 4 +- .../mapper/LegacyGeoShapeFieldMapper.java | 5 +- .../legacygeo/GeoJsonShapeParserTests.java | 3 +- .../legacygeo/GeoWKTShapeParserTests.java | 29 +++++++++ 
.../LegacyGeoShapeFieldMapperTests.java | 4 +- .../mapper/LegacyGeoShapeFieldTypeTests.java | 3 +- .../s3/S3BlobStoreRepositoryTests.java | 5 +- .../repositories/s3/S3Repository.java | 18 +++--- .../MultiVersionRepositoryAccessIT.java | 5 +- .../elasticsearch/upgrades/RecoveryIT.java | 2 + .../snapshots/CloneSnapshotIT.java | 16 ++++- .../snapshots/ConcurrentSnapshotsIT.java | 39 ++++++++++++ .../CorruptedBlobStoreRepositoryIT.java | 7 +-- ...ransportGetFeatureUpgradeStatusAction.java | 4 +- .../cluster/metadata/IndexMetadata.java | 2 + .../metadata/MetadataCreateIndexService.java | 22 ++++--- .../allocator/DesiredBalanceReconciler.java | 5 +- .../settings/AbstractScopedSettings.java | 3 - .../common/settings/IndexScopedSettings.java | 4 +- .../gateway/PersistedClusterStateService.java | 2 +- .../elasticsearch/index/IndexVersions.java | 21 +++++++ .../index/engine/ReadOnlyEngine.java | 6 +- .../index/mapper/MapperRegistry.java | 2 +- .../index/mapper/TextFieldMapper.java | 5 +- .../vectors/DenseVectorFieldMapper.java | 15 +++-- .../mapper/vectors/VectorEncoderDecoder.java | 12 +++- .../index/seqno/ReplicationTracker.java | 7 ++- .../elasticsearch/index/shard/IndexShard.java | 3 +- .../index/similarity/SimilarityProviders.java | 52 +++++++++++++--- .../indices/analysis/AnalysisModule.java | 16 ++++- .../indices/recovery/RecoverySettings.java | 7 +-- .../recovery/RecoverySourceHandler.java | 4 +- .../IndexMetaDataGenerations.java | 3 +- .../repositories/RepositoryData.java | 60 +++++++++++-------- .../repositories/ShardGeneration.java | 3 +- .../repositories/ShardGenerations.java | 2 +- .../blobstore/BlobStoreRepository.java | 49 +++++++++------ .../completion/context/GeoContextMapping.java | 46 ++++++++++---- .../snapshots/SnapshotsService.java | 32 ++++++---- .../MetadataCreateIndexServiceTests.java | 30 ++++++++++ .../cluster/metadata/MetadataTests.java | 58 ++++++++++++++++-- .../elasticsearch/env/NodeMetadataTests.java | 2 +- 
.../index/engine/InternalEngineTests.java | 2 - .../index/mapper/DateFieldMapperTests.java | 1 + .../index/mapper/ParametrizedMapperTests.java | 6 ++ ...BinaryDenseVectorScriptDocValuesTests.java | 12 ++-- .../indices/IndicesModuleTests.java | 30 +++++++--- .../index/mapper/MapperTestCase.java | 2 + .../AbstractSnapshotIntegTestCase.java | 4 +- .../test/rest/ESRestTestCase.java | 30 +++++++--- .../action/AutoFollowCoordinatorTests.java | 3 +- .../deprecation/IndexDeprecationChecks.java | 7 +-- .../IndexDeprecationChecksTests.java | 2 +- .../querydsl/query/SpatialRelatesQuery.java | 5 ++ .../SearchableSnapshotsIntegTests.java | 10 +++- ...archableSnapshotIndexMetadataUpgrader.java | 10 +++- ...bleSnapshotIndexMetadataUpgraderTests.java | 11 +++- .../SnapshotsRecoveryPlannerServiceTests.java | 20 +++++-- .../GeoShapeWithDocValuesFieldMapper.java | 10 +++- .../index/mapper/ShapeFieldMapper.java | 7 +++ ...GeoShapeWithDocValuesFieldMapperTests.java | 3 +- ...LegacyGeoShapeWithDocValuesQueryTests.java | 6 +- .../wildcard/mapper/WildcardFieldMapper.java | 7 ++- 64 files changed, 614 insertions(+), 196 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 35dafbec48b19..a97154fd4d1ff 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -102,6 +102,7 @@ import org.apache.lucene.analysis.util.ElisionFilter; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory; @@ -480,9 +481,10 
@@ public List getPreConfiguredTokenFilters() { ) ); filters.add(PreConfiguredTokenFilter.indexVersion("word_delimiter_graph", false, false, (input, version) -> { + boolean adjustOffsets = version.onOrAfter(IndexVersions.V_7_3_0); return new WordDelimiterGraphFilter( input, - true, + adjustOffsets, WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE, WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS | WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS @@ -515,6 +517,7 @@ public List getPreConfiguredTokenizers() { // This is already broken with normalization, so backwards compat isn't necessary? tokenizers.add(PreConfiguredTokenizer.singleton("lowercase", XLowerCaseTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new)); + return tokenizers; } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java index 9b59427b97782..4fc6ca96b5f08 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java @@ -338,7 +338,7 @@ public void testShingleFilters() { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonyms.type", "synonym") @@ -392,7 +392,7 @@ public void testPreconfiguredTokenFilters() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - 
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .build(); diff --git a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java index 615d1dce5b692..1616d2727bf8a 100644 --- a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java +++ b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java @@ -30,6 +30,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; import org.elasticsearch.index.mapper.DocumentParserContext; @@ -232,7 +233,9 @@ public Builder(String name, IndexVersion version, boolean ignoreMalformedByDefau }); // Set up serialization - this.strategy.alwaysSerialize(); + if (version.onOrAfter(IndexVersions.V_7_0_0)) { + this.strategy.alwaysSerialize(); + } // serialize treeLevels if treeLevels is configured, OR if defaults are requested and precision is not configured treeLevels.setSerializerCheck((id, ic, v) -> ic || (id && precision.get() == null)); // serialize precision if precision is configured, OR if defaults are requested and treeLevels is not configured diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java index f2b5a9841bdac..b53a850f9489c 100644 --- 
a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.legacygeo.parsers.ShapeParser; @@ -390,7 +391,7 @@ public void testParse3DPolygon() throws IOException, ParseException { LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); - final IndexVersion version = IndexVersionUtils.randomCompatibleVersion(random()); + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( MapperBuilderContext.root(false, false) ); diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java index 3a78ca809b4fc..58474d5521b74 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.builders.CoordinatesBuilder; import 
org.elasticsearch.legacygeo.builders.EnvelopeBuilder; @@ -35,6 +36,7 @@ import org.elasticsearch.legacygeo.parsers.GeoWKTParser; import org.elasticsearch.legacygeo.parsers.ShapeParser; import org.elasticsearch.legacygeo.test.RandomShapeGenerator; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -325,6 +327,15 @@ public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(builder.toWKT()); XContentParser parser = createParser(xContentBuilder); parser.nextToken(); + + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( + MapperBuilderContext.root(false, false) + ); + + // test store z disabled + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> ShapeParser.parse(parser, mapperBuilder)); + assertThat(e, hasToString(containsString("unable to add coordinate to CoordinateBuilder: coordinate dimensions do not match"))); } @UpdateForV9 @@ -342,6 +353,14 @@ public void testParsePolyWithStoredZ() throws IOException { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(builder.toWKT()); XContentParser parser = createParser(xContentBuilder); parser.nextToken(); + + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( + MapperBuilderContext.root(false, false) + ); + + ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder); + assertEquals(shapeBuilder.numDimensions(), 3); } @UpdateForV9 @@ -353,6 +372,16 @@ 
public void testParseOpenPolygon() throws IOException { XContentParser parser = createParser(xContentBuilder); parser.nextToken(); + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); + final LegacyGeoShapeFieldMapper defaultMapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).coerce( + false + ).build(MapperBuilderContext.root(false, false)); + ElasticsearchParseException exception = expectThrows( + ElasticsearchParseException.class, + () -> ShapeParser.parse(parser, defaultMapperBuilder) + ); + assertEquals("invalid LinearRing found (coordinates are not closed)", exception.getMessage()); + final LegacyGeoShapeFieldMapper coercingMapperBuilder = new LegacyGeoShapeFieldMapper.Builder( "test", IndexVersion.current(), diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java index fe576d146c4f0..a0d5894ff6220 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.legacygeo.test.TestLegacyGeoShapeFieldMapperPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AssumptionViolatedException; @@ -54,6 +55,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +@SuppressWarnings("deprecation") @UpdateForV9 @AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeFieldMapperTests extends 
MapperTestCase { @@ -125,7 +127,7 @@ protected boolean supportsMeta() { @Override protected IndexVersion getVersion() { - return IndexVersions.V_8_0_0; + return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); } public void testLegacySwitches() throws IOException { diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java index 8084834f0a102..78ba5db1f95c6 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType; +import org.elasticsearch.test.index.IndexVersionUtils; import java.io.IOException; import java.util.List; @@ -40,7 +41,7 @@ public void testSetStrategyName() { } public void testFetchSourceValue() throws IOException { - IndexVersion version = IndexVersions.V_8_0_0; + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); MappedFieldType mapper = new LegacyGeoShapeFieldMapper.Builder("field", version, false, true).build( MapperBuilderContext.root(false, false) ).fieldType(); diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 0643a5388d79e..429a81b02bd5e 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ 
b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -42,7 +42,6 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; @@ -57,6 +56,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotState; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.snapshots.mockstore.BlobStoreWrapper; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; @@ -426,8 +426,7 @@ public void testEnforcedCooldownPeriod() throws IOException { ) ); final BytesReference serialized = BytesReference.bytes( - // TODO lucene 10 upgrade, we can probably remove the IndexVersions here once we delete all V7 versions - modifiedRepositoryData.snapshotsToXContent(XContentFactory.jsonBuilder(), IndexVersions.V_8_0_0) + modifiedRepositoryData.snapshotsToXContent(XContentFactory.jsonBuilder(), SnapshotsService.OLD_SNAPSHOT_FORMAT) ); if (randomBoolean()) { repository.blobStore() diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 3a2d7be727f7d..f919284d8e897 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; 
import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.repositories.FinalizeSnapshotContext; @@ -155,12 +156,13 @@ class S3Repository extends MeteredBlobStoreRepository { /** * Artificial delay to introduce after a snapshot finalization or delete has finished so long as the repository is still using the - * backwards compatible snapshot format from before V_7_6_0. This delay is necessary so that the eventually consistent - * nature of AWS S3 does not randomly result in repository corruption when doing repository operations in rapid succession on a - * repository in the old metadata format. + * backwards compatible snapshot format from before + * {@link org.elasticsearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION} ({@link IndexVersions#V_7_6_0}). + * This delay is necessary so that the eventually consistent nature of AWS S3 does not randomly result in repository corruption when + * doing repository operations in rapid succession on a repository in the old metadata format. * This setting should not be adjusted in production when working with an AWS S3 backed repository. Doing so risks the repository * becoming silently corrupted. To get rid of this waiting period, either create a new S3 repository or remove all snapshots older than - * V_7_6_0 from the repository which will trigger an upgrade of the repository metadata to the new + * {@link IndexVersions#V_7_6_0} from the repository which will trigger an upgrade of the repository metadata to the new * format and disable the cooldown period. 
*/ static final Setting COOLDOWN_PERIOD = Setting.timeSetting( @@ -369,12 +371,14 @@ public void onFailure(Exception e) { private void logCooldownInfo() { logger.info( - "Sleeping for [{}] after modifying repository [{}] because it contains snapshots older than version [\"7060099\"]" + "Sleeping for [{}] after modifying repository [{}] because it contains snapshots older than version [{}]" + " and therefore is using a backwards compatible metadata format that requires this cooldown period to avoid " + "repository corruption. To get rid of this message and move to the new repository metadata format, either remove " - + "all snapshots older than version [\"7060099\"] from the repository or create a new repository at an empty location.", + + "all snapshots older than version [{}] from the repository or create a new repository at an empty location.", coolDown, - metadata.name() + metadata.name(), + SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION, + SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION ); } diff --git a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java index fe0a153df9214..dc002ea1a44c1 100644 --- a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java +++ b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; @@ -181,7 +182,9 @@ public void testUpgradeMovesRepoToNewMetaVersion() throws IOException { // 7.12.0+ will try to load RepositoryData during 
repo creation if verify is true, which is impossible in case of version // incompatibility in the downgrade test step. We verify that it is impossible here and then create the repo using verify=false // to check behavior on other operations below. - final boolean verify = TEST_STEP != TestStep.STEP3_OLD_CLUSTER || SnapshotsService.includesUUIDs(minNodeVersion); + final boolean verify = TEST_STEP != TestStep.STEP3_OLD_CLUSTER + || SnapshotsService.includesUUIDs(minNodeVersion) + || minNodeVersion.before(IndexVersions.V_7_12_0); if (verify == false) { expectThrowsAnyOf(EXPECTED_BWC_EXCEPTIONS, () -> createRepository(repoName, false, true)); } diff --git a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 166b95c867398..a72a82142c872 100644 --- a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -448,6 +448,7 @@ public void testCloseIndexDuringRollingUpgrade() throws Exception { ensureGreen(indexName); closeIndex(indexName); } + if (minimumIndexVersion().onOrAfter(IndexVersions.V_8_0_0)) { // index is created on a version that supports the replication of closed indices, // so we expect the index to be closed and replicated @@ -494,6 +495,7 @@ public void testClosedIndexNoopRecovery() throws Exception { } else { assertClosedIndex(indexName, false); } + } /** diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java index ccf8f90ecba87..209195301a659 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java @@ -54,6 +54,11 @@ public void testShardClone() throws Exception { 
final Path repoPath = randomRepoPath(); createRepository(repoName, "fs", repoPath); + final boolean useBwCFormat = randomBoolean(); + if (useBwCFormat) { + initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); + } + final String indexName = "test-index"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String sourceSnapshot = "source-snapshot"; @@ -68,8 +73,11 @@ public void testShardClone() throws Exception { final SnapshotId targetSnapshotId = new SnapshotId("target-snapshot", UUIDs.randomBase64UUID(random())); final ShardGeneration currentShardGen; - - currentShardGen = repositoryData.shardGenerations().getShardGen(indexId, shardId); + if (useBwCFormat) { + currentShardGen = null; + } else { + currentShardGen = repositoryData.shardGenerations().getShardGen(indexId, shardId); + } final ShardSnapshotResult shardSnapshotResult = safeAwait( listener -> repository.cloneShardSnapshot( sourceSnapshotInfo.snapshotId(), @@ -81,6 +89,10 @@ public void testShardClone() throws Exception { ); final ShardGeneration newShardGeneration = shardSnapshotResult.getGeneration(); + if (useBwCFormat) { + assertEquals(newShardGeneration, new ShardGeneration(1L)); // Initial snapshot brought it to 0, clone increments it to 1 + } + final BlobStoreIndexShardSnapshot targetShardSnapshot = readShardSnapshot(repository, repositoryShardId, targetSnapshotId); final BlobStoreIndexShardSnapshot sourceShardSnapshot = readShardSnapshot( repository, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 4a5bff737d439..de62c0152817a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -38,6 +38,8 @@ import 
org.elasticsearch.repositories.RepositoryConflictException; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.ShardGenerations; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESIntegTestCase; @@ -1371,6 +1373,43 @@ public void testConcurrentOperationsLimit() throws Exception { } } + public void testConcurrentSnapshotWorksWithOldVersionRepo() throws Exception { + internalCluster().startMasterOnlyNode(); + final String dataNode = internalCluster().startDataOnlyNode(); + final String repoName = "test-repo"; + final Path repoPath = randomRepoPath(); + createRepository( + repoName, + "mock", + Settings.builder().put(BlobStoreRepository.CACHE_REPOSITORY_DATA.getKey(), false).put("location", repoPath) + ); + initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); + + createIndexWithContent("index-slow"); + + final ActionFuture createSlowFuture = startFullSnapshotBlockedOnDataNode( + "slow-snapshot", + repoName, + dataNode + ); + + final String dataNode2 = internalCluster().startDataOnlyNode(); + ensureStableCluster(3); + final String indexFast = "index-fast"; + createIndexWithContent(indexFast, dataNode2, dataNode); + + final ActionFuture createFastSnapshot = startFullSnapshot(repoName, "fast-snapshot"); + + assertThat(createSlowFuture.isDone(), is(false)); + unblockNode(repoName, dataNode); + + assertSuccessful(createFastSnapshot); + assertSuccessful(createSlowFuture); + + final RepositoryData repositoryData = getRepositoryData(repoName); + assertThat(repositoryData.shardGenerations(), is(ShardGenerations.EMPTY)); + } + public void testQueuedDeleteAfterFinalizationFailure() throws Exception { final String masterNode = internalCluster().startMasterOnlyNode(); final String repoName = 
"test-repo"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java index 27be9c9bbf457..2206f34e4d2f3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshotsIntegritySuppressor; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.IndexMetaDataGenerations; @@ -330,7 +329,7 @@ public void testHandlingMissingRootLevelSnapshotMetadata() throws Exception { logger.info("--> verify that repo is assumed in old metadata format"); assertThat( SnapshotsService.minCompatibleVersion(IndexVersion.current(), getRepositoryData(repoName), null), - is(IndexVersions.V_8_0_0) + is(SnapshotsService.OLD_SNAPSHOT_FORMAT) ); logger.info("--> verify that snapshot with missing root level metadata can be deleted"); @@ -404,8 +403,6 @@ public void testMountCorruptedRepositoryData() throws Exception { ); } - // TODO lucene 10 upgrade, deleting old IndexVersions and with this SnapshotsService.OLD_SNAPSHOT_FORMAT - // can the following test be deleted? public void testHandleSnapshotErrorWithBwCFormat() throws Exception { final String repoName = "test-repo"; final Path repoPath = randomRepoPath(); @@ -431,8 +428,6 @@ public void testHandleSnapshotErrorWithBwCFormat() throws Exception { createFullSnapshot(repoName, "snapshot-2"); } - // TODO lucene 10 upgrade, deleting old IndexVersions and with this SnapshotsService.OLD_SNAPSHOT_FORMAT - // can the following test be deleted? 
public void testRepairBrokenShardGenerations() throws Exception { final String repoName = "test-repo"; final Path repoPath = randomRepoPath(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java index 1483594ce52b8..be151143a8171 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java @@ -58,9 +58,7 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA */ @UpdateForV9 public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_7_0_0; - @UpdateForV9 - // TODO lucene 10 upgrade, we increased this to IndexVersions.V_8_0_0, not sure if that was premature. Check with ie. core/infra - public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_8_0_0; + public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_7_0_0; private final SystemIndices systemIndices; PersistentTasksService persistentTasksService; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 4841263c5eea8..9760d84c67c5b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -2734,6 +2734,8 @@ public static IndexMetadata fromXContent(XContentParser parser, Map // skip any pre-7.2 closed indices which have no routing table entries at all - indexMetadata.getState() == IndexMetadata.State.OPEN || MetadataIndexStateService.isIndexVerifiedBeforeClosed(indexMetadata)) + 
indexMetadata.getCreationVersion().onOrAfter(IndexVersions.V_7_2_0) + || indexMetadata.getState() == IndexMetadata.State.OPEN + || MetadataIndexStateService.isIndexVerifiedBeforeClosed(indexMetadata)) .flatMap( indexMetadata -> IntStream.range(0, indexMetadata.getNumberOfShards()) .mapToObj( diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index ab0da05d86d46..60626b9e2375f 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; -import org.elasticsearch.core.UpdateForV9; import java.util.ArrayList; import java.util.Collections; @@ -438,8 +437,6 @@ public synchronized void initializeAndWatch(Setting setting, Consumer addSettingsUpdateConsumer(setting, consumer); } - @UpdateForV9 - // do we need to rename / rework this method for v9? protected void validateDeprecatedAndRemovedSettingV7(Settings settings, Setting setting) {} /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 33e7d5b9484f3..69abb59689c00 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -267,9 +267,7 @@ protected void validateDeprecatedAndRemovedSettingV7(Settings settings, Setting< // error out on those validations, we will check with the creation version present at index // creation time, as well as on index update settings. 
if (indexVersion.equals(IndexVersions.ZERO) == false - // TODO lucene 10 upgrade, check if we need to change anything for pre8 indices - // old: && (indexVersion.before(IndexVersions.V_7_0_0) || indexVersion.onOrAfter(IndexVersions.V_8_0_0))) { - && indexVersion.onOrAfter(IndexVersions.V_8_0_0)) { + && (indexVersion.before(IndexVersions.V_7_0_0) || indexVersion.onOrAfter(IndexVersions.V_8_0_0))) { throw new IllegalArgumentException("unknown setting [" + setting.getKey() + "]"); } } diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 62b45142c5632..0c6cf2c8a0761 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -715,7 +715,7 @@ private static void consumeFromType( if (document.getField(PAGE_FIELD_NAME) == null) { // legacy format: not paginated or compressed - assert IndexVersions.MINIMUM_COMPATIBLE.before(IndexVersion.fromId(7_16_00_99)); + assert IndexVersions.MINIMUM_COMPATIBLE.before(IndexVersions.V_7_16_0); bytesReferenceConsumer.accept(documentData); continue; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7350354a6dca0..c227387be3748 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -60,6 +60,27 @@ private static Version parseUnchecked(String version) { @UpdateForV9 // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); + + public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_1_0 = def(7_01_00_99, parseUnchecked("8.0.0")); + public static final 
IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); + public static final IndexVersion V_7_4_0 = def(7_04_00_99, parseUnchecked("8.2.0")); + public static final IndexVersion V_7_5_0 = def(7_05_00_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_5_2 = def(7_05_02_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); + public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); + public static final IndexVersion V_7_10_0 = def(7_10_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_11_0 = def(7_11_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); + public static final IndexVersion V_7_13_0 = def(7_13_00_99, parseUnchecked("8.8.2")); + public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_15_0 = def(7_15_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_16_0 = def(7_16_00_99, parseUnchecked("8.10.1")); + public static final IndexVersion V_7_17_0 = def(7_17_00_99, parseUnchecked("8.11.1")); public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index 191cb98c54d0c..d4a2fe1b57903 
100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentParser; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.seqno.SeqNoStats; @@ -174,7 +176,9 @@ protected void ensureMaxSeqNoEqualsToGlobalCheckpoint(final SeqNoStats seqNoStat // In addition to that we only execute the check if the index the engine belongs to has been // created after the refactoring of the Close Index API and its TransportVerifyShardBeforeCloseAction // that guarantee that all operations have been flushed to Lucene. - if (seqNoStats.getGlobalCheckpoint() != SequenceNumbers.UNASSIGNED_SEQ_NO) { + IndexVersion indexVersionCreated = engineConfig.getIndexSettings().getIndexVersionCreated(); + if (indexVersionCreated.onOrAfter(IndexVersions.V_7_2_0) + || (seqNoStats.getGlobalCheckpoint() != SequenceNumbers.UNASSIGNED_SEQ_NO)) { assert assertMaxSeqNoEqualsToGlobalCheckpoint(seqNoStats.getMaxSeqNo(), seqNoStats.getGlobalCheckpoint()); if (seqNoStats.getMaxSeqNo() != seqNoStats.getGlobalCheckpoint()) { throw new IllegalStateException( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java index a0afaa93357d5..ea94576f5c536 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java @@ -75,7 +75,7 @@ public Map getRuntimeFieldParsers() { public Map getMetadataMapperParsers(IndexVersion indexCreatedVersion) { if 
(indexCreatedVersion.onOrAfter(IndexVersions.V_8_0_0)) { return metadataMapperParsers; - } else if (indexCreatedVersion.onOrAfter(IndexVersion.fromId(7000099))) { + } else if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { return metadataMapperParsers7x; } else if (indexCreatedVersion.onOrAfter(IndexVersion.fromId(6000099))) { return metadataMapperParsers6x; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 7a181f2ad223b..c78a933838d7c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -56,6 +56,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -416,9 +417,7 @@ private SubFieldInfo buildPrefixInfo(MapperBuilderContext context, FieldType fie * or a multi-field). This way search will continue to work on old indices and new indices * will use the expected full name. */ - // TODO lucene 10 upgrade, we remove pre 8.x index versions but may need these for legacy archive support here? - // String fullName = indexCreatedVersion.before(IndexVersions.V_7_2_1) ? leafName() : context.buildFullName(leafName()); - String fullName = context.buildFullName(leafName()); + String fullName = indexCreatedVersion.before(IndexVersions.V_7_2_1) ? leafName() : context.buildFullName(leafName()); // Copy the index options of the main field to allow phrase queries on // the prefix field. 
FieldType pft = new FieldType(fieldType); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 07ccbc10ad7dc..4adfe619ca4e1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -106,6 +106,7 @@ public static boolean isNotUnitVector(float magnitude) { public static final NodeFeature INT4_QUANTIZATION = new NodeFeature("mapper.vectors.int4_quantization"); public static final NodeFeature BIT_VECTORS = new NodeFeature("mapper.vectors.bit_vectors"); + public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; public static final IndexVersion NORMALIZE_COSINE = IndexVersions.NORMALIZED_VECTOR_COSINE; public static final IndexVersion DEFAULT_TO_INT8 = DEFAULT_DENSE_VECTOR_TO_INT8_HNSW; @@ -2036,15 +2037,19 @@ private void parseBinaryDocValuesVectorAndIndex(DocumentParserContext context) t // this code is here and not in the VectorEncoderDecoder so not to create extra arrays int dims = fieldType().dims; ElementType elementType = fieldType().elementType; - int numBytes = elementType.getNumBytes(dims) + MAGNITUDE_BYTES; + int numBytes = indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION) + ? 
elementType.getNumBytes(dims) + MAGNITUDE_BYTES + : elementType.getNumBytes(dims); ByteBuffer byteBuffer = elementType.createByteBuffer(indexCreatedVersion, numBytes); VectorData vectorData = elementType.parseKnnVector(context, this); vectorData.addToBuffer(byteBuffer); - // encode vector magnitude at the end - double dotProduct = elementType.computeSquaredMagnitude(vectorData); - float vectorMagnitude = (float) Math.sqrt(dotProduct); - byteBuffer.putFloat(vectorMagnitude); + if (indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION)) { + // encode vector magnitude at the end + double dotProduct = elementType.computeSquaredMagnitude(vectorData); + float vectorMagnitude = (float) Math.sqrt(dotProduct); + byteBuffer.putFloat(vectorMagnitude); + } Field field = new BinaryDocValuesField(fieldType().name(), new BytesRef(byteBuffer.array())); context.doc().addWithKey(fieldType().name(), field); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java index 81d32a776e446..9d09a7493d605 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoder.java @@ -18,6 +18,7 @@ import java.nio.FloatBuffer; import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION; +import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION; public final class VectorEncoderDecoder { public static final byte INT_BYTES = 4; @@ -25,7 +26,9 @@ public final class VectorEncoderDecoder { private VectorEncoderDecoder() {} public static int denseVectorLength(IndexVersion indexVersion, BytesRef vectorBR) { - return (vectorBR.length - INT_BYTES) / INT_BYTES; + return indexVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION) + ? 
(vectorBR.length - INT_BYTES) / INT_BYTES + : vectorBR.length / INT_BYTES; } /** @@ -34,6 +37,7 @@ public static int denseVectorLength(IndexVersion indexVersion, BytesRef vectorBR * equal to 7.5.0, since vectors created prior to that do not store the magnitude. */ public static float decodeMagnitude(IndexVersion indexVersion, BytesRef vectorBR) { + assert indexVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION); ByteBuffer byteBuffer = indexVersion.onOrAfter(LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION) ? ByteBuffer.wrap(vectorBR.bytes, vectorBR.offset, vectorBR.length).order(ByteOrder.LITTLE_ENDIAN) : ByteBuffer.wrap(vectorBR.bytes, vectorBR.offset, vectorBR.length); @@ -51,7 +55,11 @@ public static float getMagnitude(IndexVersion indexVersion, BytesRef vectorBR, f if (vectorBR == null) { throw new IllegalArgumentException(DenseVectorScriptDocValues.MISSING_VECTOR_FIELD_MESSAGE); } - return decodeMagnitude(indexVersion, vectorBR); + if (indexVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION)) { + return decodeMagnitude(indexVersion, vectorBR); + } else { + return calculateMagnitude(decodedVector); + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 8813852f4bddd..003eb4010415b 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.replication.ReplicationResponse; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; @@ -975,8 +976,10 @@ public ReplicationTracker( this.pendingInSync = new 
HashSet<>(); this.routingTable = null; this.replicationGroup = null; - // TODO lucene 10 upgrade, remove the following field since its trivially true after V_8_0_0 - this.hasAllPeerRecoveryRetentionLeases = true; + this.hasAllPeerRecoveryRetentionLeases = indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0) + || (indexSettings.isSoftDeleteEnabled() + && indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_4_0) + && indexSettings.getIndexMetadata().getState() == IndexMetadata.State.OPEN); this.fileBasedRecoveryThreshold = IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.get(indexSettings.getSettings()); this.safeCommitInfoSupplier = safeCommitInfoSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index d594222d4b5be..62d2aa1f026f7 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -74,6 +74,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.bulk.stats.BulkOperationListener; import org.elasticsearch.index.bulk.stats.BulkStats; @@ -3396,7 +3397,7 @@ public RetentionLease addPeerRecoveryRetentionLease( ) { assert assertPrimaryMode(); // only needed for BWC reasons involving rolling upgrades from versions that do not support PRRLs: - assert indexSettings.isSoftDeleteEnabled() == false; + assert indexSettings.getIndexVersionCreated().before(IndexVersions.V_7_4_0) || indexSettings.isSoftDeleteEnabled() == false; return replicationTracker.addPeerRecoveryRetentionLease(nodeId, globalCheckpoint, listener); } diff --git a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java 
b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java index c89909ce6e1e5..b0783880ee943 100644 --- a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java +++ b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java @@ -38,9 +38,11 @@ import org.apache.lucene.search.similarities.NormalizationH2; import org.apache.lucene.search.similarities.NormalizationH3; import org.apache.lucene.search.similarities.NormalizationZ; +import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import java.util.Arrays; @@ -100,9 +102,23 @@ private static BasicModel parseBasicModel(IndexVersion indexCreatedVersion, Sett if (model == null) { String replacement = LEGACY_BASIC_MODELS.get(basicModel); if (replacement != null) { - throw new IllegalArgumentException( - "Basic model [" + basicModel + "] isn't supported anymore, " + "please use another model." - ); + if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { + throw new IllegalArgumentException( + "Basic model [" + basicModel + "] isn't supported anymore, " + "please use another model." + ); + } else { + deprecationLogger.warn( + DeprecationCategory.INDICES, + basicModel + "_similarity_model_replaced", + "Basic model [" + + basicModel + + "] isn't supported anymore and has arbitrarily been replaced with [" + + replacement + + "]." 
+ ); + model = BASIC_MODELS.get(replacement); + assert model != null; + } } } @@ -125,9 +141,23 @@ private static AfterEffect parseAfterEffect(IndexVersion indexCreatedVersion, Se if (effect == null) { String replacement = LEGACY_AFTER_EFFECTS.get(afterEffect); if (replacement != null) { - throw new IllegalArgumentException( - "After effect [" + afterEffect + "] isn't supported anymore, please use another effect." - ); + if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { + throw new IllegalArgumentException( + "After effect [" + afterEffect + "] isn't supported anymore, please use another effect." + ); + } else { + deprecationLogger.warn( + DeprecationCategory.INDICES, + afterEffect + "_after_effect_replaced", + "After effect [" + + afterEffect + + "] isn't supported anymore and has arbitrarily been replaced with [" + + replacement + + "]." + ); + effect = AFTER_EFFECTS.get(replacement); + assert effect != null; + } } } @@ -211,7 +241,15 @@ static void assertSettingsIsSubsetOf(String type, IndexVersion version, Settings unknownSettings.removeAll(Arrays.asList(supportedSettings)); unknownSettings.remove("type"); // used to figure out which sim this is if (unknownSettings.isEmpty() == false) { - throw new IllegalArgumentException("Unknown settings for similarity of type [" + type + "]: " + unknownSettings); + if (version.onOrAfter(IndexVersions.V_7_0_0)) { + throw new IllegalArgumentException("Unknown settings for similarity of type [" + type + "]: " + unknownSettings); + } else { + deprecationLogger.warn( + DeprecationCategory.INDICES, + "unknown_similarity_setting", + "Unknown settings for similarity of type [" + type + "]: " + unknownSettings + ); + } } } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index d3df4cc88c0c8..fe4723f56e033 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ 
b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -138,7 +138,7 @@ private static NamedRegistry> setupTokenFil tokenFilters.register("standard", new AnalysisProvider() { @Override public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - if (indexSettings.getIndexVersionCreated().before(IndexVersions.V_8_0_0)) { + if (indexSettings.getIndexVersionCreated().before(IndexVersions.V_7_0_0)) { deprecationLogger.warn( DeprecationCategory.ANALYSIS, "standard_deprecation", @@ -200,7 +200,19 @@ static Map setupPreConfiguredTokenFilters(List preConfiguredTokenFilters.register("lowercase", PreConfiguredTokenFilter.singleton("lowercase", true, LowerCaseFilter::new)); // Add "standard" for old indices (bwc) preConfiguredTokenFilters.register("standard", PreConfiguredTokenFilter.indexVersion("standard", true, (reader, version) -> { - throw new IllegalArgumentException("The [standard] token filter has been removed."); + // This was originally removed in 7_0_0 but due to a cacheing bug it was still possible + // in certain circumstances to create a new index referencing the standard token filter + // until version 7_5_2 + if (version.before(IndexVersions.V_7_6_0)) { + deprecationLogger.warn( + DeprecationCategory.ANALYSIS, + "standard_deprecation", + "The [standard] token filter is deprecated and will be removed in a future version." 
+ ); + } else { + throw new IllegalArgumentException("The [standard] token filter has been removed."); + } + return reader; })); /* Note that "stop" is available in lucene-core but it's pre-built * version uses a set of English stop words that are in diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index a0ef9f3601c1d..1ec187ea4a34b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.monitor.os.OsProbe; import org.elasticsearch.node.NodeRoleSettings; @@ -48,11 +49,9 @@ import static org.elasticsearch.node.NodeRoleSettings.NODE_ROLES_SETTING; public class RecoverySettings { - // TODO lucene 10 upgrade can the following constant be removed? - public static final IndexVersion SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION = IndexVersion.fromId(7_15_00_99); + public static final IndexVersion SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION = IndexVersions.V_7_15_0; public static final TransportVersion SNAPSHOT_RECOVERIES_SUPPORTED_TRANSPORT_VERSION = TransportVersions.V_7_15_0; - // TODO lucene 10 upgrade can the following constant be removed? 
- public static final IndexVersion SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = IndexVersion.fromId(7_16_00_99); + public static final IndexVersion SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = IndexVersions.V_7_16_0; public static final TransportVersion SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_TRANSPORT_VERSION = TransportVersions.V_7_16_0; private static final Logger logger = LogManager.getLogger(RecoverySettings.class); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 25d389d167f22..bf76e7efbdbea 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -39,6 +39,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.seqno.ReplicationTracker; @@ -969,7 +970,8 @@ void createRetentionLease(final long startingSeqNo, ActionListener IndexVersion.fromId(7_12_00_99); - case "7.9.0" -> IndexVersion.fromId(7_09_00_99); - case "7.6.0" -> IndexVersion.fromId(7_06_00_99); + case "7.12.0" -> IndexVersions.V_7_12_0; + case "7.9.0" -> IndexVersions.V_7_9_0; + case "7.6.0" -> IndexVersions.V_7_6_0; default -> // All (known) versions only ever emit one of the above strings for the format version, so if we see something // else it must be a newer version or else something wholly invalid. 
Report the raw string rather than trying diff --git a/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java b/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java index 12622fe207867..a3c96bba05b97 100644 --- a/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java +++ b/server/src/main/java/org/elasticsearch/repositories/ShardGeneration.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -24,7 +25,7 @@ /** * The generation ID of a shard, used to name the shard-level {@code index-$SHARD_GEN} file that represents a {@link - * BlobStoreIndexShardSnapshots} instance. Before 7.6 these generations were + * BlobStoreIndexShardSnapshots} instance. Before 7.6 ({@link SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION}) these generations were * numeric, but recent versions use a UUID instead. */ public final class ShardGeneration implements Writeable, ToXContentFragment { diff --git a/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java b/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java index e2287ebbe84c0..7ba8019bb14bb 100644 --- a/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java +++ b/server/src/main/java/org/elasticsearch/repositories/ShardGenerations.java @@ -136,7 +136,7 @@ public Map> obsoleteShardGenerations(Shar *
  • {@link #DELETED_SHARD_GEN} a deleted shard that isn't referenced by any snapshot in the repository any longer
  • *
  • {@link #NEW_SHARD_GEN} a new shard that we know doesn't hold any valid data yet in the repository
  • *
  • {@code null} unknown state. The shard either does not exist at all or it was created by a node older than - * 7.6. If a caller expects a shard to exist in the + * {@link org.elasticsearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION}. If a caller expects a shard to exist in the * repository but sees a {@code null} return, it should try to recover the generation by falling back to listing the contents * of the respective shard directory.
  • * diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index ed10fa5e6bb08..5e45aec6a5240 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1734,7 +1734,7 @@ public void finalizeSnapshot(final FinalizeSnapshotContext finalizeSnapshotConte final Collection indices = shardGenerations.indices(); final SnapshotId snapshotId = snapshotInfo.snapshotId(); // Once we are done writing the updated index-N blob we remove the now unreferenced index-${uuid} blobs in each shard - // directory if all nodes are at least at version 7.6 + // directory if all nodes are at least at version SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION // If there are older version nodes in the cluster, we don't need to run this cleanup as it will have already happened // when writing the index-${N} to each shard directory. 
final IndexVersion repositoryMetaVersion = finalizeSnapshotContext.repositoryMetaVersion(); @@ -1742,6 +1742,8 @@ public void finalizeSnapshot(final FinalizeSnapshotContext finalizeSnapshotConte final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); + final boolean writeIndexGens = SnapshotsService.useIndexGenerations(repositoryMetaVersion); + record MetadataWriteResult( RepositoryData existingRepositoryData, Map indexMetas, @@ -1777,11 +1779,15 @@ record RootBlobUpdateResult(RepositoryData oldRepositoryData, RepositoryData new } final MetadataWriteResult metadataWriteResult; - metadataWriteResult = new MetadataWriteResult( - existingRepositoryData, - ConcurrentCollections.newConcurrentMap(), - ConcurrentCollections.newConcurrentMap() - ); + if (writeIndexGens) { + metadataWriteResult = new MetadataWriteResult( + existingRepositoryData, + ConcurrentCollections.newConcurrentMap(), + ConcurrentCollections.newConcurrentMap() + ); + } else { + metadataWriteResult = new MetadataWriteResult(existingRepositoryData, null, null); + } try (var allMetaListeners = new RefCountingListener(l.map(ignored -> metadataWriteResult))) { // We ignore all FileAlreadyExistsException when writing metadata since otherwise a master failover while in this method @@ -1803,16 +1809,24 @@ record RootBlobUpdateResult(RepositoryData oldRepositoryData, RepositoryData new for (IndexId index : indices) { executor.execute(ActionRunnable.run(allMetaListeners.acquire(), () -> { final IndexMetadata indexMetaData = clusterMetadata.index(index.getName()); - - final String identifiers = IndexMetaDataGenerations.buildUniqueIdentifier(indexMetaData); - String metaUUID = existingRepositoryData.indexMetaDataGenerations().getIndexMetaBlobId(identifiers); - if (metaUUID == null) { - // We don't yet have this version of the metadata so we write it - metaUUID = UUIDs.base64UUID(); - INDEX_METADATA_FORMAT.write(indexMetaData, indexContainer(index), metaUUID, compress); - 
metadataWriteResult.indexMetaIdentifiers().put(identifiers, metaUUID); - } // else this task was largely a no-op - TODO no need to fork in that case - metadataWriteResult.indexMetas().put(index, identifiers); + if (writeIndexGens) { + final String identifiers = IndexMetaDataGenerations.buildUniqueIdentifier(indexMetaData); + String metaUUID = existingRepositoryData.indexMetaDataGenerations().getIndexMetaBlobId(identifiers); + if (metaUUID == null) { + // We don't yet have this version of the metadata so we write it + metaUUID = UUIDs.base64UUID(); + INDEX_METADATA_FORMAT.write(indexMetaData, indexContainer(index), metaUUID, compress); + metadataWriteResult.indexMetaIdentifiers().put(identifiers, metaUUID); + } // else this task was largely a no-op - TODO no need to fork in that case + metadataWriteResult.indexMetas().put(index, identifiers); + } else { + INDEX_METADATA_FORMAT.write( + clusterMetadata.index(index.getName()), + indexContainer(index), + snapshotId.getUUID(), + compress + ); + } })); } @@ -3853,7 +3867,8 @@ public BlobStoreIndexShardSnapshots getBlobStoreIndexShardSnapshots(IndexId inde * @param shardId The 0-based shard id, see also {@link ShardId#id()} * @param blobs list of blobs in repository * @param generation shard generation or {@code null} in case there was no shard generation tracked in the {@link RepositoryData} for - * this shard because its snapshot was created in a version older than 7.6. + * this shard because its snapshot was created in a version older than + * {@link SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION}. 
* @return tuple of BlobStoreIndexShardSnapshots and the last snapshot index generation */ private Tuple buildBlobStoreIndexShardSnapshots( diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index e1fc9cbbe56f2..94505d3096dec 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -18,9 +18,11 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; @@ -271,19 +273,39 @@ public void validateReferences(IndexVersion indexVersionCreated, Function metadataWithIndices(currentVersionSystem, regularIndex) + () -> metadataWithIndices(currentVersionSystem, oldVersionSystem, regularIndex) ); assertThat( exception.getMessage(), @@ -2020,6 +2019,53 @@ public void testSystemAliasValidationNewSystemAndRegularFails() { ); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this test needs to be updated or removed after the version 9.0 bump") + public void testSystemAliasOldSystemAndNewRegular() { + final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) + ); + final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", 
SYSTEM_ALIAS_NAME, null, random7xVersion, true); + final IndexMetadata regularIndex = buildIndexWithAlias("regular1", SYSTEM_ALIAS_NAME, false, IndexVersion.current(), false); + + // Should be ok: + metadataWithIndices(oldVersionSystem, regularIndex); + } + + @UpdateForV9 + @AwaitsFix(bugUrl = "this test needs to be updated or removed after the version 9.0 bump") + public void testSystemIndexValidationAllRegular() { + final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) + ); + final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); + final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); + final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); + + // Should be ok + metadataWithIndices(currentVersionSystem, currentVersionSystem2, oldVersionSystem); + } + + @UpdateForV9 + @AwaitsFix(bugUrl = "this test needs to be updated or removed after the version 9.0 bump") + public void testSystemAliasValidationAllSystemSomeOld() { + final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) + ); + final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); + final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); + final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); + + // Should be ok: + metadataWithIndices(currentVersionSystem, currentVersionSystem2, oldVersionSystem); + } + public void 
testSystemAliasValidationAll8x() { final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index 5df4013681d7a..e74b9d25c36bb 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -84,7 +84,7 @@ public void testEqualsHashcodeSerialization() { @AwaitsFix(bugUrl = "as mentioned in the comment below, the behavior here is changing for 9.0 so this test needs updating") public void testReadsFormatWithoutVersion() throws IOException { // the behaviour tested here is only appropriate if the current version is compatible with versions 7 and earlier - assertTrue(IndexVersions.MINIMUM_COMPATIBLE.onOrBefore(IndexVersions.V_8_0_0)); + assertTrue(IndexVersions.MINIMUM_COMPATIBLE.onOrBefore(IndexVersions.V_7_0_0)); // when the current version is incompatible with version 7, the behaviour should change to reject files like the given resource // which do not have the version field diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 32fc435228ef6..712d685921f0a 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -6638,8 +6638,6 @@ void assertLuceneOperations(InternalEngine engine, long expectedAppends, long ex assertThat(message, engine.getNumDocDeletes(), equalTo(expectedDeletes)); } - // TODO Lucene 10 upgrade, we need to remove old IndexVersions for this to work - @AwaitsFix(bugUrl = "") 
public void testStoreHonorsLuceneVersion() throws IOException { // this expects a big IndexVersion bump when the lucene major version is bumped IndexVersion lowestCompatiblePreviousVersion = IndexVersion.fromId((IndexVersion.current().id() / 1_000_000) * 1_000_000); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index f3d77fa472a25..54f6143d5cb30 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -756,4 +756,5 @@ public void testLegacyField() throws Exception { assertThat(service.fieldType("mydate"), instanceOf(DateFieldType.class)); assertNotEquals(DEFAULT_DATE_TIME_FORMATTER, ((DateFieldType) service.fieldType("mydate")).dateTimeFormatter); } + } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index cb5ecda18c468..ee805d3e37580 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -584,6 +584,12 @@ public void testDeprecatedParameters() { // 'index' is declared explicitly, 'store' is not, but is one of the previously always-accepted params String mapping = """ {"type":"test_mapper","index":false,"store":true,"required":"value"}"""; + TestMapper mapper = fromMapping(mapping, IndexVersions.V_7_8_0, TransportVersions.V_7_8_0); + assertWarnings("Parameter [store] has no effect on type [test_mapper] and will be removed in future"); + assertFalse(mapper.index); + assertEquals(""" + {"field":{"type":"test_mapper","index":false,"required":"value"}}""", Strings.toString(mapper)); + MapperParsingException e = expectThrows( MapperParsingException.class, () -> 
fromMapping(mapping, IndexVersions.V_8_0_0, TransportVersions.V_8_0_0) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java index 5e727903176e3..c29d8335bbcd6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java @@ -236,7 +236,9 @@ public long cost() { } public static BytesRef mockEncodeDenseVector(float[] values, ElementType elementType, IndexVersion indexVersion) { - int numBytes = elementType.getNumBytes(values.length) + DenseVectorFieldMapper.MAGNITUDE_BYTES; + int numBytes = indexVersion.onOrAfter(DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION) + ? elementType.getNumBytes(values.length) + DenseVectorFieldMapper.MAGNITUDE_BYTES + : elementType.getNumBytes(values.length); double dotProduct = 0f; ByteBuffer byteBuffer = elementType.createByteBuffer(indexVersion, numBytes); for (float value : values) { @@ -250,9 +252,11 @@ public static BytesRef mockEncodeDenseVector(float[] values, ElementType element dotProduct += value * value; } - // encode vector magnitude at the end - float vectorMagnitude = (float) Math.sqrt(dotProduct); - byteBuffer.putFloat(vectorMagnitude); + if (indexVersion.onOrAfter(DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION)) { + // encode vector magnitude at the end + float vectorMagnitude = (float) Math.sqrt(dotProduct); + byteBuffer.putFloat(vectorMagnitude); + } return new BytesRef(byteBuffer.array()); } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index 579ec623b83a9..66ab09f3a6113 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ 
b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -102,14 +102,28 @@ public Map getMetadataMappers() { @AwaitsFix(bugUrl = "test is referencing 7.x index versions so needs to be updated for 9.0 bump") public void testBuiltinMappers() { IndicesModule module = new IndicesModule(Collections.emptyList()); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); - assertThat(module.getMapperRegistry().getMapperParser("object", IndexVersion.current()), instanceOf(ObjectMapper.TypeParser.class)); - assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty()); - Map metadataMapperParsers = module.getMapperRegistry().getMetadataMapperParsers(version); - assertEquals(EXPECTED_METADATA_FIELDS.length, metadataMapperParsers.size()); - int i = 0; - for (String field : metadataMapperParsers.keySet()) { - assertEquals(EXPECTED_METADATA_FIELDS[i++], field); + { + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); + assertThat( + module.getMapperRegistry().getMapperParser("object", IndexVersion.current()), + instanceOf(ObjectMapper.TypeParser.class) + ); + assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty()); + Map metadataMapperParsers = module.getMapperRegistry() + .getMetadataMapperParsers(version); + assertEquals(EXPECTED_METADATA_FIELDS.length, metadataMapperParsers.size()); + int i = 0; + for (String field : metadataMapperParsers.keySet()) { + assertEquals(EXPECTED_METADATA_FIELDS[i++], field); + } + } + { + IndexVersion version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) + ); + assertEquals(EXPECTED_METADATA_FIELDS.length - 1, module.getMapperRegistry().getMetadataMapperParsers(version).size()); } } diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 05f9afcb6de64..a7d18ff782400 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -93,6 +93,8 @@ */ public abstract class MapperTestCase extends MapperServiceTestCase { + public static final IndexVersion DEPRECATED_BOOST_INDEX_VERSION = IndexVersions.V_7_10_0; + protected abstract void minimalMapping(XContentBuilder b) throws IOException; /** diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 43b7effbf8bef..c28358d7a62be 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -533,7 +533,7 @@ protected void addBwCFailedSnapshot(String repoName, String snapshotName, Map {} ) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 9744ea13433ed..7950eb460d430 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1875,7 +1875,13 @@ protected static void expectSoftDeletesWarning(Request request, String indexName + indexName + "]."; - request.setOptions(expectVersionSpecificWarnings(v -> { v.current(expectedWarning); })); + final var softDeleteDisabledDeprecated = minimumIndexVersion().onOrAfter(IndexVersions.V_7_6_0); + request.setOptions(expectVersionSpecificWarnings(v -> { + if (softDeleteDisabledDeprecated) { + 
v.current(expectedWarning); + } + v.compatible(expectedWarning); + })); } protected static Map getIndexSettings(String index) throws IOException { @@ -2205,6 +2211,7 @@ public void assertEmptyTranslog(String index) throws Exception { * that we have renewed every PRRL to the global checkpoint of the corresponding copy and properly synced to all copies. */ public void ensurePeerRecoveryRetentionLeasesRenewedAndSynced(String index) throws Exception { + boolean mustHavePRRLs = minimumIndexVersion().onOrAfter(IndexVersions.V_7_6_0); assertBusy(() -> { Map stats = entityAsMap(client().performRequest(new Request("GET", index + "/_stats?level=shards"))); @SuppressWarnings("unchecked") @@ -2222,20 +2229,25 @@ public void ensurePeerRecoveryRetentionLeasesRenewedAndSynced(String index) thro "retention_leases.leases", copy ); + if (mustHavePRRLs == false && retentionLeases == null) { + continue; + } assertNotNull(retentionLeases); for (Map retentionLease : retentionLeases) { if (((String) retentionLease.get("id")).startsWith("peer_recovery/")) { assertThat(retentionLease.get("retaining_seq_no"), equalTo(globalCheckpoint + 1)); } } - List existingLeaseIds = retentionLeases.stream() - .map(lease -> (String) lease.get("id")) - .collect(Collectors.toList()); - List expectedLeaseIds = shard.stream() - .map(shr -> (String) XContentMapValues.extractValue("routing.node", shr)) - .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId) - .collect(Collectors.toList()); - assertThat("not every active copy has established its PPRL", expectedLeaseIds, everyItem(in(existingLeaseIds))); + if (mustHavePRRLs) { + List existingLeaseIds = retentionLeases.stream() + .map(lease -> (String) lease.get("id")) + .collect(Collectors.toList()); + List expectedLeaseIds = shard.stream() + .map(shr -> (String) XContentMapValues.extractValue("routing.node", shr)) + .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId) + .collect(Collectors.toList()); + assertThat("not every active copy has established 
its PPRL", expectedLeaseIds, everyItem(in(existingLeaseIds))); + } } } }, 60, TimeUnit.SECONDS); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index bafc128320c57..008792966a4b2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -2522,7 +2523,7 @@ private static ClusterState createRemoteClusterState( ) { Settings.Builder indexSettings; if (enableSoftDeletes == false) { - indexSettings = settings(IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.current())).put( + indexSettings = settings(IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0)).put( IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false ); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 9428da1844e9b..3da32c7f5a4c2 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -10,7 +10,6 @@ import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.time.DateFormatter; import 
org.elasticsearch.common.time.LegacyFormatNames; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -31,15 +30,13 @@ */ public class IndexDeprecationChecks { - @UpdateForV9 static DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata) { // TODO: this check needs to be revised. It's trivially true right now. IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); - if (currentCompatibilityVersion.before(IndexVersions.V_8_0_0)) { + if (currentCompatibilityVersion.before(IndexVersions.V_7_0_0)) { return new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 8.0", - // we probably have to link this to "breaking-changes-9.0.html", + "Old index with a compatibility version < 7.0", "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + "breaking-changes-8.0.html", "This index has version: " + currentCompatibilityVersion.toReleaseVersion(), false, diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index c2dd057bc2ddb..18872d00d54a0 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -36,7 +36,7 @@ public void testOldIndicesCheck() { .build(); DeprecationIssue expected = new DeprecationIssue( DeprecationIssue.Level.CRITICAL, - "Old index with a compatibility version < 8.0", + "Old index with a compatibility version < 7.0", "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + "breaking-changes-8.0.html", "This index has version: " + createdWith.toReleaseVersion(), false, diff 
--git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index f264b04c9d159..4f0bcbb43e260 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.GeoShapeQueryable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryBuilder; @@ -213,6 +214,10 @@ private static org.apache.lucene.search.Query shapeShapeQuery( ShapeRelation relation, SearchExecutionContext context ) { + // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0); + if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { + throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); + } final MappedFieldType fieldType = context.getFieldType(fieldName); try { return XYQueriesUtils.toXYShapeQuery(geometry, fieldName, relation, fieldType.isIndexed(), fieldType.hasDocValues()); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index 1f0f6af3a2eda..84a632a419ead 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -828,7 +828,15 @@ public void testSnapshotOfSearchableSnapshotIncludesNoDataButCanBeRestored() thr .settings() .get("location") ); - initWithSnapshotVersion(tmpRepositoryName, repoPath, SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION); + initWithSnapshotVersion( + tmpRepositoryName, + repoPath, + randomFrom( + SnapshotsService.OLD_SNAPSHOT_FORMAT, + SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION, + SnapshotsService.INDEX_GEN_IN_REPO_DATA_VERSION + ) + ); assertAcked(clusterAdmin().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, tmpRepositoryName)); createRepository(repositoryName, "fs", repoPath); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java index ed384ed013df3..ccdad61adee52 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java @@ -97,7 +97,10 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String static boolean needsUpgrade(ClusterState state) { return state.metadata() .stream() - .filter(imd -> imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0)) + .filter( + imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) + && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) + ) 
.filter(IndexMetadata::isPartialSearchableSnapshot) .map(IndexMetadata::getSettings) .anyMatch(SearchableSnapshotIndexMetadataUpgrader::notFrozenShardLimitGroup); @@ -110,7 +113,10 @@ static ClusterState upgradeIndices(ClusterState currentState) { Metadata.Builder builder = Metadata.builder(currentState.metadata()); currentState.metadata() .stream() - .filter(imd -> imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0)) + .filter( + imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) + && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) + ) .filter(imd -> imd.isPartialSearchableSnapshot() && notFrozenShardLimitGroup(imd.getSettings())) .map(SearchableSnapshotIndexMetadataUpgrader::setShardLimitGroupFrozen) .forEach(imd -> builder.put(imd, true)); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java index 1f336330cc7ee..bd090b528bb97 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java @@ -120,7 +120,14 @@ private Settings normal() { * other than 7.12 versions here, but not 8.0 (since a rolling upgrade to 8.0 requires an upgrade to 7.latest first). 
*/ private Settings partialNeedsUpgrade() { - return searchableSnapshotSettings(randomFrom(IndexVersion.fromId(7_12_00_99), IndexVersion.fromId(7_17_00_99)), true); + return searchableSnapshotSettings( + IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_7_12_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) + ), + true + ); } /** @@ -129,7 +136,7 @@ private Settings partialNeedsUpgrade() { private Settings partial_7_13plus() { return shardLimitGroupFrozen( searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_13_0, IndexVersion.current()), true ) ); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java index 0e5d50d892e62..449edd8f40ee2 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java @@ -29,10 +29,12 @@ import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetadata; +import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.plan.ShardRecoveryPlan; import 
org.elasticsearch.indices.recovery.plan.ShardSnapshot; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; @@ -61,6 +63,7 @@ import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.engine.Engine.ES_VERSION; import static org.elasticsearch.index.engine.Engine.HISTORY_UUID_KEY; +import static org.elasticsearch.test.index.IndexVersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -214,9 +217,18 @@ public void testLogicallyEquivalentSnapshotIsUsedEvenIfFilesAreDifferent() throw final IndexVersion snapshotVersion; final Version luceneVersion; if (compatibleVersion) { - // TODO lucene 10 upgrade, double check the removal of pre V_8_0_0 IndexVersions here - snapshotVersion = IndexVersionUtils.randomCompatibleVersion(random()); - luceneVersion = IndexVersionUtils.randomCompatibleVersion(random()).luceneVersion(); + snapshotVersion = randomBoolean() ? 
null : IndexVersionUtils.randomCompatibleVersion(random()); + // If snapshotVersion is not present, + // then lucene version must be < RecoverySettings.SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION + if (snapshotVersion == null) { + luceneVersion = randomVersionBetween( + random(), + IndexVersions.V_7_0_0, + RecoverySettings.SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION + ).luceneVersion(); + } else { + luceneVersion = IndexVersionUtils.randomCompatibleVersion(random()).luceneVersion(); + } } else { snapshotVersion = IndexVersion.fromId(Integer.MAX_VALUE); luceneVersion = org.apache.lucene.util.Version.parse("255.255.255"); @@ -404,7 +416,7 @@ public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener builder(m).ignoreMalformed.get(), ignoreMalformedByDefault); this.coerce = coerceParam(m -> builder(m).coerce.get(), coerceByDefault); - this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), true); + this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), IndexVersions.V_7_8_0.onOrBefore(version)); addScriptValidation(script, indexed, hasDocValues); } @@ -245,6 +246,13 @@ public String typeName() { @Override public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { failIfNotIndexedNorDocValuesFallback(context); + // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0) + if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { + throw new QueryShardException( + context, + ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]." 
+ ); + } Query query; if (isIndexed()) { query = LatLonShape.newGeometryQuery(fieldName, relation.getLuceneRelation(), geometries); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index dead3d9730583..e5d5354327f5a 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -161,6 +161,13 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext @Override public Query shapeQuery(Geometry shape, String fieldName, ShapeRelation relation, SearchExecutionContext context) { failIfNotIndexedNorDocValuesFallback(context); + // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0); + if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { + throw new QueryShardException( + context, + ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]." 
+ ); + } try { return XYQueriesUtils.toXYShapeQuery(shape, fieldName, relation, isIndexed(), hasDocValues()); } catch (IllegalArgumentException e) { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 5d72908c7c6f9..0e04cfe6757bf 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.junit.AssumptionViolatedException; @@ -282,7 +283,7 @@ public void testInvalidCurrentVersion() { @UpdateForV9 @AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public void testGeoShapeLegacyMerge() throws Exception { - IndexVersion version = IndexVersions.V_8_0_0; + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); MapperService m = createMapperService(version, fieldMapping(b -> b.field("type", getFieldName()))); Exception e = expectThrows( IllegalArgumentException.class, diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java index 8540b68389b61..275385dca3535 100644 --- 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.spatial.index.query; -import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.containsString; @UpdateForV9 -@LuceneTestCase.AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") +@AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeWithDocValuesQueryTests extends GeoShapeQueryTestCase { @SuppressWarnings("deprecation") @@ -76,7 +76,7 @@ protected void createMapping(String indexName, String fieldName, Settings settin ex.getMessage(), containsString("using deprecated parameters [tree] in mapper [" + fieldName + "] of type [geo_shape] is no longer allowed") ); - IndexVersion version = IndexVersions.V_8_0_0; + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); finalSetting = settings(version).put(settings).build(); indicesAdmin().prepareCreate(indexName).setMapping(xcb).setSettings(finalSetting).get(); } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index fdb353b6df52a..266e82d67219a 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java 
@@ -50,6 +50,7 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.LowercaseNormalizer; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -266,7 +267,11 @@ public static final class WildcardFieldType extends MappedFieldType { private WildcardFieldType(String name, String nullValue, int ignoreAbove, IndexVersion version, Map meta) { super(name, true, false, true, Defaults.TEXT_SEARCH_INFO, meta); - this.analyzer = WILDCARD_ANALYZER_7_10; + if (version.onOrAfter(IndexVersions.V_7_10_0)) { + this.analyzer = WILDCARD_ANALYZER_7_10; + } else { + this.analyzer = WILDCARD_ANALYZER_7_9; + } this.nullValue = nullValue; this.ignoreAbove = ignoreAbove; } From 12dfe382440725ab1b14de131b77f51998bf7bb4 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Mon, 23 Sep 2024 21:01:29 +0100 Subject: [PATCH 354/417] Add Lucene70DocValuesFormat to old-lucene-versions plugin (#113377) Add Lucene70DocValuesFormat to old-lucene-versions plugin. Lucene 10 has remove the old Lucene70DocValuesFormat from its backward_codecs, so we can no longer lookup the doc values format by name (DocValuesFormat.forName("Lucene70")). Instead we make a copy and reference it statically. The code is for the most part a direct copy, with the following changes: 1. package and import updates 2. Elasticsearch's spotless 3. A couple of changes in Lucene70DocValuesConsumer, where we no longer iterate SortedSetDocValues until NO_MORE_DOCS, but rather use a counted loop with docValueCount. As per what is done elsewhere in lucene 10. TheLucene70DocValuesFormatTests has been, similar to how we test other formats in this plugin. 
--- .../lucene/bwc/AbstractArchiveTestCase.java | 2 +- .../bwc/codecs/lucene70/BWCLucene70Codec.java | 11 +- .../bwc/codecs/lucene70/IndexedDISI.java | 327 ++++ .../bwc/codecs/lucene70/Lucene70Codec.java | 15 + .../lucene70/Lucene70DocValuesConsumer.java | 681 ++++++++ .../lucene70/Lucene70DocValuesFormat.java | 171 ++ .../lucene70/Lucene70DocValuesProducer.java | 1461 +++++++++++++++++ .../services/org.apache.lucene.codecs.Codec | 1 + .../org.apache.lucene.codecs.DocValuesFormat | 1 + .../Lucene70DocValuesFormatTests.java | 26 + 10 files changed, 2690 insertions(+), 6 deletions(-) create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java create mode 100644 x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java index 803c7f410c41d..71f788727aa23 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java +++ 
b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java @@ -97,7 +97,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna .getAsVersionId( "version", IndexVersion::fromId, - IndexVersion.fromId(randomBoolean() ? 5000099 : 6000099) + IndexVersion.fromId(randomFrom(5000099, 6000099, 7000099)) ) ) ) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java index 8084819198fb3..0100a8bd14635 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java @@ -22,7 +22,6 @@ import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec; import org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60MetadataOnlyPointsFormat; @@ -33,9 +32,7 @@ public class BWCLucene70Codec extends BWCCodec { private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); private final StoredFieldsFormat storedFieldsFormat; - @UpdateForV9 - // this needs addressing to support 7.x indices in 9.x - private final DocValuesFormat defaultDVFormat = null; // DocValuesFormat.forName("Lucene70"); + private final DocValuesFormat defaultDVFormat = new Lucene70DocValuesFormat(); private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat 
getDocValuesFormatForField(String field) { @@ -50,7 +47,11 @@ public PostingsFormat getPostingsFormatForField(String field) { }; public BWCLucene70Codec() { - super("BWCLucene70Codec"); + this("BWCLucene70Codec"); + } + + protected BWCLucene70Codec(String name) { + super(name); storedFieldsFormat = new Lucene50StoredFieldsFormat(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java new file mode 100644 index 0000000000000..75119247cdb13 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java @@ -0,0 +1,327 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BitSetIterator; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.RoaringDocIdSet; + +import java.io.DataInput; +import java.io.IOException; + +/** + * Disk-based implementation of a {@link DocIdSetIterator} which can return the index of the current + * document, i.e. the ordinal of the current document among the list of documents that this iterator + * can return. This is useful to implement sparse doc values by only having to encode values for + * documents that actually have a value. + * + *

    Implementation-wise, this {@link DocIdSetIterator} is inspired of {@link RoaringDocIdSet + * roaring bitmaps} and encodes ranges of {@code 65536} documents independently and picks between 3 + * encodings depending on the density of the range: + * + *

      + *
    • {@code ALL} if the range contains 65536 documents exactly, + *
    • {@code DENSE} if the range contains 4096 documents or more; in that case documents are + * stored in a bit set, + *
    • {@code SPARSE} otherwise, and the lower 16 bits of the doc IDs are stored in a {@link + * DataInput#readShort() short}. + *
    + * + *

    Only ranges that contain at least one value are encoded. + * + *

    This implementation uses 6 bytes per document in the worst-case, which happens in the case + * that all ranges contain exactly one document. + */ +final class IndexedDISI extends DocIdSetIterator { + + static final int MAX_ARRAY_LENGTH = (1 << 12) - 1; + + private static void flush(int block, FixedBitSet buffer, int cardinality, IndexOutput out) throws IOException { + assert block >= 0 && block < 65536; + out.writeShort((short) block); + assert cardinality > 0 && cardinality <= 65536; + out.writeShort((short) (cardinality - 1)); + if (cardinality > MAX_ARRAY_LENGTH) { + if (cardinality != 65536) { // all docs are set + for (long word : buffer.getBits()) { + out.writeLong(word); + } + } + } else { + BitSetIterator it = new BitSetIterator(buffer, cardinality); + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + out.writeShort((short) doc); + } + } + } + + static void writeBitSet(DocIdSetIterator it, IndexOutput out) throws IOException { + int i = 0; + final FixedBitSet buffer = new FixedBitSet(1 << 16); + int prevBlock = -1; + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + final int block = doc >>> 16; + if (prevBlock != -1 && block != prevBlock) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + prevBlock = block; + i = 0; + } + buffer.set(doc & 0xFFFF); + i++; + prevBlock = block; + } + if (i > 0) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + } + // NO_MORE_DOCS is stored explicitly + buffer.set(DocIdSetIterator.NO_MORE_DOCS & 0xFFFF); + flush(DocIdSetIterator.NO_MORE_DOCS >>> 16, buffer, 1, out); + } + + /** The slice that stores the {@link DocIdSetIterator}. 
*/ + private final IndexInput slice; + + private final long cost; + + IndexedDISI(IndexInput in, long offset, long length, long cost) throws IOException { + this(in.slice("docs", offset, length), cost); + } + + // This constructor allows to pass the slice directly in case it helps reuse + // see eg. Lucene70 norms producer's merge instance + IndexedDISI(IndexInput slice, long cost) throws IOException { + this.slice = slice; + this.cost = cost; + } + + private int block = -1; + private long blockEnd; + private int nextBlockIndex = -1; + Method method; + + private int doc = -1; + private int index = -1; + + // SPARSE variables + boolean exists; + + // DENSE variables + private long word; + private int wordIndex = -1; + // number of one bits encountered so far, including those of `word` + private int numberOfOnes; + + // ALL variables + private int gap; + + @Override + public int docID() { + return doc; + } + + @Override + public int advance(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + if (block == targetBlock) { + if (method.advanceWithinBlock(this, target)) { + return doc; + } + readBlockHeader(); + } + boolean found = method.advanceWithinBlock(this, block); + assert found; + return doc; + } + + public boolean advanceExact(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target); + this.doc = target; + return found; + } + + private void advanceBlock(int targetBlock) throws IOException { + do { + slice.seek(blockEnd); + readBlockHeader(); + } while (block < targetBlock); + } + + private void readBlockHeader() throws IOException { + block = Short.toUnsignedInt(slice.readShort()) << 16; + assert block >= 0; + final int numValues = 1 + Short.toUnsignedInt(slice.readShort()); + index = nextBlockIndex; + 
nextBlockIndex = index + numValues; + if (numValues <= MAX_ARRAY_LENGTH) { + method = Method.SPARSE; + blockEnd = slice.getFilePointer() + (numValues << 1); + } else if (numValues == 65536) { + method = Method.ALL; + blockEnd = slice.getFilePointer(); + gap = block - index - 1; + } else { + method = Method.DENSE; + blockEnd = slice.getFilePointer() + (1 << 13); + wordIndex = -1; + numberOfOnes = index + 1; + } + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + public int index() { + return index; + } + + @Override + public long cost() { + return cost; + } + + enum Method { + SPARSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + disi.doc = disi.block | doc; + disi.exists = true; + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + if (target == disi.doc) { + return disi.exists; + } + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + if (doc != targetInBlock) { + disi.index--; + disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES); + break; + } + disi.exists = true; + return true; + } + } + disi.exists = false; + return false; + } + }, + DENSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex 
= targetWordIndex; + + long leftBits = disi.word >>> target; + if (leftBits != 0L) { + disi.doc = target + Long.numberOfTrailingZeros(leftBits); + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return true; + } + + while (++disi.wordIndex < 1024) { + disi.word = disi.slice.readLong(); + if (disi.word != 0) { + disi.index = disi.numberOfOnes; + disi.numberOfOnes += Long.bitCount(disi.word); + disi.doc = disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word); + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex = targetWordIndex; + + long leftBits = disi.word >>> target; + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return (leftBits & 1L) != 0; + } + }, + ALL { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.doc = target; + disi.index = target - disi.gap; + return true; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.index = target - disi.gap; + return true; + } + }; + + /** + * Advance to the first doc from the block that is equal to or greater than {@code target}. + * Return true if there is such a doc and false otherwise. + */ + abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException; + + /** + * Advance the iterator exactly to the position corresponding to the given {@code target} and + * return whether this document exists. 
+ */ + abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java new file mode 100644 index 0000000000000..77de24b53069d --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +public class Lucene70Codec extends BWCLucene70Codec { + + public Lucene70Codec() { + super("Lucene70"); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java new file mode 100644 index 0000000000000..1d35a60235d35 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java @@ -0,0 +1,681 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicWriter; +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.EmptyDocValuesProducer; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.store.ByteBuffersDataOutput; +import org.apache.lucene.store.ByteBuffersIndexOutput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.MathUtil; +import org.apache.lucene.util.StringHelper; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import 
java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE; + +/** writer for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesConsumer extends DocValuesConsumer { + + IndexOutput data, meta; + final int maxDoc; + + /** expert: Creates a new writer */ + Lucene70DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + boolean success = false; + try { + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + data = EndiannessReverserUtil.createOutput(state.directory, dataName, state.context); + CodecUtil.writeIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + meta = EndiannessReverserUtil.createOutput(state.directory, metaName, state.context); + CodecUtil.writeIndexHeader( + meta, + metaCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + maxDoc = state.segmentInfo.maxDoc(); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + @Override + public void close() throws IOException { + boolean success = false; + try { + if (meta != null) { + meta.writeInt(-1); // write EOF marker + CodecUtil.writeFooter(meta); // write checksum + } + if (data != null) { + CodecUtil.writeFooter(data); // write checksum + } + success = true; + } finally { + if (success) { + IOUtils.close(data, meta); + } 
else { + IOUtils.closeWhileHandlingException(data, meta); + } + meta = data = null; + } + } + + @Override + public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.NUMERIC); + + writeValues(field, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + return DocValues.singleton(valuesProducer.getNumeric(field)); + } + }); + } + + private static class MinMaxTracker { + long min, max, numValues, spaceInBits; + + MinMaxTracker() { + reset(); + spaceInBits = 0; + } + + private void reset() { + min = Long.MAX_VALUE; + max = Long.MIN_VALUE; + numValues = 0; + } + + /** Accumulate a new value. */ + void update(long v) { + min = Math.min(min, v); + max = Math.max(max, v); + ++numValues; + } + + /** Update the required space. */ + void finish() { + if (max > min) { + spaceInBits += LegacyDirectWriter.unsignedBitsRequired(max - min) * numValues; + } + } + + /** Update space usage and get ready for accumulating values for the next block. */ + void nextBlock() { + finish(); + reset(); + } + } + + private long[] writeValues(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + int numDocsWithValue = 0; + MinMaxTracker minMax = new MinMaxTracker(); + MinMaxTracker blockMinMax = new MinMaxTracker(); + long gcd = 0; + Set uniqueValues = new HashSet<>(); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + + if (gcd != 1) { + if (v < Long.MIN_VALUE / 2 || v > Long.MAX_VALUE / 2) { + // in that case v - minValue might overflow and make the GCD computation return + // wrong results. 
Since these extreme values are unlikely, we just discard + // GCD computation for them + gcd = 1; + } else if (minMax.numValues != 0) { // minValue needs to be set first + gcd = MathUtil.gcd(gcd, v - minMax.min); + } + } + + minMax.update(v); + blockMinMax.update(v); + if (blockMinMax.numValues == NUMERIC_BLOCK_SIZE) { + blockMinMax.nextBlock(); + } + + if (uniqueValues != null && uniqueValues.add(v) && uniqueValues.size() > 256) { + uniqueValues = null; + } + } + + numDocsWithValue++; + } + + minMax.finish(); + blockMinMax.finish(); + + final long numValues = minMax.numValues; + long min = minMax.min; + final long max = minMax.max; + assert blockMinMax.spaceInBits <= minMax.spaceInBits; + + if (numDocsWithValue == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithValue == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedNumeric(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeLong(numValues); + final int numBitsPerValue; + boolean doBlocks = false; + Map encode = null; + if (min >= max) { + numBitsPerValue = 0; + meta.writeInt(-1); + } else { + if (uniqueValues != null + && uniqueValues.size() > 1 + && LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1) < LegacyDirectWriter.unsignedBitsRequired( + (max - min) / gcd + )) { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1); + final Long[] sortedUniqueValues = uniqueValues.toArray(new Long[0]); + Arrays.sort(sortedUniqueValues); + meta.writeInt(sortedUniqueValues.length); + for (Long v : sortedUniqueValues) { + meta.writeLong(v); + } + encode = new HashMap<>(); + for (int i = 0; i < sortedUniqueValues.length; ++i) { + encode.put(sortedUniqueValues[i], i); + } + min = 0; + gcd = 1; + } else { + uniqueValues = null; + // we do blocks if that appears to save 10+% 
storage + doBlocks = minMax.spaceInBits > 0 && (double) blockMinMax.spaceInBits / minMax.spaceInBits <= 0.9; + if (doBlocks) { + numBitsPerValue = 0xFF; + meta.writeInt(-2 - NUMERIC_BLOCK_SHIFT); + } else { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired((max - min) / gcd); + if (gcd == 1 + && min > 0 + && LegacyDirectWriter.unsignedBitsRequired(max) == LegacyDirectWriter.unsignedBitsRequired(max - min)) { + min = 0; + } + meta.writeInt(-1); + } + } + } + + meta.writeByte((byte) numBitsPerValue); + meta.writeLong(min); + meta.writeLong(gcd); + long startOffset = data.getFilePointer(); + meta.writeLong(startOffset); + if (doBlocks) { + writeValuesMultipleBlocks(valuesProducer.getSortedNumeric(field), gcd); + } else if (numBitsPerValue != 0) { + writeValuesSingleBlock(valuesProducer.getSortedNumeric(field), numValues, numBitsPerValue, min, gcd, encode); + } + meta.writeLong(data.getFilePointer() - startOffset); + + return new long[] { numDocsWithValue, numValues }; + } + + private void writeValuesSingleBlock( + SortedNumericDocValues values, + long numValues, + int numBitsPerValue, + long min, + long gcd, + Map encode + ) throws IOException { + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numValues, numBitsPerValue); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + if (encode == null) { + writer.add((v - min) / gcd); + } else { + writer.add(encode.get(v)); + } + } + } + writer.finish(); + } + + private void writeValuesMultipleBlocks(SortedNumericDocValues values, long gcd) throws IOException { + final long[] buffer = new long[NUMERIC_BLOCK_SIZE]; + final ByteBuffersDataOutput encodeBuffer = ByteBuffersDataOutput.newResettableInstance(); + int upTo = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = 
values.docValueCount(); i < count; ++i) { + buffer[upTo++] = values.nextValue(); + if (upTo == NUMERIC_BLOCK_SIZE) { + writeBlock(buffer, NUMERIC_BLOCK_SIZE, gcd, encodeBuffer); + upTo = 0; + } + } + } + if (upTo > 0) { + writeBlock(buffer, upTo, gcd, encodeBuffer); + } + } + + private void writeBlock(long[] values, int length, long gcd, ByteBuffersDataOutput buffer) throws IOException { + assert length > 0; + long min = values[0]; + long max = values[0]; + for (int i = 1; i < length; ++i) { + final long v = values[i]; + assert Math.floorMod(values[i] - min, gcd) == 0; + min = Math.min(min, v); + max = Math.max(max, v); + } + if (min == max) { + data.writeByte((byte) 0); + data.writeLong(min); + } else { + final int bitsPerValue = LegacyDirectWriter.unsignedBitsRequired(max - min); + buffer.reset(); + assert buffer.size() == 0; + final LegacyDirectWriter w = LegacyDirectWriter.getInstance(buffer, length, bitsPerValue); + for (int i = 0; i < length; ++i) { + w.add((values[i] - min) / gcd); + } + w.finish(); + data.writeByte((byte) bitsPerValue); + data.writeLong(min); + data.writeInt(Math.toIntExact(buffer.size())); + buffer.copyTo(data); + } + } + + @Override + public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.BINARY); + + BinaryDocValues values = valuesProducer.getBinary(field); + long start = data.getFilePointer(); + meta.writeLong(start); + int numDocsWithField = 0; + int minLength = Integer.MAX_VALUE; + int maxLength = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + BytesRef v = values.binaryValue(); + int length = v.length; + data.writeBytes(v.bytes, v.offset, v.length); + minLength = Math.min(length, minLength); + maxLength = Math.max(length, maxLength); + } + assert numDocsWithField <= maxDoc; + meta.writeLong(data.getFilePointer() - start); + + if 
(numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getBinary(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + meta.writeInt(minLength); + meta.writeInt(maxLength); + if (maxLength > minLength) { + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + writer.add(addr); + values = valuesProducer.getBinary(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.binaryValue().length; + writer.add(addr); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED); + doAddSortedField(field, valuesProducer); + } + + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedDocValues values = valuesProducer.getSorted(field); + int numDocsWithField = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + } + + if (numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSorted(field); + IndexedDISI.writeBitSet(values, data); + 
meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + if (values.getValueCount() <= 1) { + meta.writeByte((byte) 0); + meta.writeLong(0L); + meta.writeLong(0L); + } else { + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numDocsWithField, numberOfBitsPerOrd); + values = valuesProducer.getSorted(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + writer.add(values.ordValue()); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + + addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); + } + + private void addTermsDict(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeVLong(size); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT); + + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + long numBlocks = (size + Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) >>> Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT; + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + BytesRefBuilder previous = new BytesRefBuilder(); + long ord = 0; + long start = data.getFilePointer(); + int maxLength = 0; + TermsEnum iterator = values.termsEnum(); + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) == 0) { + writer.add(data.getFilePointer() - start); + data.writeVInt(term.length); + data.writeBytes(term.bytes, term.offset, 
term.length); + } else { + final int prefixLength = StringHelper.bytesDifference(previous.get(), term); + final int suffixLength = term.length - prefixLength; + assert suffixLength > 0; // terms are unique + + data.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4))); + if (prefixLength >= 15) { + data.writeVInt(prefixLength - 15); + } + if (suffixLength >= 16) { + data.writeVInt(suffixLength - 16); + } + data.writeBytes(term.bytes, term.offset + prefixLength, term.length - prefixLength); + } + maxLength = Math.max(maxLength, term.length); + previous.copyBytes(term); + ++ord; + } + writer.finish(); + meta.writeInt(maxLength); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + + // Now write the reverse terms index + writeTermsIndex(values); + } + + private void writeTermsIndex(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + long start = data.getFilePointer(); + + long numBlocks = 1L + ((size + Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) + >>> Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + TermsEnum iterator = values.termsEnum(); + BytesRefBuilder previous = new BytesRefBuilder(); + long offset = 0; + long ord = 0; + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == 0) { + writer.add(offset); + final int 
sortKeyLength; + if (ord == 0) { + // no previous term: no bytes to write + sortKeyLength = 0; + } else { + sortKeyLength = StringHelper.sortKeyLength(previous.get(), term); + } + offset += sortKeyLength; + data.writeBytes(term.bytes, term.offset, sortKeyLength); + } else if ((ord + & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) { + previous.copyBytes(term); + } + ++ord; + } + writer.add(offset); + writer.finish(); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + } + + @Override + public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED_NUMERIC); + + long[] stats = writeValues(field, valuesProducer); + int numDocsWithField = Math.toIntExact(stats[0]); + long numValues = stats[1]; + assert numValues >= numDocsWithField; + + meta.writeInt(numDocsWithField); + if (numValues > numDocsWithField) { + long start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1L, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + 
meta.writeByte(Lucene70DocValuesFormat.SORTED_SET); + + SortedSetDocValues values = valuesProducer.getSortedSet(field); + int numDocsWithField = 0; + long numOrds = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + numOrds += values.docValueCount(); + } + + if (numDocsWithField == numOrds) { + meta.writeByte((byte) 0); + doAddSortedField(field, new EmptyDocValuesProducer() { + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); + } + }); + return; + } + meta.writeByte((byte) 1); + + assert numDocsWithField != 0; + if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedSet(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numOrds, numberOfBitsPerOrd); + values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0; i < values.docValueCount(); i++) { + writer.add(values.nextOrd()); + } + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + + meta.writeInt(numDocsWithField); + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + 
values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + values.nextOrd(); + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + + addTermsDict(values); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java new file mode 100644 index 0000000000000..76fce4cd15c93 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java @@ -0,0 +1,171 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.SmallFloat; + +import java.io.IOException; + +/** + * Lucene 7.0 DocValues format. + * + *

    Documents that have a value for the field are encoded in a way that it is always possible to + * know the ordinal of the current document in the set of documents that have a value. For instance, + * say the set of documents that have a value for the field is {1, 5, 6, 11}. When the + * iterator is on 6, it knows that this is the 3rd item of the set. This way, values + * can be stored densely and accessed based on their index at search time. If all documents in a + * segment have a value for the field, the index is the same as the doc ID, so this case is encoded + * implicitly and is very fast at query time. On the other hand if some documents are missing a + * value for the field then the set of documents that have a value is encoded into blocks. All doc + * IDs that share the same upper 16 bits are encoded into the same block with the following + * strategies: + * + *

      + *
    • SPARSE: This strategy is used when a block contains at most 4095 documents. The lower 16 + * bits of doc IDs are stored as {@link DataOutput#writeShort(short) shorts} while the upper + * 16 bits are given by the block ID. + *
    • DENSE: This strategy is used when a block contains between 4096 and 65535 documents. The + * lower bits of doc IDs are stored in a bit set. Advancing is performed using {@link + * Long#numberOfTrailingZeros(long) ntz} operations while the index is computed by + * accumulating the {@link Long#bitCount(long) bit counts} of the visited longs. + *
    • ALL: This strategy is used when a block contains exactly 65536 documents, meaning that the + * block is full. In that case doc IDs do not need to be stored explicitly. This is typically + * faster than both SPARSE and DENSE which is a reason why it is preferable to have all + * documents that have a value for a field using contiguous doc IDs, for instance by using + * {@link IndexWriterConfig#setIndexSort(org.apache.lucene.search.Sort) index sorting}. + *
    + * + *

    Then the five per-document value types (Numeric,Binary,Sorted,SortedSet,SortedNumeric) are + * encoded using the following strategies: + * + *

    {@link DocValuesType#NUMERIC NUMERIC}: + * + *

      + *
    • Delta-compressed: per-document integers written as deltas from the minimum value, + * compressed with bitpacking. For more information, see {@link LegacyDirectWriter}. + *
    • Table-compressed: when the number of unique values is very small (< 256), and when there + * are unused "gaps" in the range of values used (such as {@link SmallFloat}), a lookup table + * is written instead. Each per-document entry is instead the ordinal to this table, and those + * ordinals are compressed with bitpacking ({@link LegacyDirectWriter}). + *
    • GCD-compressed: when all numbers share a common divisor, such as dates, the greatest common + * divisor (GCD) is computed, and quotients are stored using Delta-compressed Numerics.
    • Monotonic-compressed: when all numbers are monotonically increasing offsets, they are + * written as blocks of bitpacked integers, encoding the deviation from the expected delta. + *
    • Const-compressed: when there is only one possible value, no per-document data is needed and + * this value is encoded alone. + *
    + * + *

    {@link DocValuesType#BINARY BINARY}: + * + *

      + *
    • Fixed-width Binary: one large concatenated byte[] is written, along with the fixed length. + * Each document's value can be addressed directly with multiplication ({@code docID * + * length}). + *
    • Variable-width Binary: one large concatenated byte[] is written, along with end addresses + * for each document. The addresses are written as Monotonic-compressed numerics. + *
    • Prefix-compressed Binary: values are written in chunks of 16, with the first value written + * completely and other values sharing prefixes. chunk addresses are written as + * Monotonic-compressed numerics. A reverse lookup index is written from a portion of every + * 1024th term. + *
    + * + *

    {@link DocValuesType#SORTED SORTED}: + * + *

      + *
    • Sorted: a mapping of ordinals to deduplicated terms is written as Prefix-compressed Binary, + * along with the per-document ordinals written using one of the numeric strategies above. + *
    + * + *

    {@link DocValuesType#SORTED_SET SORTED_SET}: + * + *

      + *
    • Single: if all documents have 0 or 1 value, then data are written like SORTED. + *
    • SortedSet: a mapping of ordinals to deduplicated terms is written as Binary, an ordinal + * list and per-document index into this list are written using the numeric strategies above. + *
    + * + *

    {@link DocValuesType#SORTED_NUMERIC SORTED_NUMERIC}: + * + *

      + *
    • Single: if all documents have 0 or 1 value, then data are written like NUMERIC. + *
    • SortedNumeric: a value list and per-document index into this list are written using the + * numeric strategies above. + *
    + * + *

    Files: + * + *

      + *
    1. .dvd: DocValues data + *
    2. .dvm: DocValues metadata + *
    + */ +public final class Lucene70DocValuesFormat extends DocValuesFormat { + + /** Sole Constructor */ + public Lucene70DocValuesFormat() { + super("Lucene70"); + } + + @Override + public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { + return new Lucene70DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + @Override + public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { + return new Lucene70DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + static final String DATA_CODEC = "Lucene70DocValuesData"; + static final String DATA_EXTENSION = "dvd"; + static final String META_CODEC = "Lucene70DocValuesMetadata"; + static final String META_EXTENSION = "dvm"; + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + + // indicates docvalues type + static final byte NUMERIC = 0; + static final byte BINARY = 1; + static final byte SORTED = 2; + static final byte SORTED_SET = 3; + static final byte SORTED_NUMERIC = 4; + + static final int DIRECT_MONOTONIC_BLOCK_SHIFT = 16; + + static final int NUMERIC_BLOCK_SHIFT = 14; + static final int NUMERIC_BLOCK_SIZE = 1 << NUMERIC_BLOCK_SHIFT; + + static final int TERMS_DICT_BLOCK_SHIFT = 4; + static final int TERMS_DICT_BLOCK_SIZE = 1 << TERMS_DICT_BLOCK_SHIFT; + static final int TERMS_DICT_BLOCK_MASK = TERMS_DICT_BLOCK_SIZE - 1; + + static final int TERMS_DICT_REVERSE_INDEX_SHIFT = 10; + static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; + static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java new file mode 100644 index 
0000000000000..5164a67c428b3 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java @@ -0,0 +1,1461 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicReader; +import org.apache.lucene.backward_codecs.packed.LegacyDirectReader; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BaseTermsEnum; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.ImpactsEnum; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.index.TermsEnum.SeekStatus; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LongValues; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +/** reader for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesProducer extends DocValuesProducer { + private final Map numerics = new HashMap<>(); + private final Map binaries = new HashMap<>(); + private final Map sorted = new HashMap<>(); + private final Map sortedSets = new HashMap<>(); + private final Map sortedNumerics = new HashMap<>(); + private final IndexInput data; + private final int maxDoc; + + static 
final long NO_MORE_ORDS = -1; + + /** expert: instantiates a new reader */ + Lucene70DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + this.maxDoc = state.segmentInfo.maxDoc(); + + int version = -1; + + // read in the entries from the metadata file. + try (ChecksumIndexInput in = EndiannessReverserUtil.openChecksumInput(state.directory, metaName, state.context)) { + Throwable priorE = null; + try { + version = CodecUtil.checkIndexHeader( + in, + metaCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + readFields(in, state.fieldInfos); + } catch (Throwable exception) { + priorE = exception; + } finally { + CodecUtil.checkFooter(in, priorE); + } + } + + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + this.data = EndiannessReverserUtil.openInput(state.directory, dataName, state.context); + boolean success = false; + try { + final int version2 = CodecUtil.checkIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + if (version != version2) { + throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data); + } + + // NOTE: data file is too costly to verify checksum against all the bytes on open, + // but for now we at least verify proper structure of the checksum footer: which looks + // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption + // such as file truncation. 
+ CodecUtil.retrieveChecksum(data); + + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this.data); + } + } + } + + private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException { + for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) { + FieldInfo info = infos.fieldInfo(fieldNumber); + if (info == null) { + throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); + } + byte type = meta.readByte(); + if (type == Lucene70DocValuesFormat.NUMERIC) { + numerics.put(info.name, readNumeric(meta)); + } else if (type == Lucene70DocValuesFormat.BINARY) { + binaries.put(info.name, readBinary(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED) { + sorted.put(info.name, readSorted(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_SET) { + sortedSets.put(info.name, readSortedSet(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_NUMERIC) { + sortedNumerics.put(info.name, readSortedNumeric(meta)); + } else { + throw new CorruptIndexException("invalid type: " + type, meta); + } + } + } + + private NumericEntry readNumeric(ChecksumIndexInput meta) throws IOException { + NumericEntry entry = new NumericEntry(); + readNumeric(meta, entry); + return entry; + } + + private void readNumeric(ChecksumIndexInput meta, NumericEntry entry) throws IOException { + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numValues = meta.readLong(); + int tableSize = meta.readInt(); + if (tableSize > 256) { + throw new CorruptIndexException("invalid table size: " + tableSize, meta); + } + if (tableSize >= 0) { + entry.table = new long[tableSize]; + for (int i = 0; i < tableSize; ++i) { + entry.table[i] = meta.readLong(); + } + } + if (tableSize < -1) { + entry.blockShift = -2 - tableSize; + } else { + entry.blockShift = -1; + } + entry.bitsPerValue = meta.readByte(); + entry.minValue = 
meta.readLong(); + entry.gcd = meta.readLong(); + entry.valuesOffset = meta.readLong(); + entry.valuesLength = meta.readLong(); + } + + private BinaryEntry readBinary(ChecksumIndexInput meta) throws IOException { + BinaryEntry entry = new BinaryEntry(); + entry.dataOffset = meta.readLong(); + entry.dataLength = meta.readLong(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.minLength = meta.readInt(); + entry.maxLength = meta.readInt(); + if (entry.minLength < entry.maxLength) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1L, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + private SortedEntry readSorted(ChecksumIndexInput meta) throws IOException { + SortedEntry entry = new SortedEntry(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private SortedSetEntry readSortedSet(ChecksumIndexInput meta) throws IOException { + SortedSetEntry entry = new SortedSetEntry(); + byte multiValued = meta.readByte(); + switch (multiValued) { + case 0: // singlevalued + entry.singleValueEntry = readSorted(meta); + return entry; + case 1: // multivalued + break; + default: + throw new CorruptIndexException("Invalid multiValued flag: " + multiValued, meta); + } + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.addressesOffset = meta.readLong(); + final int 
blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private static void readTermDict(ChecksumIndexInput meta, TermsDictEntry entry) throws IOException { + entry.termsDictSize = meta.readVLong(); + entry.termsDictBlockShift = meta.readInt(); + final int blockShift = meta.readInt(); + final long addressesSize = (entry.termsDictSize + (1L << entry.termsDictBlockShift) - 1) >>> entry.termsDictBlockShift; + entry.termsAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, addressesSize, blockShift); + entry.maxTermLength = meta.readInt(); + entry.termsDataOffset = meta.readLong(); + entry.termsDataLength = meta.readLong(); + entry.termsAddressesOffset = meta.readLong(); + entry.termsAddressesLength = meta.readLong(); + entry.termsDictIndexShift = meta.readInt(); + final long indexSize = (entry.termsDictSize + (1L << entry.termsDictIndexShift) - 1) >>> entry.termsDictIndexShift; + entry.termsIndexAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, 1 + indexSize, blockShift); + entry.termsIndexOffset = meta.readLong(); + entry.termsIndexLength = meta.readLong(); + entry.termsIndexAddressesOffset = meta.readLong(); + entry.termsIndexAddressesLength = meta.readLong(); + } + + private SortedNumericEntry readSortedNumeric(ChecksumIndexInput meta) throws IOException { + SortedNumericEntry entry = new SortedNumericEntry(); + readNumeric(meta, entry); + entry.numDocsWithField = meta.readInt(); + if (entry.numDocsWithField != entry.numValues) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + @Override + public void close() throws IOException { + data.close(); + } + + private 
static class NumericEntry { + long[] table; + int blockShift; + byte bitsPerValue; + long docsWithFieldOffset; + long docsWithFieldLength; + long numValues; + long minValue; + long gcd; + long valuesOffset; + long valuesLength; + } + + private static class BinaryEntry { + long dataOffset; + long dataLength; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + int minLength; + int maxLength; + long addressesOffset; + long addressesLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + } + + private static class TermsDictEntry { + long termsDictSize; + int termsDictBlockShift; + LegacyDirectMonotonicReader.Meta termsAddressesMeta; + int maxTermLength; + long termsDataOffset; + long termsDataLength; + long termsAddressesOffset; + long termsAddressesLength; + int termsDictIndexShift; + LegacyDirectMonotonicReader.Meta termsIndexAddressesMeta; + long termsIndexOffset; + long termsIndexLength; + long termsIndexAddressesOffset; + long termsIndexAddressesLength; + } + + private static class SortedEntry extends TermsDictEntry { + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + } + + private static class SortedSetEntry extends TermsDictEntry { + SortedEntry singleValueEntry; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + private static class SortedNumericEntry extends NumericEntry { + int numDocsWithField; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + @Override + public NumericDocValues getNumeric(FieldInfo field) throws IOException { + NumericEntry entry = numerics.get(field.name); + return getNumeric(entry); + } + + private abstract static class DenseNumericDocValues extends NumericDocValues { + + 
final int maxDoc; + int doc = -1; + + DenseNumericDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public long cost() { + return maxDoc; + } + } + + private abstract static class SparseNumericDocValues extends NumericDocValues { + + final IndexedDISI disi; + + SparseNumericDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + } + + private NumericDocValues getNumeric(NumericEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + // empty + return DocValues.emptyNumeric(); + } else if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.bitsPerValue == 0) { + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // dense but split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new DenseNumericDocValues(maxDoc) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int block = doc >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(doc & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(doc)]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return mul * values.get(doc) + delta; + } + }; + } + } + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues); + if (entry.bitsPerValue == 0) { + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // sparse and split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new SparseNumericDocValues(disi) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int index = disi.index(); + final int block = index >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(disi.index())]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return mul * values.get(disi.index()) + delta; + } + }; + } + } + } + } + } + + private LongValues getNumericValues(NumericEntry entry) throws IOException { + if (entry.bitsPerValue == 0) { + return new LongValues() { + @Override + public long get(long index) { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + final int shift = entry.blockShift; + final long mul = entry.gcd; + final long mask = (1L << shift) - 1; + return new LongValues() { + long block = -1; + long delta; + long offset; + long blockEndOffset; + LongValues values; + + @Override + public long get(long index) { + final long block = index >>> shift; + if 
(this.block != block) { + assert block > this.block : "Reading backwards is illegal: " + this.block + " < " + block; + int bitsPerValue; + do { + offset = blockEndOffset; + try { + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + } catch (IOException e) { + throw new RuntimeException(e); + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 ? LongValues.ZEROES : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new LongValues() { + @Override + public long get(long index) { + return table[(int) values.get(index)]; + } + }; + } else if (entry.gcd != 1) { + final long gcd = entry.gcd; + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) * gcd + minValue; + } + }; + } else if (entry.minValue != 0) { + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) + minValue; + } + }; + } else { + return values; + } + } + } + } + + private abstract static class DenseBinaryDocValues extends BinaryDocValues { + + final int maxDoc; + int doc = -1; + + DenseBinaryDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = 
NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + doc = target; + return true; + } + } + + private abstract static class SparseBinaryDocValues extends BinaryDocValues { + + final IndexedDISI disi; + + SparseBinaryDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + } + + @Override + public BinaryDocValues getBinary(FieldInfo field) throws IOException { + BinaryEntry entry = binaries.get(field.name); + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptyBinary(); + } + + final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength); + + if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) doc * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + long 
startOffset = addresses.get(doc); + bytes.length = (int) (addresses.get(doc + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) disi.index() * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + final int index = disi.index(); + long startOffset = addresses.get(index); + bytes.length = (int) (addresses.get(index + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } + } + + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + SortedEntry entry = sorted.get(field.name); + return getSorted(entry); + } + + private SortedDocValues getSorted(SortedEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptySorted(); + } + + final LongValues ords; + if (entry.bitsPerValue == 0) { + ords = new LongValues() { + @Override + public long get(long index) { + return 0L; + } + }; + } else { + final RandomAccessInput 
slice = data.randomAccessSlice(entry.ordsOffset, entry.ordsLength); + ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + } + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedDocValues(entry, data) { + + int doc = -1; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public int ordValue() { + return (int) ords.get(doc); + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedDocValues(entry, data) { + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int ordValue() { + return (int) ords.get(disi.index()); + } + }; + } + } + + private abstract static class BaseSortedDocValues extends SortedDocValues { + + final SortedEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedDocValues(SortedEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public int getValueCount() { + return Math.toIntExact(entry.termsDictSize); + } + + @Override + public BytesRef lookupOrd(int ord) throws 
IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public int lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return Math.toIntExact(termsEnum.ord()); + case NOT_FOUND: + case END: + default: + return Math.toIntExact(-1L - termsEnum.ord()); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private abstract static class BaseSortedSetDocValues extends SortedSetDocValues { + + final SortedSetEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedSetDocValues(SortedSetEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public long getValueCount() { + return entry.termsDictSize; + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public long lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return termsEnum.ord(); + case NOT_FOUND: + case END: + default: + return -1L - termsEnum.ord(); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private static class TermsDict extends BaseTermsEnum { + + final TermsDictEntry entry; + final LongValues blockAddresses; + final IndexInput bytes; + final long blockMask; + final LongValues indexAddresses; + final IndexInput indexBytes; + final BytesRef term; + long ord = -1; + + TermsDict(TermsDictEntry entry, IndexInput data) throws IOException { + this.entry = entry; + RandomAccessInput addressesSlice = data.randomAccessSlice(entry.termsAddressesOffset, entry.termsAddressesLength); + blockAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsAddressesMeta, addressesSlice); + 
bytes = data.slice("terms", entry.termsDataOffset, entry.termsDataLength); + blockMask = (1L << entry.termsDictBlockShift) - 1; + RandomAccessInput indexAddressesSlice = data.randomAccessSlice( + entry.termsIndexAddressesOffset, + entry.termsIndexAddressesLength + ); + indexAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); + indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + term = new BytesRef(entry.maxTermLength); + } + + @Override + public BytesRef next() throws IOException { + if (++ord >= entry.termsDictSize) { + return null; + } + if ((ord & blockMask) == 0L) { + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + } else { + final int token = Byte.toUnsignedInt(bytes.readByte()); + int prefixLength = token & 0x0F; + int suffixLength = 1 + (token >>> 4); + if (prefixLength == 15) { + prefixLength += bytes.readVInt(); + } + if (suffixLength == 16) { + suffixLength += bytes.readVInt(); + } + term.length = prefixLength + suffixLength; + bytes.readBytes(term.bytes, prefixLength, suffixLength); + } + return term; + } + + @Override + public void seekExact(long ord) throws IOException { + if (ord < 0 || ord >= entry.termsDictSize) { + throw new IndexOutOfBoundsException(); + } + final long blockIndex = ord >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(blockIndex); + bytes.seek(blockAddress); + this.ord = (blockIndex << entry.termsDictBlockShift) - 1; + do { + next(); + } while (this.ord < ord); + } + + private BytesRef getTermFromIndex(long index) throws IOException { + assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + final long start = indexAddresses.get(index); + term.length = (int) (indexAddresses.get(index + 1) - start); + indexBytes.seek(start); + indexBytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekTermsIndex(BytesRef text) throws 
IOException { + long lo = 0L; + long hi = (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + while (lo <= hi) { + final long mid = (lo + hi) >>> 1; + getTermFromIndex(mid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + lo = mid + 1; + } else { + hi = mid - 1; + } + } + + assert hi < 0 || getTermFromIndex(hi).compareTo(text) <= 0; + assert hi == ((entry.termsDictSize - 1) >>> entry.termsDictIndexShift) || getTermFromIndex(hi + 1).compareTo(text) > 0; + + return hi; + } + + private BytesRef getFirstTermFromBlock(long block) throws IOException { + assert block >= 0 && block <= (entry.termsDictSize - 1) >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(block); + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekBlock(BytesRef text) throws IOException { + long index = seekTermsIndex(text); + if (index == -1L) { + return -1L; + } + + long ordLo = index << entry.termsDictIndexShift; + long ordHi = Math.min(entry.termsDictSize, ordLo + (1L << entry.termsDictIndexShift)) - 1L; + + long blockLo = ordLo >>> entry.termsDictBlockShift; + long blockHi = ordHi >>> entry.termsDictBlockShift; + + while (blockLo <= blockHi) { + final long blockMid = (blockLo + blockHi) >>> 1; + getFirstTermFromBlock(blockMid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + blockLo = blockMid + 1; + } else { + blockHi = blockMid - 1; + } + } + + assert blockHi < 0 || getFirstTermFromBlock(blockHi).compareTo(text) <= 0; + assert blockHi == ((entry.termsDictSize - 1) >>> entry.termsDictBlockShift) + || getFirstTermFromBlock(blockHi + 1).compareTo(text) > 0; + + return blockHi; + } + + @Override + public SeekStatus seekCeil(BytesRef text) throws IOException { + final long block = seekBlock(text); + if (block == -1) { + // before the first term + seekExact(0L); + return SeekStatus.NOT_FOUND; + } + final long blockAddress = 
blockAddresses.get(block); + this.ord = block << entry.termsDictBlockShift; + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + while (true) { + int cmp = term.compareTo(text); + if (cmp == 0) { + return SeekStatus.FOUND; + } else if (cmp > 0) { + return SeekStatus.NOT_FOUND; + } + if (next() == null) { + return SeekStatus.END; + } + } + } + + @Override + public BytesRef term() throws IOException { + return term; + } + + @Override + public long ord() throws IOException { + return ord; + } + + @Override + public long totalTermFreq() throws IOException { + return -1L; + } + + @Override + public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public ImpactsEnum impacts(int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int docFreq() throws IOException { + throw new UnsupportedOperationException(); + } + } + + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + SortedNumericEntry entry = sortedNumerics.get(field.name); + if (entry.numValues == entry.numDocsWithField) { + return DocValues.singleton(getNumeric(entry)); + } + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + final LongValues values = getNumericValues(entry); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new SortedNumericDocValues() { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) 
{ + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextValue() throws IOException { + return values.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new SortedNumericDocValues() { + + boolean set; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + @Override + public long nextValue() throws IOException { + set(); + return values.get(start++); + } + + @Override + public int docValueCount() { + set(); + return count; + } + + private void set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + } + } + }; + } + } + + @Override + public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { + SortedSetEntry entry = sortedSets.get(field.name); + if (entry.singleValueEntry != null) { + return DocValues.singleton(getSorted(entry.singleValueEntry)); + } + + final RandomAccessInput slice = data.randomAccessSlice(entry.ordsOffset, 
entry.ordsLength); + final LongValues ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedSetDocValues(entry, data) { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextOrd() throws IOException { + if (start == end) { + return NO_MORE_ORDS; + } + return ords.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedSetDocValues(entry, data) { + + boolean set; + long start; + long end = 0; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public 
boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + private boolean set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + return true; + } + return false; + } + + @Override + public long nextOrd() throws IOException { + if (set()) { + return ords.get(start++); + } else if (start == end) { + return NO_MORE_ORDS; + } else { + return ords.get(start++); + } + } + + @Override + public int docValueCount() { + set(); + return count; + } + }; + } + } + + @Override + public void checkIntegrity() throws IOException { + CodecUtil.checksumEntireFile(data); + } + + @Override + public DocValuesSkipper getSkipper(FieldInfo field) { + return null; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 6e5205d664f2d..0215e9f7ca4ab 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -6,5 +6,6 @@ # org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene62.Lucene62Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60Codec diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat index 2d46b4bca3d0c..8d24d86982da8 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat +++ 
b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat @@ -14,3 +14,4 @@ # limitations under the License. org.elasticsearch.xpack.lucene.bwc.codecs.lucene54.Lucene54DocValuesFormat +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java new file mode 100644 index 0000000000000..ce645feb854d1 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.elasticsearch.test.GraalVMThreadsFilter; + +@ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) +public class Lucene70DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { + + private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene70DocValuesFormat()); + + @Override + protected Codec getCodec() { + return codec; + } +} From 8ecb4071056655bb292f501e94b468793e81b5bd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 24 Sep 2024 06:12:02 +0000 Subject: [PATCH 355/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-53d1c2bd2fb --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d4cbf3560831b..8d99daa892419 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@
    - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 147eb47957a6a2c5070fd845c41d83a897aecbc1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 24 Sep 2024 06:12:03 +0000 Subject: [PATCH 356/417] [Automated] Update Lucene snapshot to 9.12.0-snapshot-11c4f071a7a --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9681431e6b210..a04d425cee4f6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 1e62951ab5d643a5f14d01bfdc0142d7d946c26a Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 24 Sep 2024 15:51:27 +0200 Subject: [PATCH 357/417] Remove leftover TODO --- .../elasticsearch/index/analysis/PreBuiltAnalyzerTests.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 1af059823f4bd..0a7bd495f2f22 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -81,8 +81,7 @@ public void testThatAnalyzersAreUsedInMapping() throws IOException { PreBuiltAnalyzers randomPreBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt]; String analyzerName = randomPreBuiltAnalyzer.name().toLowerCase(Locale.ROOT); - // TODO Lucene 10 upgrade, after removal of old IndexVersions, return to "IndexVersionUtils.randomVersion(random())" - IndexVersion randomVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); + IndexVersion randomVersion = 
IndexVersionUtils.randomVersion(random()); Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, randomVersion).build(); NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider( From c7c24b0a271b51a05a418cd952afe4b317cc82f3 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 24 Sep 2024 15:08:48 +0100 Subject: [PATCH 358/417] Remove TODO in RegexpFlag --- .../main/java/org/elasticsearch/index/query/RegexpFlag.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 542d00a9ae2b7..932f807d55d44 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -39,9 +39,8 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} - * TODO Lucene 10 upgrade: Lucenes RegExp class has removed the COMPLEMENT flag in https://issues.apache.org/jira/browse/LUCENE-10010 - * We can use the deprecated constant for now but need a plan for deprecation / removal - * at some point before removal of this functionality in Lucene + * We use the deprecated support in Lucene 10. 
Will be removed in Lucene 11 + * https://github.com/elastic/elasticsearch/issues/113465 */ COMPLEMENT(DEPRECATED_COMPLEMENT), From eb06cecccf34b9cdeb04845500eee03f173a39c4 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 24 Sep 2024 16:16:48 +0200 Subject: [PATCH 359/417] Add UpdateForV10 annotation --- .../src/main/java/org/elasticsearch/index/query/RegexpFlag.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 932f807d55d44..19be37e3d21be 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import java.util.Locale; @@ -42,6 +43,7 @@ public enum RegexpFlag { * We use the deprecated support in Lucene 10. Will be removed in Lucene 11 * https://github.com/elastic/elasticsearch/issues/113465 */ + @UpdateForV10 COMPLEMENT(DEPRECATED_COMPLEMENT), /** From 7a8cab6b584fc4ee6c680cc8b906ab8637f304d7 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 24 Sep 2024 21:43:11 +0200 Subject: [PATCH 360/417] Revert needless jdk change in legacy file --- .ci/matrix-runtime-javas-fips.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ci/matrix-runtime-javas-fips.yml b/.ci/matrix-runtime-javas-fips.yml index 624eb6abf222e..e9ace78b35823 100644 --- a/.ci/matrix-runtime-javas-fips.yml +++ b/.ci/matrix-runtime-javas-fips.yml @@ -2,4 +2,4 @@ # values below are included as an axis of the matrix. 
ES_RUNTIME_JAVA: - - openjdk21 + - openjdk17 From 5490a47b47f6087c3b01d397052b163065f23a65 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 25 Sep 2024 06:14:31 +0000 Subject: [PATCH 361/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-53d1c2bd2fb --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8d99daa892419..e0d62641f01d2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From b295803acfd8de90f5520691a181708fc85a062d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 25 Sep 2024 15:37:32 +0200 Subject: [PATCH 362/417] Fix compile issues after last merge with main --- .../java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 223ee81e84a92..5233a0cd564ef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -685,7 +685,7 @@ public void testDateMathIndexes() throws ExecutionException, InterruptedExceptio assertNotNull(localClusterSearchInfo); Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER); assertNotNull(remoteClusterSearchInfo); - assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value, greaterThan(2L)); + assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value(), greaterThan(2L)); for (var hit : response.getHits()) { 
assertThat(hit.getIndex(), anyOf(equalTo("datemath-2001-01-01-14"), equalTo("remotemath-2001-01-01-14"))); } From 518fb08600077e85fffdb93d30399d4ead4355b0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 26 Sep 2024 06:14:44 +0000 Subject: [PATCH 363/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-ff57fa7b423 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index bfbb3fd4e9b97..294ffb6e0938a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-53d1c2bd2fb +lucene = 10.0.0-snapshot-ff57fa7b423 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e0d62641f01d2..475152a5d0dfb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@
    - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 52de43ffd312b1a12f16fbc96aacfc3e1e7c7546 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 27 Sep 2024 06:12:47 +0000 Subject: [PATCH 364/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-7b4b0238d70 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 294ffb6e0938a..03f3ddf8e9125 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-ff57fa7b423 +lucene = 10.0.0-snapshot-7b4b0238d70 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 475152a5d0dfb..a16763a4247fd 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 731219a84b14147e63545fffb30a367f45ddc35e Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 27 Sep 2024 13:56:13 +0200 Subject: [PATCH 365/417] update profile tests We run in a mixed clusters, replacing the profile description isn't enough, we need to skip the test against previous versions because we get a different output depending on 
the version of the node that has the shard. I isolated the offending assertion to a separate test, so that we don't need to skip the entire profile test. --- rest-api-spec/build.gradle | 2 - .../rest-api-spec/test/search/370_profile.yml | 45 +++++++++++++++++-- 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index c205fe60d3bc9..646026c68e29a 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -55,8 +55,6 @@ tasks.named("precommit").configure { } tasks.named("yamlRestCompatTestTransform").configure ({ task -> - task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") - task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") task.skipTestsByFilePattern("indices.create/synthetic_source*.yml", "@UpdateForV9 -> tests do not pass after bumping API version to 9 [ES-9597]") diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 8a03833cb003a..075545c01eef7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -212,7 +212,6 @@ dfs knn vector profiling: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } - gt: { 
profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } @@ -235,6 +234,48 @@ dfs knn vector profiling: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } +--- +dfs knn vector profiling description: + - requires: + cluster_features: "gte_v9.0.0" + reason: the profile description changed with the lucene 10 upgrade + + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, -20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { hits.total.value: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } + --- dfs knn vector profiling with vector_operations_count: - requires: @@ -276,7 +317,6 @@ dfs knn vector profiling with vector_operations_count: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } @@ -300,7 +340,6 @@ dfs knn vector profiling with vector_operations_count: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } - --- dfs 
profile for search with dfs_query_then_fetch: - requires: From c0646368f782decdd9d1109c25e12b36746c6e1a Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 27 Sep 2024 15:35:54 +0200 Subject: [PATCH 366/417] add lucene 10 upgrade node feature and fix profile yaml test --- .../resources/rest-api-spec/test/search/370_profile.yml | 5 ++--- .../main/java/org/elasticsearch/search/SearchFeatures.java | 5 ++++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 075545c01eef7..81ca84a06f815 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -237,9 +237,8 @@ dfs knn vector profiling: --- dfs knn vector profiling description: - requires: - cluster_features: "gte_v9.0.0" - reason: the profile description changed with the lucene 10 upgrade - + cluster_features: ["lucene_10_upgrade"] + reason: "the profile description changed with Lucene 10" - do: indices.create: index: images diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 6a89d66bb3411..beac39c2de304 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -16,8 +16,11 @@ import java.util.Set; public final class SearchFeatures implements FeatureSpecification { + + public static final NodeFeature LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade"); + @Override public Set getFeatures() { - return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED); + return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE); } } From 2b1516359e275a1cf3162d68c2a17c4dbe83c63f Mon Sep 17 00:00:00 2001 
From: Luca Cavanna Date: Fri, 27 Sep 2024 16:18:16 +0200 Subject: [PATCH 367/417] restore replaceValueInMatch for profile description tests from 8.x --- rest-api-spec/build.gradle | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 646026c68e29a..c205fe60d3bc9 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -55,6 +55,8 @@ tasks.named("precommit").configure { } tasks.named("yamlRestCompatTestTransform").configure ({ task -> + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") task.skipTestsByFilePattern("indices.create/synthetic_source*.yml", "@UpdateForV9 -> tests do not pass after bumping API version to 9 [ES-9597]") From d912d7b807a71c798d0c6af3034ff13bf8ec2d26 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 28 Sep 2024 06:12:47 +0000 Subject: [PATCH 368/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-7b4b0238d70 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a16763a4247fd..fca071f26b964 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From f93db4961957672dd993bd9cbdc766385fb8860a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: 
Sun, 29 Sep 2024 06:12:00 +0000 Subject: [PATCH 369/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-0a8604d908c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 03f3ddf8e9125..467986cc7f7a6 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-7b4b0238d70 +lucene = 10.0.0-snapshot-0a8604d908c bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fca071f26b964..cc7e8363224fa 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 4a79e5100d7faa5e757df6ff5d397f5a4f963855 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Sun, 29 Sep 2024 11:26:36 +0200 Subject: [PATCH 370/417] Revert "[Automated] Update Lucene snapshot to 10.0.0-snapshot-0a8604d908c" This reverts commit f93db4961957672dd993bd9cbdc766385fb8860a. 
--- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 467986cc7f7a6..03f3ddf8e9125 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-0a8604d908c +lucene = 10.0.0-snapshot-7b4b0238d70 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cc7e8363224fa..fca071f26b964 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 1dc8b4ce15a05251e741738cf4b609be42589ed9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 30 Sep 2024 06:13:23 +0000 Subject: [PATCH 371/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-22ac47c07ad --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 03f3ddf8e9125..f17785ae4b464 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-7b4b0238d70 +lucene = 10.0.0-snapshot-22ac47c07ad bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index fca071f26b964..47272f8c109fa 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 63e524d6014cd3c43ebc4f344c25d2bbec5df68d Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 30 Sep 2024 17:18:44 +0200 Subject: [PATCH 372/417] Address compile errors after vector api changes upstream (#113766) Our lucene_snapshot branch requires updating after https://github.com/apache/lucene/pull/13779 --- .../vector/VectorScorerBenchmark.java | 10 +- .../simdvec/VectorScorerFactory.java | 10 +- .../simdvec/VectorScorerFactoryImpl.java | 6 +- .../simdvec/VectorScorerFactoryImpl.java | 6 +- .../simdvec/internal/Int7SQVectorScorer.java | 8 +- .../internal/Int7SQVectorScorerSupplier.java | 24 +--- .../simdvec/internal/Int7SQVectorScorer.java | 21 +--- .../simdvec/VectorScorerFactoryTests.java | 7 +- .../diskusage/IndexDiskUsageAnalyzer.java | 19 +-- .../ES814ScalarQuantizedVectorsFormat.java | 15 ++- .../vectors/ES815BitFlatVectorsFormat.java | 48 ++++--- .../DenormalizedCosineFloatVectorValues.java | 29 ++--- .../vectors/DenseVectorFieldMapper.java | 14 ++- .../ByteKnnDenseVectorDocValuesField.java | 13 +- .../vectors/KnnDenseVectorDocValuesField.java | 13 +- .../internal/ExitableDirectoryReader.java | 119 +++++++++++------- .../BaseKnnBitVectorsFormatTestCase.java | 6 +- ...HnswScalarQuantizedVectorsFormatTests.java | 17 +-- ...ormalizedCosineFloatVectorValuesTests.java | 8 +- .../KnnDenseVectorScriptDocValuesTests.java | 104 ++++++++++----- .../search/SearchCancellationTests.java | 7 +- .../accesscontrol/FieldSubsetReaderTests.java | 11 +- 22 files changed, 285 insertions(+), 230 
deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java index 569e8909e1e12..b294fe97c7e7c 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -19,7 +19,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.core.IOUtils; @@ -217,19 +217,17 @@ public float squareDistanceScalar() { return 1 / (1f + adjustedDistance); } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } - RandomVectorScorer luceneScorer(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] 
queryVec) - throws IOException { + RandomVectorScorer luceneScorer(QuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorer(sim, values, queryVec); } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java index e2aea6b3ebd9f..4ed60b2f5e8b2 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -39,7 +39,7 @@ static Optional instance() { Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ); @@ -52,9 +52,5 @@ Optional getInt7SQVectorScorerSupplier( * @param queryVector the query vector * @return an optional containing the vector scorer, or empty */ - Optional getInt7SQVectorScorer( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ); + Optional getInt7SQVectorScorer(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector); } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a22d787980252..6248902c32e7a 100644 --- 
a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -25,7 +25,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { throw new UnsupportedOperationException("should not reach here"); @@ -34,7 +34,7 @@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { throw new UnsupportedOperationException("should not reach here"); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a65fe582087d9..a863d9e3448ca 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import 
org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.simdvec.internal.Int7SQVectorScorer; import org.elasticsearch.simdvec.internal.Int7SQVectorScorerSupplier.DotProductSupplier; @@ -38,7 +38,7 @@ private VectorScorerFactoryImpl() {} public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { input = FilterIndexInput.unwrapOnlyTest(input); @@ -57,7 +57,7 @@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { return Int7SQVectorScorer.create(sim, values, queryVector); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index 0b41436ce2242..e02df124ad0f0 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -11,18 +11,14 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; public final class Int7SQVectorScorer { // Unconditionally returns an empty optional on <= JDK 21, since the scorer is only supported on JDK 22+ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { return Optional.empty(); } 
diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java index f6d874cd3e728..198e10406056e 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java @@ -12,7 +12,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; import java.io.IOException; @@ -31,12 +31,12 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS final int maxOrd; final float scoreCorrectionConstant; final MemorySegmentAccessInput input; - final RandomAccessQuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds + final QuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds final ScalarQuantizedVectorSimilarity fallbackScorer; protected Int7SQVectorScorerSupplier( MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant, ScalarQuantizedVectorSimilarity fallbackScorer ) { @@ -104,11 +104,7 @@ public float score(int node) throws IOException { public static final class EuclideanSupplier extends Int7SQVectorScorerSupplier { - public EuclideanSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public EuclideanSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, 
values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant, BITS)); } @@ -127,11 +123,7 @@ public EuclideanSupplier copy() { public static final class DotProductSupplier extends Int7SQVectorScorerSupplier { - public DotProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public DotProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(DOT_PRODUCT, scoreCorrectionConstant, BITS)); } @@ -151,11 +143,7 @@ public DotProductSupplier copy() { public static final class MaxInnerProductSupplier extends Int7SQVectorScorerSupplier { - public MaxInnerProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public MaxInnerProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant, BITS)); } diff --git a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index c9659ea1af9a8..3d0e1e71a3744 100644 --- a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import 
java.io.IOException; @@ -35,11 +35,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr byte[] scratch; /** Return an optional whose value, if present, is the scorer. Otherwise, an empty optional is returned. */ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { checkDimensions(queryVector.length, values.dimension()); var input = values.getSlice(); if (input == null) { @@ -63,12 +59,7 @@ public static Optional create( }; } - Int7SQVectorScorer( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - byte[] queryVector, - float queryCorrection - ) { + Int7SQVectorScorer(MemorySegmentAccessInput input, QuantizedByteVectorValues values, byte[] queryVector, float queryCorrection) { super(values); this.input = input; assert queryVector.length == values.getVectorByteLength(); @@ -105,7 +96,7 @@ final void checkOrdinal(int ord) { } public static final class DotProductScorer extends Int7SQVectorScorer { - public DotProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public DotProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -122,7 +113,7 @@ public float score(int node) throws IOException { } public static final class EuclideanScorer extends Int7SQVectorScorer { - public EuclideanScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public EuclideanScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -136,7 +127,7 @@ public float score(int node) throws IOException { } public 
static final class MaxInnerProductScorer extends Int7SQVectorScorer { - public MaxInnerProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float corr) { + public MaxInnerProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float corr) { super(in, values, query, corr); } diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java index db57dc936e794..0f967127f6f2c 100644 --- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -431,14 +431,13 @@ public Optional call() { } } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { 
return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 5acbbb5536560..e668624440351 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PointValues; @@ -544,13 +545,14 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I if (field.getVectorDimension() > 0) { switch (field.getVectorEncoding()) { case BYTE -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file ByteVectorValues vectorValues = vectorReader.getByteVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -558,22 +560,23 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } case FLOAT32 -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file FloatVectorValues vectorValues = vectorReader.getFloatVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -581,11 +584,11 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 4313aa40cf13e..e78fc22f3215f 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -22,18 +22,17 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedVectorsReader; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import 
org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.simdvec.VectorScorerFactory; import org.elasticsearch.simdvec.VectorSimilarityType; @@ -243,9 +242,9 @@ public String toString() { } @Override - public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, RandomAccessVectorValues values) + public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, KnnVectorValues values) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorerSupplier(sim, values); @@ -253,7 +252,7 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity if (factory != null) { var scorer = factory.getInt7SQVectorScorerSupplier( VectorSimilarityType.of(sim), - values.getSlice(), + qValues.getSlice(), qValues, qValues.getScalarQuantizer().getConstantMultiplier() ); @@ -266,9 +265,9 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, float[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, float[] query) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorer(sim, values, query); @@ -284,7 +283,7 @@ public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, Ra } @Override - public 
RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, byte[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, byte[] query) throws IOException { return delegate.getRandomVectorScorer(sim, values, query); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index f1ae4e3fdeded..29e179dfc7c5d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -14,13 +14,14 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; +import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.elasticsearch.script.field.vectors.ESVectorUtil; import java.io.IOException; @@ -61,14 +62,14 @@ public String toString() { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues + KnnVectorValues vectorValues ) throws IOException { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + assert 
vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - assert randomAccessVectorValues instanceof RandomAccessQuantizedByteVectorValues == false; + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + assert byteVectorValues instanceof QuantizedByteVectorValues == false; return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(randomAccessVectorValuesBytes); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(byteVectorValues); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -77,18 +78,15 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - byte[] bytes - ) { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + KnnVectorValues vectorValues, + byte[] target + ) throws IOException { + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - checkDimensions(bytes.length, randomAccessVectorValuesBytes.dimension()); + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + checkDimensions(target.length, byteVectorValues.dimension()); return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer( - randomAccessVectorValuesBytes, - bytes - ); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer(byteVectorValues, target); }; } throw new 
IllegalArgumentException("Unsupported vector type or similarity function"); @@ -96,10 +94,10 @@ public RandomVectorScorer getRandomVectorScorer( @Override public RandomVectorScorer getRandomVectorScorer( - VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - float[] floats - ) { + VectorSimilarityFunction similarityFunction, + KnnVectorValues vectorValues, + float[] target + ) throws IOException { throw new IllegalArgumentException("Unsupported vector type"); } } @@ -110,9 +108,9 @@ static float hammingScore(byte[] a, byte[] b) { static class HammingVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final byte[] query; - private final RandomAccessVectorValues.Bytes byteValues; + private final ByteVectorValues byteValues; - HammingVectorScorer(RandomAccessVectorValues.Bytes byteValues, byte[] query) { + HammingVectorScorer(ByteVectorValues byteValues, byte[] query) { super(byteValues); this.query = query; this.byteValues = byteValues; @@ -125,9 +123,9 @@ public float score(int i) throws IOException { } static class HammingScorerSupplier implements RandomVectorScorerSupplier { - private final RandomAccessVectorValues.Bytes byteValues, byteValues1, byteValues2; + private final ByteVectorValues byteValues, byteValues1, byteValues2; - HammingScorerSupplier(RandomAccessVectorValues.Bytes byteValues) throws IOException { + HammingScorerSupplier(ByteVectorValues byteValues) throws IOException { this.byteValues = byteValues; this.byteValues1 = byteValues.copy(); this.byteValues2 = byteValues.copy(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java index e8da3b72ae7c7..04069333deb13 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java @@ -45,24 +45,13 @@ public int size() { } @Override - public float[] vectorValue() throws IOException { - // Lazy load vectors as we may iterate but not actually require the vector - return vectorValue(in.docID()); + public DocIndexIterator iterator() { + return in.iterator(); } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - return in.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - return in.advance(target); + public FloatVectorValues copy() throws IOException { + return in.copy(); } @Override @@ -74,22 +63,24 @@ public float magnitude() { return magnitude; } - private float[] vectorValue(int docId) throws IOException { + @Override + public float[] vectorValue(int ord) throws IOException { + int docId = ordToDoc(ord); if (docId != this.docId) { this.docId = docId; hasMagnitude = decodedMagnitude(docId); // We should only copy and transform if we have a stored a non-unit length magnitude if (hasMagnitude) { - System.arraycopy(in.vectorValue(), 0, vector, 0, dimension()); + System.arraycopy(in.vectorValue(ord), 0, vector, 0, dimension()); for (int i = 0; i < vector.length; i++) { vector[i] *= magnitude; } return vector; } else { - return in.vectorValue(); + return in.vectorValue(ord); } } else { - return hasMagnitude ? vector : in.vectorValue(); + return hasMagnitude ? 
vector : in.vectorValue(ord); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 4adfe619ca4e1..a48af90d539e6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; @@ -2167,6 +2168,7 @@ private class IndexedSyntheticFieldLoader extends SourceLoader.DocValuesBasedSyn private ByteVectorValues byteVectorValues; private boolean hasValue; private boolean hasMagnitude; + private int ord; private final IndexVersion indexCreatedVersion; private final VectorSimilarity vectorSimilarity; @@ -2184,16 +2186,20 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) { magnitudeReader = leafReader.getNumericDocValues(fullPath() + COSINE_MAGNITUDE_FIELD_SUFFIX); } + KnnVectorValues.DocIndexIterator iterator = values.iterator(); return docId -> { - hasValue = docId == values.advance(docId); + hasValue = docId == iterator.advance(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); + ord = iterator.index(); return hasValue; }; } byteVectorValues = leafReader.getByteVectorValues(fullPath()); if (byteVectorValues != null) { + KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); return docId -> { - hasValue = docId == byteVectorValues.advance(docId); + 
hasValue = docId == iterator.advance(docId); + ord = iterator.index(); return hasValue; }; } @@ -2216,7 +2222,7 @@ public void write(XContentBuilder b) throws IOException { } b.startArray(leafName()); if (values != null) { - for (float v : values.vectorValue()) { + for (float v : values.vectorValue(ord)) { if (hasMagnitude) { b.value(v * magnitude); } else { @@ -2224,7 +2230,7 @@ public void write(XContentBuilder b) throws IOException { } } } else if (byteVectorValues != null) { - byte[] vectorValue = byteVectorValues.vectorValue(); + byte[] vectorValue = byteVectorValues.vectorValue(ord); for (byte value : vectorValue) { b.value(value); } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java index fd7c5227e22ac..be1b972dcd41a 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues; @@ -19,7 +20,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class ByteKnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected ByteVectorValues input; // null if no vectors + protected final ByteVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; // null if no vectors protected byte[] vector; protected final int dims; @@ -31,6 +33,7 @@ protected ByteKnnDenseVectorDocValuesField(@Nullable ByteVectorValues input, Str 
super(name, elementType); this.dims = dims; this.input = input; + this.iterator = input == null ? null : input.iterator(); } @Override @@ -38,15 +41,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java index c7678b03dd8c5..3e38092200511 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenormalizedCosineFloatVectorValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; @@ -20,7 +21,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected FloatVectorValues input; // null if no vectors + protected final FloatVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; protected float[] vector; protected final int dims; @@ -28,6 +30,7 @@ public KnnDenseVectorDocValuesField(@Nullable 
FloatVectorValues input, String na super(name, ElementType.FLOAT); this.dims = dims; this.input = input; + this.iterator = input == null ? null : input.iterator(); } @Override @@ -35,15 +38,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java index ad0979cb3a481..64b54d3623f04 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.QueryTimeout; @@ -459,7 +460,6 @@ public void grow(int count) { } private static class ExitableByteVectorValues extends ByteVectorValues { - private int calls; private final QueryCancellation queryCancellation; private final ByteVectorValues in; @@ -479,8 +479,13 @@ public int size() { } @Override - public byte[] vectorValue() throws IOException { - return in.vectorValue(); + public byte[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public int 
ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -505,33 +510,17 @@ public DocIdSetIterator iterator() { } @Override - public int docID() { - return in.docID(); + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), queryCancellation); } @Override - public int nextDoc() throws IOException { - final int nextDoc = in.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; - } - - @Override - public int advance(int target) throws IOException { - final int advance = in.advance(target); - checkAndThrowWithSampling(); - return advance; - } - - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + public ByteVectorValues copy() throws IOException { + return in.copy(); } } private static class ExitableFloatVectorValues extends FilterFloatVectorValues { - private int calls; private final QueryCancellation queryCancellation; ExitableFloatVectorValues(FloatVectorValues vectorValues, QueryCancellation queryCancellation) { @@ -541,17 +530,13 @@ private static class ExitableFloatVectorValues extends FilterFloatVectorValues { } @Override - public int advance(int target) throws IOException { - final int advance = super.advance(target); - checkAndThrowWithSampling(); - return advance; + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); } @Override - public int nextDoc() throws IOException { - final int nextDoc = super.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -575,13 +560,61 @@ public DocIdSetIterator iterator() { }; } - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + @Override + public DocIndexIterator iterator() { + return 
createExitableIterator(in.iterator(), queryCancellation); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); } } + private static KnnVectorValues.DocIndexIterator createExitableIterator( + KnnVectorValues.DocIndexIterator delegate, + QueryCancellation queryCancellation + ) { + return new KnnVectorValues.DocIndexIterator() { + private int calls; + + @Override + public int index() { + return delegate.index(); + } + + @Override + public int docID() { + return delegate.docID(); + } + + @Override + public long cost() { + return delegate.cost(); + } + + @Override + public int nextDoc() throws IOException { + int nextDoc = delegate.nextDoc(); + checkAndThrowWithSampling(); + return nextDoc; + } + + @Override + public int advance(int target) throws IOException { + final int advance = delegate.advance(target); + checkAndThrowWithSampling(); + return advance; + } + + private void checkAndThrowWithSampling() { + if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { + queryCancellation.checkCancelled(); + } + } + }; + } + private static class ExitableDocSetIterator extends DocIdSetIterator { private int calls; private final DocIdSetIterator in; @@ -636,18 +669,18 @@ protected FilterFloatVectorValues(FloatVectorValues in) { } @Override - public int docID() { - return in.docID(); + public DocIndexIterator iterator() { + return in.iterator(); } @Override - public int nextDoc() throws IOException { - return in.nextDoc(); + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); } @Override - public int advance(int target) throws IOException { - return in.advance(target); + public FloatVectorValues copy() throws IOException { + return in.copy(); } @Override @@ -660,9 +693,5 @@ public int size() { return in.size(); } - @Override - public float[] vectorValue() throws IOException { - return in.vectorValue(); - } } } diff --git 
a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java index 8f0a306e1eb3b..86b60d9984de5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; @@ -110,8 +111,9 @@ public void testRandom() throws Exception { totalSize += vectorValues.size(); StoredFields storedFields = ctx.reader().storedFields(); int docId; - while ((docId = vectorValues.nextDoc()) != NO_MORE_DOCS) { - byte[] v = vectorValues.vectorValue(); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + while ((docId = iterator.nextDoc()) != NO_MORE_DOCS) { + byte[] v = vectorValues.vectorValue(iterator.index()); assertEquals(dimension, v.length); String idString = storedFields.document(docId).getField("id").stringValue(); int id = Integer.parseInt(idString); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index cee60efb57327..f89b481a13fd8 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.VectorSimilarityFunction; @@ -68,9 +69,10 @@ public void testAddIndexesDirectory0FS() throws Exception { try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); - assertEquals(0, vectorValues.vectorValue()[0], 0); - assertEquals(NO_MORE_DOCS, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertEquals(0, vectorValues.vectorValue(iterator.index())[0], 0); + assertEquals(NO_MORE_DOCS, iterator.nextDoc()); } } } @@ -110,12 +112,13 @@ private void testAddIndexesDirectory01FS(VectorSimilarityFunction similarityFunc try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); // The merge order is randomized, we might get 1 first, or 2 - float value = vectorValues.vectorValue()[0]; + float value = vectorValues.vectorValue(iterator.index())[0]; assertTrue(value == 1 || value == 2); - assertEquals(1, vectorValues.nextDoc()); - value += vectorValues.vectorValue()[0]; + assertEquals(1, iterator.nextDoc()); + value += vectorValues.vectorValue(iterator.index())[0]; assertEquals(3f, value, 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java index b2ffb779be00b..de4ab0bc5df30 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.test.ESTestCase; @@ -25,7 +26,7 @@ public void testEmptyVectors() throws IOException { wrap(new float[0][0]), wrapMagnitudes(new float[0]) ); - assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.nextDoc()); + assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.iterator().nextDoc()); } public void testRandomVectors() throws IOException { @@ -47,9 +48,10 @@ public void testRandomVectors() throws IOException { wrapMagnitudes(magnitudes) ); + KnnVectorValues.DocIndexIterator iterator = normalizedCosineFloatVectorValues.iterator(); for (int i = 0; i < numVectors; i++) { - assertEquals(i, normalizedCosineFloatVectorValues.advance(i)); - assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(), (float) 1e-6); + assertEquals(i, iterator.advance(i)); + assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(iterator.index()), (float) 1e-6); assertEquals(magnitudes[i], normalizedCosineFloatVectorValues.magnitude(), (float) 1e-6); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java index c007156c806eb..baade683a90fd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java @@ -208,7 +208,41 @@ public int size() { } @Override - public byte[] vectorValue() { + public DocIndexIterator iterator() { + 
return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; + } + + @Override + public byte[] vectorValue(int ord) { + assert ord == index; for (int i = 0; i < byteVector.length; i++) { byteVector[i] = (byte) vectors[index][i]; } @@ -216,25 +250,12 @@ public byte[] vectorValue() { } @Override - public int docID() { - return index; - } - - @Override - public int nextDoc() { + public ByteVectorValues copy() { throw new UnsupportedOperationException(); } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; - } - - @Override - public VectorScorer scorer(byte[] floats) throws IOException { + public VectorScorer scorer(byte[] floats) { throw new UnsupportedOperationException(); } }; @@ -256,30 +277,51 @@ public int size() { } @Override - public float[] vectorValue() { - return vectors[index]; - } - - @Override - public int docID() { - return index; + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() throws IOException { + return advance(index + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; } @Override - public int nextDoc() { - return advance(index + 1); + public float[] vectorValue(int ord) { + assert ord == index; + return vectors[index]; } @Override - public int advance(int target) { - if (target >= size()) { - 
return NO_MORE_DOCS; - } - return index = target; + public FloatVectorValues copy() { + throw new UnsupportedOperationException(); } @Override - public VectorScorer scorer(float[] floats) throws IOException { + public VectorScorer scorer(float[] floats) { throw new UnsupportedOperationException(); } }; diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index fff5dcb4bb80b..f3357a72c9243 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; @@ -205,15 +206,17 @@ public void testExitableDirectoryReaderVectors() throws IOException { cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues vectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); cancelled.set(true); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); // On the first doc when already canceled, it throws - expectThrows(TaskCancelledException.class, vectorValues::nextDoc); + expectThrows(TaskCancelledException.class, iterator::nextDoc); cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues uncancelledVectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); + uncancelledVectorValues.iterator(); cancelled.set(true); searcher.removeQueryCancellation(cancellation); // On the first doc when already canceled, it throws, but with the cancellation 
removed, it should not - uncancelledVectorValues.nextDoc(); + iterator.nextDoc(); } private static class PointValuesIntersectVisitor implements PointValues.IntersectVisitor { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index dbabc891cec6e..db250b16eab16 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; @@ -205,8 +206,9 @@ public void testKnnVectors() throws Exception { FloatVectorValues vectorValues = leafReader.getFloatVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -239,8 +241,9 @@ public void testKnnByteVectors() throws Exception { ByteVectorValues vectorValues = leafReader.getByteVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - 
assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); From 2471dc9c84933b4f1957a08e8a4d73387b9d43cf Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 30 Sep 2024 17:19:05 +0200 Subject: [PATCH 373/417] Update lucene snapshot buildkite config to build from branch_10_0 (#113797) The Lucene 10 branch has been cut, we want to switch the snapshot creation to the newly created branch until Lucene 10 has been released. Later, we will switch to branch_10x. --- .buildkite/pipelines/lucene-snapshot/build-snapshot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml index f1813ff6003cb..bb0e82df98ede 100644 --- a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml +++ b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml @@ -4,7 +4,7 @@ steps: key: lucene-build if: (build.env("LUCENE_BUILD_ID") == null || build.env("LUCENE_BUILD_ID") == "") build: - branch: main + branch: branch_10_0 - wait - label: Upload and update lucene snapshot command: .buildkite/scripts/lucene-snapshot/upload-snapshot.sh From 79d3d6ab60eb7350e767a9d9ff0da754bd6a35cd Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 30 Sep 2024 20:40:06 +0200 Subject: [PATCH 374/417] Make dutch_kp and lovins no op token filters --- .../common/StemmerTokenFilterFactory.java | 43 +++++++++++-------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 
559bc9b3ff3da..6b622444980d3 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -9,6 +9,7 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicStemFilter; import org.apache.lucene.analysis.bg.BulgarianStemFilter; @@ -86,27 +87,13 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { private static final TokenStream EMPTY_TOKEN_STREAM = new EmptyTokenStream(); - private String language; + private final String language; StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { super(name, settings); this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter"))); // check that we have a valid language by trying to create a TokenStream create(EMPTY_TOKEN_STREAM).close(); - if ("lovins".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "lovins_deprecation", - "The [lovins] stemmer is deprecated and will be removed in a future version." - ); - } - if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "dutch_kp_deprecation", - "The [dutch_kp] stemmer is deprecated and will be removed in a future version." 
- ); - } } @Override @@ -134,8 +121,17 @@ public TokenStream create(TokenStream tokenStream) { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - // TODO Lucene 10 upgrade: KPStemmer has been removed, what is the migration path for users relying on it? - throw new UnsupportedOperationException(); + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "dutch_kp_deprecation", + "The [dutch_kp] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; // English stemmers } else if ("english".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -144,8 +140,17 @@ public TokenStream create(TokenStream tokenStream) { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - // TODO Lucene 10 upgrade: LovinsStemmer has been removed, what is the migration path for users relying on it? - throw new UnsupportedOperationException(); + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "lovins_deprecation", + "The [lovins] stemmer is deprecated and will be removed in a future version." 
+ ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { From a20bf849cf5a0e50e7d975ce52a8236cee13f662 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 1 Oct 2024 06:21:46 +0200 Subject: [PATCH 375/417] Fix needless use of concurrent collector managers (#113739) The utility methods for creating the collectors that were removed from Lucene created non-concurrent collector managers. The default constructors create concurrent ones though which leads to a performance regression, especially when sorting. -> fixed by adjusting to non-concurrent managers again --- .../join/query/ParentChildInnerHitContextBuilder.java | 4 ++-- .../org/elasticsearch/index/query/NestedQueryBuilder.java | 4 ++-- .../bucket/sampler/BestDocsDeferringCollector.java | 2 +- .../search/aggregations/metrics/TopHitsAggregator.java | 5 +++-- .../compute/lucene/LuceneTopNSourceOperator.java | 2 +- 5 files changed, 9 insertions(+), 8 deletions(-) diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index aaedda7f0e539..6b00e94431bef 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -137,12 +137,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, Integer.MAX_VALUE).newCollector(); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, 
false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = new TopScoreDocCollectorManager(topN, Integer.MAX_VALUE).newCollector(); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } for (LeafReaderContext ctx : this.context.searcher().getIndexReader().leaves()) { diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index f1544d49c2ba5..626875c75a5fe 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -443,12 +443,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, Integer.MAX_VALUE).newCollector(); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = new TopScoreDocCollectorManager(topN, Integer.MAX_VALUE).newCollector(); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 0f86b802d857f..37cee75c11b48 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -99,7 +99,7 @@ public void collect(int doc, long bucket) throws IOException { // Designed to be overridden by subclasses that may score docs by criteria // other than Lucene score protected TopDocsCollector createTopDocsCollector(int size) throws IOException { - return new TopScoreDocCollectorManager(size, Integer.MAX_VALUE).newCollector(); + return new TopScoreDocCollectorManager(size, null, Integer.MAX_VALUE, false).newCollector(); } // Can be overridden by subclasses that have a different priority queue implementation diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index d80b08042671f..87d8f839dfca1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -138,13 +138,14 @@ public void collect(int docId, long bucket) throws IOException { // but here we create collectors ourselves and we need prevent OOM because of crazy an offset and size. 
topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); if (sort == null) { - TopScoreDocCollector topScoreDocCollector = new TopScoreDocCollectorManager(topN, Integer.MAX_VALUE).newCollector(); + TopScoreDocCollector topScoreDocCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false) + .newCollector(); collectors = new Collectors(topScoreDocCollector, null); } else { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits(){ // Note that this would require to catch CollectionTerminatedException collectors = new Collectors( - new TopFieldCollectorManager(sort.sort, topN, Integer.MAX_VALUE).newCollector(), + new TopFieldCollectorManager(sort.sort, topN, null, Integer.MAX_VALUE, false).newCollector(), subSearchContext.trackScores() ? new MaxScoreCollector() : null ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index c9990bd5f9994..0f600958b93b3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -233,7 +233,7 @@ static final class PerShardCollector { } // We don't use CollectorManager here as we don't retrieve the total hits and sort by score. 
- this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, 0).newCollector(); + this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, null, 0, false).newCollector(); } LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException { From 85e87303282c7bf3731857fea501f4a1b75858fc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 1 Oct 2024 06:12:13 +0000 Subject: [PATCH 376/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-524ea208c87 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f17785ae4b464..7e2cfe24188b9 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-22ac47c07ad +lucene = 10.0.0-snapshot-524ea208c87 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 47272f8c109fa..9a59331bc1b4d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From ef2d0b84cb20a872f825e752082bfbcdbd17ed13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 1 Oct 2024 10:41:00 +0200 Subject: [PATCH 377/417] Make "german2" an alias for "german" snowball stemmer (#113614) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
With Lucene 10, German2Stemmer, which is used as a parameter for the Snowball stemmer, has been folded into GermanStemmer. This results mainly in different treatment Umlauts, i.e. where formerly "german" would stem "Bücher" -> "Buch" but "Buecher" -> "Buech" and "german2" would stem both to the same form "Buch", this is now true for the general "german" stemmer variant. This change makes the defunct "german2" language stemmer an alias for the "german" stemmer that now includes the same improved functionality. --- docs/changelog/113614.yaml | 18 +++++++++ .../tokenfilters/stemmer-tokenfilter.asciidoc | 1 - .../common/StemmerTokenFilterFactory.java | 9 ++++- .../StemmerTokenFilterFactoryTests.java | 38 +++++++++++++++++++ 4 files changed, 64 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/113614.yaml diff --git a/docs/changelog/113614.yaml b/docs/changelog/113614.yaml new file mode 100644 index 0000000000000..bd9dcb3e38772 --- /dev/null +++ b/docs/changelog/113614.yaml @@ -0,0 +1,18 @@ +pr: 113614 +summary: The 'german2' stemmer is now an alias for the 'german' snowball stemmer +area: Analysis +type: breaking +issues: [] +breaking: + title: The "german2" snowball stemmer is now an alias for the "german" stemmer + area: Analysis + details: >- + Lucene 10 has merged the improved "german2" snowball language stemmer with the + "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for + "german". This may results in slightly different tokens being generated for + terms with umlaut substitution (like "ue" for "ü" etc...) + impact: >- + Replace usages of "german2" with "german" in analysis configuration. Old + indices that use the "german" stemmer should be reindexed if possible. 
+ notable: false + diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index 4cd088935af19..d9e2120afe6d1 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -173,7 +173,6 @@ http://bvg.udc.es/recursos_lingua/stemming.jsp[`minimal_galician`] (Plural step German:: https://dl.acm.org/citation.cfm?id=1141523[*`light_german`*], https://snowballstem.org/algorithms/german/stemmer.html[`german`], -https://snowballstem.org/algorithms/german2/stemmer.html[`german2`], http://members.unine.ch/jacques.savoy/clef/morpho.pdf[`minimal_german`] Greek:: diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 6b622444980d3..7548c8ad2b88b 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -89,6 +89,8 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { private final String language; + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(StemmerTokenFilterFactory.class); + StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { super(name, settings); this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter"))); @@ -190,7 +192,12 @@ public boolean incrementToken() { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - // TODO Lucene 10 upgrade: how about bw comp for users 
relying on german2 stemmer that is now folded into german stemmer? + DEPRECATION_LOGGER.critical( + DeprecationCategory.ANALYSIS, + "german2_stemmer_deprecation", + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. " + + "Replace all usages of 'german2' with 'german'." + ); return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java index 8f3d52f0174c6..bb06c221873b5 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; @@ -16,6 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -103,6 +105,42 @@ public void testMultipleLanguagesThrowsException() throws IOException { assertEquals("Invalid stemmer class specified: [english, light_english]", e.getMessage()); } + public void testGermanAndGerman2Stemmer() throws IOException { + IndexVersion v = IndexVersionUtils.randomVersionBetween(random(), 
IndexVersions.UPGRADE_TO_LUCENE_10_0_0, IndexVersion.current()); + Analyzer analyzer = createGermanStemmer("german", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + + analyzer = createGermanStemmer("german2", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + assertWarnings( + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. " + + "Replace all usages of 'german2' with 'german'." + ); + } + + private static Analyzer createGermanStemmer(String variant, IndexVersion v) throws IOException { + + Settings settings = Settings.builder() + .put("index.analysis.filter.my_german.type", "stemmer") + .put("index.analysis.filter.my_german.language", variant) + .put("index.analysis.analyzer.my_german.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_german.filter", "my_german") + .put(SETTING_VERSION_CREATED, v) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_german"); + assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class)); + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader("Buecher oder Bücher")); + TokenStream create = tokenFilter.create(tokenizer); + assertThat(create, instanceOf(SnowballFilter.class)); + IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers; + NamedAnalyzer analyzer = indexAnalyzers.get("my_german"); + return analyzer; + } + public void testKpDeprecation() throws IOException { IndexVersion v = IndexVersionUtils.randomVersion(random()); Settings settings = Settings.builder() From e700fe17e6621a35f64c5ba4c08bbd4ff9045bd2 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 1 Oct 2024 11:24:23 +0100 Subject: [PATCH 378/417] Fix build IOConext READ -> DEFAULT --- 
.../repositories/blobstore/BlobStoreRepository.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 1975e6cb55940..5e45aec6a5240 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -4030,7 +4030,7 @@ protected void snapshotFile(SnapshotShardContext context, FileInfo fileInfo) thr final String file = fileInfo.physicalName(); try ( Releasable ignored = context.withCommitRef(); - IndexInput indexInput = store.openVerifyingInput(file, IOContext.READ, fileInfo.metadata()) + IndexInput indexInput = store.openVerifyingInput(file, IOContext.DEFAULT, fileInfo.metadata()) ) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); From 6a48626737352678f0a3a08968fcaa10bf21c895 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 1 Oct 2024 14:15:33 +0100 Subject: [PATCH 379/417] Fix bad merge IOContext READ -> DEFAULT --- .../xpack/ccr/repository/CcrRestoreSourceService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index 6b390ab5747a8..164e6ed5406ae 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -245,7 +245,7 @@ private Store.MetadataSnapshot getMetadata() throws IOException { private long readFileBytes(String fileName, ByteArray reference) throws IOException { try (Releasable ignored = 
keyedLock.acquire(fileName)) { - var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : IOContext.READ; + var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : IOContext.DEFAULT; final IndexInput indexInput = cachedInputs.computeIfAbsent(fileName, f -> { try { return commitRef.getIndexCommit().getDirectory().openInput(fileName, context); From ef5d73f8f26bf447155059bfc7a69de388e53876 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 1 Oct 2024 15:27:22 +0200 Subject: [PATCH 380/417] fix compilation after merge --- .../org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index 069eb126bcb3c..18adebb145f98 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -196,7 +196,7 @@ static final OldSegmentInfos readCommit(Directory directory, String segmentFileN long generation = generationFromSegmentsFileName(segmentFileName); // System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); - try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName)) { try { return readCommit(directory, input, generation, minSupportedMajorVersion); } catch (EOFException | NoSuchFileException | FileNotFoundException e) { From d5a46044a5a366c845070a8aa182de32ce982245 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 2 Oct 2024 06:13:17 +0000 Subject: [PATCH 381/417] [Automated] Update Lucene snapshot to 
10.0.0-snapshot-4461bc1eff4 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7e2cfe24188b9..b5629adca6439 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-524ea208c87 +lucene = 10.0.0-snapshot-4461bc1eff4 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9a59331bc1b4d..a1f4cc106552a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e7644ca5c0a58d9b7d0970bcf99c8116afd89766 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 2 Oct 2024 09:32:42 +0200 Subject: [PATCH 382/417] Replace Lucene912Codec with Lucene100Codec See https://github.com/apache/lucene/pull/13815 --- .../index/codec/CodecService.java | 6 +++--- .../index/codec/Elasticsearch816Codec.java | 4 ++-- .../index/codec/LegacyPerFieldMapperCodec.java | 6 +++--- .../diskusage/IndexDiskUsageAnalyzerTests.java | 18 +++++++++--------- .../vectors/ES813FlatVectorFormatTests.java | 4 ++-- .../ES813Int8FlatVectorFormatTests.java | 4 ++-- ...4HnswScalarQuantizedVectorsFormatTests.java | 4 ++-- .../vectors/ES815BitFlatVectorFormatTests.java | 4 ++-- .../ES815HnswBitVectorsFormatTests.java | 4 ++-- .../codec/zstd/StoredFieldCodecDuelTests.java | 6 +++--- .../engine/CompletionStatsCacheTests.java | 4 ++-- 11 
files changed, 32 insertions(+), 32 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index 144b99abe5644..c1c392ac07f18 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -46,7 +46,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -58,7 +58,7 @@ public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) ); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); 
codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 00711c7ecc306..f1db9d8c72be1 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -13,8 +13,8 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; @@ -64,7 +64,7 @@ public Elasticsearch816Codec() { * worse space-efficiency or vice-versa. 
*/ public Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode mode) { - super("Elasticsearch816", new Lucene912Codec()); + super("Elasticsearch816", new Lucene100Codec()); this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); this.defaultPostingsFormat = new Lucene912PostingsFormat(); this.defaultDVFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index 64c2ca788f63c..bf2c5a9f01e29 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -22,11 +22,11 @@ * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene912Codec { +public final class LegacyPerFieldMapperCodec extends Lucene100Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene912Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene100Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. 
You must create a copy of the current Elasticsearch diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 1489fa6de90c5..bf4a28b9c60b2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,8 +12,8 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -328,7 +328,7 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene912Codec(Lucene912Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene100Codec(Lucene100Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { @@ -415,25 +415,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene912Codec.Mode mode() { - return Lucene912Codec.Mode.BEST_SPEED; + Lucene100Codec.Mode mode() { + return Lucene100Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene912Codec.Mode mode() { - return Lucene912Codec.Mode.BEST_COMPRESSION; + Lucene100Codec.Mode mode() { + return 
Lucene100Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene912Codec.Mode mode(); + abstract Lucene100Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(codecMode.mode())); + .setCodec(new Lucene100Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -641,7 +641,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(mode.mode()) { + .setCodec(new Lucene100Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index aa50bc26c4443..57cca6eea86ec 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec 
getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 8cb927036588a..9069b094ee483 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index f89b481a13fd8..549a14ca6c31b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import 
org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -42,7 +42,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index 90d2584feb3f2..034d428b25209 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index add90ea271fa1..4af6a405c7705 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index c3fea6c7a189b..437ba1cecc11d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -35,13 +35,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new 
PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 6565a11a860ec..6d205a22433b4 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -45,7 +45,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); final PostingsFormat postingsFormat = new Completion912PostingsFormat(); - indexWriterConfig.setCodec(new Lucene912Codec() { + indexWriterConfig.setCodec(new Lucene100Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields From d623d992eb46ed47d4ce9ee1aa9b0f85a088f305 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 2 Oct 
2024 10:26:32 +0200 Subject: [PATCH 383/417] Introduce Elasticsearch900Codec --- .../elasticsearch/common/lucene/Lucene.java | 2 +- .../index/codec/Elasticsearch816Codec.java | 4 +- .../index/codec/Elasticsearch900Codec.java | 131 ++++++++++++++++++ .../index/codec/PerFieldMapperCodec.java | 2 +- 4 files changed, 135 insertions(+), 4 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 700472a0809b6..ebd030ec55424 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -89,7 +89,7 @@ import java.util.Objects; public class Lucene { - public static final String LATEST_CODEC = "Lucene912"; + public static final String LATEST_CODEC = "Lucene100"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index f1db9d8c72be1..dc8e0d109b0f8 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -9,11 +9,11 @@ package org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; -import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; @@ -64,7 
+64,7 @@ public Elasticsearch816Codec() { * worse space-efficiency or vice-versa. */ public Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode mode) { - super("Elasticsearch816", new Lucene100Codec()); + super("Elasticsearch816", new Lucene912Codec()); this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); this.defaultPostingsFormat = new Lucene912PostingsFormat(); this.defaultDVFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java new file mode 100644 index 0000000000000..50a917248cebd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 9.0. This extends the Lucene 10.0 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch900Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch900Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch900Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch900Codec.this.getKnnVectorsFormatForField(field); 
+ } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch900Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch900", new Lucene100Codec()); + this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.defaultPostingsFormat = new Lucene912PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *

    The default implementation always returns "Lucene912". + * + *

    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field + * . + * + *

    The default implementation always returns "Lucene912". + * + *

    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *

    The default implementation always returns "Lucene912". + * + *

    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index 83c5cb396d88b..b60b88da5949d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -26,7 +26,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public final class PerFieldMapperCodec extends Elasticsearch816Codec { +public final class PerFieldMapperCodec extends Elasticsearch900Codec { private final PerFieldFormatSupplier formatSupplier; From 66c2c3cc9b922f70c518094535cd5863bbcd5ea3 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 2 Oct 2024 10:45:09 +0200 Subject: [PATCH 384/417] Add the missing bits for Elasticsearch900Codec --- server/src/main/java/module-info.java | 3 ++- .../META-INF/services/org.apache.lucene.codecs.Codec | 1 + .../test/java/org/elasticsearch/index/codec/CodecTests.java | 2 +- .../zstd/Zstd814BestCompressionStoredFieldsFormatTests.java | 4 ++-- .../codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java | 4 ++-- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 56672957dd571..97aa40a83f62b 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -457,7 +457,8 @@ provides org.apache.lucene.codecs.Codec with org.elasticsearch.index.codec.Elasticsearch814Codec, - 
org.elasticsearch.index.codec.Elasticsearch816Codec; + org.elasticsearch.index.codec.Elasticsearch816Codec, + org.elasticsearch.index.codec.Elasticsearch900Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 4e85ba2cf479f..33c8081971202 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,2 +1,3 @@ org.elasticsearch.index.codec.Elasticsearch814Codec org.elasticsearch.index.codec.Elasticsearch816Codec +org.elasticsearch.index.codec.Elasticsearch900Codec diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 10b0b54d2d7e2..9e4a19eb039fd 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -52,7 +52,7 @@ public void testResolveDefaultCodecs() throws Exception { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch816Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 71c7464657e72..77a7585e3b518 100644 --- 
a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 02a1b10697907..3d6cfea70d121 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { From 7089ff344d3583d46406f41304ac8ca21ddc1e46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: 
Wed, 2 Oct 2024 14:06:57 +0200 Subject: [PATCH 385/417] Conditional stemming for 'persian' analyzer (#113482) The 'persian' analyzer for Lucene 10 comes with PersianStemFilter as the last token filter by default. In order to maintain compatibility for old indices, we use the new analyzer for new indices but configure a legacy analyzer that is used with existing indices. --- docs/changelog/113482.yaml | 27 +++++ .../common/PersianAnalyzerProvider.java | 56 +++++++++- .../common/PersianAnalyzerProviderTests.java | 78 ++++++++++++++ .../test/analysis-common/20_analyzers.yml | 25 +++++ .../upgrades/FullClusterRestartIT.java | 101 ++++++++++++++++++ 5 files changed, 283 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/113482.yaml create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java diff --git a/docs/changelog/113482.yaml b/docs/changelog/113482.yaml new file mode 100644 index 0000000000000..f16f183ea05c6 --- /dev/null +++ b/docs/changelog/113482.yaml @@ -0,0 +1,27 @@ +pr: 113482 +summary: The 'persian' analyzer has stemmer by default +area: Analysis +type: breaking +issues: +- 113050 +breaking: + title: The 'persian' analyzer has stemmer by default + area: Analysis + details: >- + Lucene 10 has added a final stemming step to its PersianAnalyzer that we + expose as 'persian' analyzer. Existing indices will keep the old + non-stemming behaviour while new indices will see the updated behaviour with + added stemming. + Users that want to maintain the non-stemming behaviour need to define their + own analyzer as outlines in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. + Users that want to use the new stemming behaviour for existing indices will + have to reindex their data. + impact: >- + Indexing with the 'persian' analyzer will produce slightly different token. + Users should check if this impacts their search results. 
If they want to + maintain the legacy non-stemming behaviour they can define their own + analyzer equivalent as explained in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. + notable: false + diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java index 9ea3a9fa4eee9..917a45188123c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java @@ -9,24 +9,72 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.LowerCaseFilter; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; +import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.fa.PersianAnalyzer; +import org.apache.lucene.analysis.fa.PersianCharFilter; +import org.apache.lucene.analysis.fa.PersianNormalizationFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +import java.io.Reader; - private final PersianAnalyzer analyzer; +public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { + + private final 
StopwordAnalyzerBase analyzer; PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10 this analyzer contains stemming by default + analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + } else { + // for older index versions we need the old analyzer behaviour without stemming + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ + result = new PersianNormalizationFilter(result); + /* + * the order here is important: the stopword list is normalized with the + * above! 
+ */ + return new TokenStreamComponents(source, new StopFilter(result, stopwords)); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + TokenStream result = new LowerCaseFilter(in); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ + result = new PersianNormalizationFilter(result); + return result; + } + + protected Reader initReader(String fieldName, Reader reader) { + return new PersianCharFilter(reader); + } + }; + } } @Override - public PersianAnalyzer get() { + public StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java new file mode 100644 index 0000000000000..7b962538c2a10 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Tests Persian Analyzer factory and behavioural changes with Lucene 10 + */ +public class PersianAnalyzerProviderTests extends ESTokenStreamTestCase { + + public void testPersianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زياد", "خوانده" }); + } + + public void testPersianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + 
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زيادي", "خوانده" }); + } +} diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index 7674b95af4851..8930e485aa249 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -901,6 +901,31 @@ - length: { tokens: 1 } - match: { tokens.0.token: خورد } +--- +"persian stemming": + - requires: + cluster_features: ["lucene_10_upgrade"] + reason: "test requires persian analyzer stemming capabilities that come with Lucene 10" + + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: persian + + - do: + indices.analyze: + index: test + body: + text: كتابها + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: كتاب } + --- "portuguese": - do: diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 
ee18f8fc2ec4b..43ed1755317f6 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; +import org.elasticsearch.search.SearchFeatures; import org.elasticsearch.test.NotEqualMessageBuilder; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -1726,6 +1727,106 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception { } } + /** + * This test ensures that search results on old indices using "persian" analyzer don't change + * after we introduce Lucene 10 + */ + public void testPersianAnalyzerBWC() throws Exception { + var originalClusterLegacyPersianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyPersianAnalyzer); + final String indexName = "test_persian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String mapping = """ + { + "properties": { + "textfield" : { + "type": "text", + "analyzer": "persian" + } + } + } + """; + + String query = """ + { + "query": { + "match": { + "textfield": "كتابها" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + 
) + ); + refresh(indexName); + + assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + ) + ); + refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); + + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + /** * This test ensures that soft deletes are enabled a when upgrading a pre-8 cluster to 8.0+ */ From 23c0bedc81f4c44456faea94dfd829278cb21a37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 2 Oct 2024 18:44:58 +0200 Subject: [PATCH 386/417] Add bwc layer for 'romanian' analyzer (#113906) The 'romanian' language analyzer has been improved in Lucene 10 in two important ways. 
First, the snowball stemmer has been modified to work with s-comma and t-comma characters but only with their cedilla forms used when Romanian didn't have full Unicode support (https://github.com/snowballstem/snowball/pull/177). Second, the analyzer now contains a normalization step to map cedilla forms to forms with comma. In order to maintain backwards compatibility with existing indices, this change moves the Lucene 9 stemmer over to the analysis module was a deprecated variant and creates the analyzer for existing indices with the "old" stemmer and without the normalization step. New indices automatically run with the improved behaviour. --- .../common/LegacyRomanianStemmer.java | 741 ++++++++++++++++++ .../common/RomanianAnalyzerProvider.java | 46 +- .../common/RomanianAnalyzerTests.java | 80 ++ .../upgrades/FullClusterRestartIT.java | 105 +++ 4 files changed, 965 insertions(+), 7 deletions(-) create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java new file mode 100644 index 0000000000000..0eb8d916307ae --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java @@ -0,0 +1,741 @@ +/* + * @notice + * Generated by Snowball 2.0.0 - https://snowballstem.org/ + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ + +package org.elasticsearch.analysis.common; + +import org.tartarus.snowball.Among; + +/** +* This class implements the stemming algorithm defined by a snowball script. 
+* NOTE: This is the RomanianStemmer used in Lucene 9 and should only be used for backwards compatibility +*/ +@SuppressWarnings("checkstyle:DescendantToken") +class LegacyRomanianStemmer extends org.tartarus.snowball.SnowballStemmer { + + private static final java.lang.invoke.MethodHandles.Lookup methodObject = java.lang.invoke.MethodHandles.lookup(); + + private static final Among a_0[] = { new Among("", -1, 3), new Among("I", 0, 1), new Among("U", 0, 2) }; + + private static final Among a_1[] = { + new Among("ea", -1, 3), + new Among("a\u0163ia", -1, 7), + new Among("aua", -1, 2), + new Among("iua", -1, 4), + new Among("a\u0163ie", -1, 7), + new Among("ele", -1, 3), + new Among("ile", -1, 5), + new Among("iile", 6, 4), + new Among("iei", -1, 4), + new Among("atei", -1, 6), + new Among("ii", -1, 4), + new Among("ului", -1, 1), + new Among("ul", -1, 1), + new Among("elor", -1, 3), + new Among("ilor", -1, 4), + new Among("iilor", 14, 4) }; + + private static final Among a_2[] = { + new Among("icala", -1, 4), + new Among("iciva", -1, 4), + new Among("ativa", -1, 5), + new Among("itiva", -1, 6), + new Among("icale", -1, 4), + new Among("a\u0163iune", -1, 5), + new Among("i\u0163iune", -1, 6), + new Among("atoare", -1, 5), + new Among("itoare", -1, 6), + new Among("\u0103toare", -1, 5), + new Among("icitate", -1, 4), + new Among("abilitate", -1, 1), + new Among("ibilitate", -1, 2), + new Among("ivitate", -1, 3), + new Among("icive", -1, 4), + new Among("ative", -1, 5), + new Among("itive", -1, 6), + new Among("icali", -1, 4), + new Among("atori", -1, 5), + new Among("icatori", 18, 4), + new Among("itori", -1, 6), + new Among("\u0103tori", -1, 5), + new Among("icitati", -1, 4), + new Among("abilitati", -1, 1), + new Among("ivitati", -1, 3), + new Among("icivi", -1, 4), + new Among("ativi", -1, 5), + new Among("itivi", -1, 6), + new Among("icit\u0103i", -1, 4), + new Among("abilit\u0103i", -1, 1), + new Among("ivit\u0103i", -1, 3), + new Among("icit\u0103\u0163i", -1, 
4), + new Among("abilit\u0103\u0163i", -1, 1), + new Among("ivit\u0103\u0163i", -1, 3), + new Among("ical", -1, 4), + new Among("ator", -1, 5), + new Among("icator", 35, 4), + new Among("itor", -1, 6), + new Among("\u0103tor", -1, 5), + new Among("iciv", -1, 4), + new Among("ativ", -1, 5), + new Among("itiv", -1, 6), + new Among("ical\u0103", -1, 4), + new Among("iciv\u0103", -1, 4), + new Among("ativ\u0103", -1, 5), + new Among("itiv\u0103", -1, 6) }; + + private static final Among a_3[] = { + new Among("ica", -1, 1), + new Among("abila", -1, 1), + new Among("ibila", -1, 1), + new Among("oasa", -1, 1), + new Among("ata", -1, 1), + new Among("ita", -1, 1), + new Among("anta", -1, 1), + new Among("ista", -1, 3), + new Among("uta", -1, 1), + new Among("iva", -1, 1), + new Among("ic", -1, 1), + new Among("ice", -1, 1), + new Among("abile", -1, 1), + new Among("ibile", -1, 1), + new Among("isme", -1, 3), + new Among("iune", -1, 2), + new Among("oase", -1, 1), + new Among("ate", -1, 1), + new Among("itate", 17, 1), + new Among("ite", -1, 1), + new Among("ante", -1, 1), + new Among("iste", -1, 3), + new Among("ute", -1, 1), + new Among("ive", -1, 1), + new Among("ici", -1, 1), + new Among("abili", -1, 1), + new Among("ibili", -1, 1), + new Among("iuni", -1, 2), + new Among("atori", -1, 1), + new Among("osi", -1, 1), + new Among("ati", -1, 1), + new Among("itati", 30, 1), + new Among("iti", -1, 1), + new Among("anti", -1, 1), + new Among("isti", -1, 3), + new Among("uti", -1, 1), + new Among("i\u015Fti", -1, 3), + new Among("ivi", -1, 1), + new Among("it\u0103i", -1, 1), + new Among("o\u015Fi", -1, 1), + new Among("it\u0103\u0163i", -1, 1), + new Among("abil", -1, 1), + new Among("ibil", -1, 1), + new Among("ism", -1, 3), + new Among("ator", -1, 1), + new Among("os", -1, 1), + new Among("at", -1, 1), + new Among("it", -1, 1), + new Among("ant", -1, 1), + new Among("ist", -1, 3), + new Among("ut", -1, 1), + new Among("iv", -1, 1), + new Among("ic\u0103", -1, 1), + new 
Among("abil\u0103", -1, 1), + new Among("ibil\u0103", -1, 1), + new Among("oas\u0103", -1, 1), + new Among("at\u0103", -1, 1), + new Among("it\u0103", -1, 1), + new Among("ant\u0103", -1, 1), + new Among("ist\u0103", -1, 3), + new Among("ut\u0103", -1, 1), + new Among("iv\u0103", -1, 1) }; + + private static final Among a_4[] = { + new Among("ea", -1, 1), + new Among("ia", -1, 1), + new Among("esc", -1, 1), + new Among("\u0103sc", -1, 1), + new Among("ind", -1, 1), + new Among("\u00E2nd", -1, 1), + new Among("are", -1, 1), + new Among("ere", -1, 1), + new Among("ire", -1, 1), + new Among("\u00E2re", -1, 1), + new Among("se", -1, 2), + new Among("ase", 10, 1), + new Among("sese", 10, 2), + new Among("ise", 10, 1), + new Among("use", 10, 1), + new Among("\u00E2se", 10, 1), + new Among("e\u015Fte", -1, 1), + new Among("\u0103\u015Fte", -1, 1), + new Among("eze", -1, 1), + new Among("ai", -1, 1), + new Among("eai", 19, 1), + new Among("iai", 19, 1), + new Among("sei", -1, 2), + new Among("e\u015Fti", -1, 1), + new Among("\u0103\u015Fti", -1, 1), + new Among("ui", -1, 1), + new Among("ezi", -1, 1), + new Among("\u00E2i", -1, 1), + new Among("a\u015Fi", -1, 1), + new Among("se\u015Fi", -1, 2), + new Among("ase\u015Fi", 29, 1), + new Among("sese\u015Fi", 29, 2), + new Among("ise\u015Fi", 29, 1), + new Among("use\u015Fi", 29, 1), + new Among("\u00E2se\u015Fi", 29, 1), + new Among("i\u015Fi", -1, 1), + new Among("u\u015Fi", -1, 1), + new Among("\u00E2\u015Fi", -1, 1), + new Among("a\u0163i", -1, 2), + new Among("ea\u0163i", 38, 1), + new Among("ia\u0163i", 38, 1), + new Among("e\u0163i", -1, 2), + new Among("i\u0163i", -1, 2), + new Among("\u00E2\u0163i", -1, 2), + new Among("ar\u0103\u0163i", -1, 1), + new Among("ser\u0103\u0163i", -1, 2), + new Among("aser\u0103\u0163i", 45, 1), + new Among("seser\u0103\u0163i", 45, 2), + new Among("iser\u0103\u0163i", 45, 1), + new Among("user\u0103\u0163i", 45, 1), + new Among("\u00E2ser\u0103\u0163i", 45, 1), + new 
Among("ir\u0103\u0163i", -1, 1), + new Among("ur\u0103\u0163i", -1, 1), + new Among("\u00E2r\u0103\u0163i", -1, 1), + new Among("am", -1, 1), + new Among("eam", 54, 1), + new Among("iam", 54, 1), + new Among("em", -1, 2), + new Among("asem", 57, 1), + new Among("sesem", 57, 2), + new Among("isem", 57, 1), + new Among("usem", 57, 1), + new Among("\u00E2sem", 57, 1), + new Among("im", -1, 2), + new Among("\u00E2m", -1, 2), + new Among("\u0103m", -1, 2), + new Among("ar\u0103m", 65, 1), + new Among("ser\u0103m", 65, 2), + new Among("aser\u0103m", 67, 1), + new Among("seser\u0103m", 67, 2), + new Among("iser\u0103m", 67, 1), + new Among("user\u0103m", 67, 1), + new Among("\u00E2ser\u0103m", 67, 1), + new Among("ir\u0103m", 65, 1), + new Among("ur\u0103m", 65, 1), + new Among("\u00E2r\u0103m", 65, 1), + new Among("au", -1, 1), + new Among("eau", 76, 1), + new Among("iau", 76, 1), + new Among("indu", -1, 1), + new Among("\u00E2ndu", -1, 1), + new Among("ez", -1, 1), + new Among("easc\u0103", -1, 1), + new Among("ar\u0103", -1, 1), + new Among("ser\u0103", -1, 2), + new Among("aser\u0103", 84, 1), + new Among("seser\u0103", 84, 2), + new Among("iser\u0103", 84, 1), + new Among("user\u0103", 84, 1), + new Among("\u00E2ser\u0103", 84, 1), + new Among("ir\u0103", -1, 1), + new Among("ur\u0103", -1, 1), + new Among("\u00E2r\u0103", -1, 1), + new Among("eaz\u0103", -1, 1) }; + + private static final Among a_5[] = { + new Among("a", -1, 1), + new Among("e", -1, 1), + new Among("ie", 1, 1), + new Among("i", -1, 1), + new Among("\u0103", -1, 1) }; + + private static final char g_v[] = { 17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 32, 0, 0, 4 }; + + private boolean B_standard_suffix_removed; + private int I_p2; + private int I_p1; + private int I_pV; + + private boolean r_prelude() { + while (true) { + int v_1 = cursor; + lab0: { + golab1: while (true) { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + bra = cursor; + lab3: { + int 
v_3 = cursor; + lab4: { + if (!(eq_s("u"))) { + break lab4; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab4; + } + slice_from("U"); + break lab3; + } + cursor = v_3; + if (!(eq_s("i"))) { + break lab2; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + slice_from("I"); + } + cursor = v_2; + break golab1; + } + cursor = v_2; + if (cursor >= limit) { + break lab0; + } + cursor++; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_mark_regions() { + I_pV = limit; + I_p1 = limit; + I_p2 = limit; + int v_1 = cursor; + lab0: { + lab1: { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + lab3: { + int v_3 = cursor; + lab4: { + if (!(out_grouping(g_v, 97, 259))) { + break lab4; + } + golab5: while (true) { + lab6: { + if (!(in_grouping(g_v, 97, 259))) { + break lab6; + } + break golab5; + } + if (cursor >= limit) { + break lab4; + } + cursor++; + } + break lab3; + } + cursor = v_3; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + golab7: while (true) { + lab8: { + if (!(out_grouping(g_v, 97, 259))) { + break lab8; + } + break golab7; + } + if (cursor >= limit) { + break lab2; + } + cursor++; + } + } + break lab1; + } + cursor = v_2; + if (!(out_grouping(g_v, 97, 259))) { + break lab0; + } + lab9: { + int v_6 = cursor; + lab10: { + if (!(out_grouping(g_v, 97, 259))) { + break lab10; + } + golab11: while (true) { + lab12: { + if (!(in_grouping(g_v, 97, 259))) { + break lab12; + } + break golab11; + } + if (cursor >= limit) { + break lab10; + } + cursor++; + } + break lab9; + } + cursor = v_6; + if (!(in_grouping(g_v, 97, 259))) { + break lab0; + } + if (cursor >= limit) { + break lab0; + } + cursor++; + } + } + I_pV = cursor; + } + cursor = v_1; + int v_8 = cursor; + lab13: { + golab14: while (true) { + lab15: { + if (!(in_grouping(g_v, 97, 259))) { + break lab15; + } + break golab14; + } + if (cursor >= limit) { + break lab13; + } + 
cursor++; + } + golab16: while (true) { + lab17: { + if (!(out_grouping(g_v, 97, 259))) { + break lab17; + } + break golab16; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p1 = cursor; + golab18: while (true) { + lab19: { + if (!(in_grouping(g_v, 97, 259))) { + break lab19; + } + break golab18; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + golab20: while (true) { + lab21: { + if (!(out_grouping(g_v, 97, 259))) { + break lab21; + } + break golab20; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p2 = cursor; + } + cursor = v_8; + return true; + } + + private boolean r_postlude() { + int among_var; + while (true) { + int v_1 = cursor; + lab0: { + bra = cursor; + among_var = find_among(a_0); + if (among_var == 0) { + break lab0; + } + ket = cursor; + switch (among_var) { + case 1: + slice_from("i"); + break; + case 2: + slice_from("u"); + break; + case 3: + if (cursor >= limit) { + break lab0; + } + cursor++; + break; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_RV() { + if (!(I_pV <= cursor)) { + return false; + } + return true; + } + + private boolean r_R1() { + if (!(I_p1 <= cursor)) { + return false; + } + return true; + } + + private boolean r_R2() { + if (!(I_p2 <= cursor)) { + return false; + } + return true; + } + + private boolean r_step_0() { + int among_var; + ket = cursor; + among_var = find_among_b(a_1); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + slice_from("a"); + break; + case 3: + slice_from("e"); + break; + case 4: + slice_from("i"); + break; + case 5: { + int v_1 = limit - cursor; + lab0: { + if (!(eq_s_b("ab"))) { + break lab0; + } + return false; + } + cursor = limit - v_1; + } + slice_from("i"); + break; + case 6: + slice_from("at"); + break; + case 7: + slice_from("a\u0163i"); + break; + } + return true; + } + + private 
boolean r_combo_suffix() { + int among_var; + int v_1 = limit - cursor; + ket = cursor; + among_var = find_among_b(a_2); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_from("abil"); + break; + case 2: + slice_from("ibil"); + break; + case 3: + slice_from("iv"); + break; + case 4: + slice_from("ic"); + break; + case 5: + slice_from("at"); + break; + case 6: + slice_from("it"); + break; + } + B_standard_suffix_removed = true; + cursor = limit - v_1; + return true; + } + + private boolean r_standard_suffix() { + int among_var; + B_standard_suffix_removed = false; + while (true) { + int v_1 = limit - cursor; + lab0: { + if (!r_combo_suffix()) { + break lab0; + } + continue; + } + cursor = limit - v_1; + break; + } + ket = cursor; + among_var = find_among_b(a_3); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R2()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + if (!(eq_s_b("\u0163"))) { + return false; + } + bra = cursor; + slice_from("t"); + break; + case 3: + slice_from("ist"); + break; + } + B_standard_suffix_removed = true; + return true; + } + + private boolean r_verb_suffix() { + int among_var; + if (cursor < I_pV) { + return false; + } + int v_2 = limit_backward; + limit_backward = I_pV; + ket = cursor; + among_var = find_among_b(a_4); + if (among_var == 0) { + limit_backward = v_2; + return false; + } + bra = cursor; + switch (among_var) { + case 1: + lab0: { + int v_3 = limit - cursor; + lab1: { + if (!(out_grouping_b(g_v, 97, 259))) { + break lab1; + } + break lab0; + } + cursor = limit - v_3; + if (!(eq_s_b("u"))) { + limit_backward = v_2; + return false; + } + } + slice_del(); + break; + case 2: + slice_del(); + break; + } + limit_backward = v_2; + return true; + } + + private boolean r_vowel_suffix() { + ket = cursor; + if (find_among_b(a_5) == 0) { + return false; + } + bra = cursor; + if (!r_RV()) { 
+ return false; + } + slice_del(); + return true; + } + + @Override + public boolean stem() { + int v_1 = cursor; + r_prelude(); + cursor = v_1; + r_mark_regions(); + limit_backward = cursor; + cursor = limit; + int v_3 = limit - cursor; + r_step_0(); + cursor = limit - v_3; + int v_4 = limit - cursor; + r_standard_suffix(); + cursor = limit - v_4; + int v_5 = limit - cursor; + lab0: { + lab1: { + int v_6 = limit - cursor; + lab2: { + if (!(B_standard_suffix_removed)) { + break lab2; + } + break lab1; + } + cursor = limit - v_6; + if (!r_verb_suffix()) { + break lab0; + } + } + } + cursor = limit - v_5; + int v_7 = limit - cursor; + r_vowel_suffix(); + cursor = limit - v_7; + cursor = limit_backward; + int v_8 = cursor; + r_postlude(); + cursor = v_8; + return true; + } + + @Override + public boolean equals(Object o) { + return o instanceof LegacyRomanianStemmer; + } + + @Override + public int hashCode() { + return LegacyRomanianStemmer.class.getName().hashCode(); + } +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java index cf33a38abd634..6c28df83a6d36 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java @@ -9,28 +9,60 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.LowerCaseFilter; +import org.apache.lucene.analysis.core.StopFilter; +import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; import 
org.apache.lucene.analysis.ro.RomanianAnalyzer; +import org.apache.lucene.analysis.snowball.SnowballFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { - private final RomanianAnalyzer analyzer; + private final StopwordAnalyzerBase analyzer; RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new RomanianAnalyzer( - Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()), - Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET) - ); + CharArraySet stopwords = Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()); + CharArraySet stemExclusionSet = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10, this analyzer a modern unicode form and normalizes cedilla forms to forms with commas + analyzer = new RomanianAnalyzer(stopwords, stemExclusionSet); + } else { + // for older index versions we need the old behaviour without normalization + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new StopFilter(result, stopwords); + if (stemExclusionSet.isEmpty() == false) { + 
result = new SetKeywordMarkerFilter(result, stemExclusionSet); + } + result = new SnowballFilter(result, new LegacyRomanianStemmer()); + return new TokenStreamComponents(source, result); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + return new LowerCaseFilter(in); + } + }; + + } } @Override - public RomanianAnalyzer get() { + public StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java new file mode 100644 index 0000000000000..1af44bc71f35d --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Verifies the behavior of Romanian analyzer. + */ +public class RomanianAnalyzerTests extends ESTokenStreamTestCase { + + public void testRomanianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenț" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoștinț" }); + } + + public void testRomanianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + 
IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenţ" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoştinţ" }); + } +} diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 43ed1755317f6..685d6a806edd8 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -1827,6 +1827,111 @@ public void testPersianAnalyzerBWC() throws Exception { } } + /** + * This test ensures that search results on old indices using "romanain" analyzer don't change + * after we introduce Lucene 10 + */ + public void testRomanianAnalyzerBWC() throws Exception { + var originalClusterLegacyRomanianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyRomanianAnalyzer); + final String indexName = "test_romanian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String cedillaForm = "absenţa"; + String commaForm = "absența"; + + String mapping = """ + { + "properties": { + 
"textfield" : { + "type": "text", + "analyzer": "romanian" + } + } + } + """; + + // query that uses the cedilla form of "t" + String query = """ + { + "query": { + "match": { + "textfield": "absenţa" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + // this doc uses the comma form + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + refresh(indexName); + + assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); 
+ + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + /** * This test ensures that soft deletes are enabled a when upgrading a pre-8 cluster to 8.0+ */ From e12e96215451b277a83348d313a8503cfae32743 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 3 Oct 2024 00:43:45 +0200 Subject: [PATCH 387/417] Fix bad merge --- .../telemetry/apm/internal/tracing/APMTracer.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index e9e1a79586a1b..cb74d62137815 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.Build; @@ -440,7 +439,7 @@ private static CharacterRunAutomaton buildAutomaton(List includePatterns ? 
includeAutomaton : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CharacterRunAutomaton(MinimizationOperations.minimize(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT))); + return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton patternsToAutomaton(List patterns) { From 1a4387d6940d9af7a44ee23836b9f2c52e4c1328 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 3 Oct 2024 01:09:29 +0200 Subject: [PATCH 388/417] minor changes to changelog for persian analyzer --- docs/changelog/113482.yaml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/changelog/113482.yaml b/docs/changelog/113482.yaml index f16f183ea05c6..cb5823f0ccfcc 100644 --- a/docs/changelog/113482.yaml +++ b/docs/changelog/113482.yaml @@ -8,20 +8,20 @@ breaking: title: The 'persian' analyzer has stemmer by default area: Analysis details: >- - Lucene 10 has added a final stemming step to its PersianAnalyzer that we - expose as 'persian' analyzer. Existing indices will keep the old + Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch + exposes as 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. - Users that want to maintain the non-stemming behaviour need to define their - own analyzer as outlines in + Users that wish to maintain the non-stemming behaviour need to define their + own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. - Users that want to use the new stemming behaviour for existing indices will + Users that wish to use the new stemming behaviour for existing indices will have to reindex their data. 
impact: >- - Indexing with the 'persian' analyzer will produce slightly different token. - Users should check if this impacts their search results. If they want to + Indexing with the 'persian' analyzer will produce slightly different tokens. + Users should check if this impacts their search results. If they wish to maintain the legacy non-stemming behaviour they can define their own - analyzer equivalent as explained in + analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. notable: false From ae92b29f4450516e1686576626ad19cd063bef07 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 3 Oct 2024 06:14:12 +0000 Subject: [PATCH 389/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-f76fdb293e1 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b5629adca6439..ca44490e867a3 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-4461bc1eff4 +lucene = 10.0.0-snapshot-f76fdb293e1 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a1f4cc106552a..3253e99ca1d31 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From aeabb36d90e18672193fb7f8338aac013cf09d82 Mon Sep 17 00:00:00 
2001 From: elasticsearchmachine Date: Fri, 4 Oct 2024 06:13:06 +0000 Subject: [PATCH 390/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-f76fdb293e1 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3253e99ca1d31..d6520533c5b3e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From e56d35616295df80570dc9d17d1a6cd218a40afc Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Fri, 4 Oct 2024 10:20:27 +0300 Subject: [PATCH 391/417] muting RankDocsQueryBuilderTests testRankDocsQueryEarlyTerminate --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 68422264221f2..d25ef4a2d0967 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -347,6 +347,9 @@ tests: - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5Small_withPlatformAgnosticVariant issue: https://github.com/elastic/elasticsearch/issues/113983 +- class: org.elasticsearch.search.retriever.rankdoc.RankDocsQueryBuilderTests + method: testRankDocsQueryEarlyTerminate + issue: https://github.com/elastic/elasticsearch/issues/114097 # Examples: # From ca1938c983d6cba7d845d70e9403706888fbca90 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 4 Oct 2024 12:30:36 +0200 Subject: [PATCH 392/417] Fix compilation issue --- .../org/elasticsearch/index/codec/Elasticsearch900Codec.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java index 50a917248cebd..4154a242c15ed 100644 --- 
a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java @@ -65,7 +65,7 @@ public Elasticsearch900Codec() { */ public Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode mode) { super("Elasticsearch900", new Lucene100Codec()); - this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.storedFieldsFormat = mode.getFormat(); this.defaultPostingsFormat = new Lucene912PostingsFormat(); this.defaultDVFormat = new Lucene90DocValuesFormat(); this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); From e4de0a19386cdd627a3c0a84ccb85d63166fdf15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 4 Oct 2024 15:41:43 +0200 Subject: [PATCH 393/417] Add changelog about the nori dictionary update (#114124) --- docs/changelog/114124.yaml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 docs/changelog/114124.yaml diff --git a/docs/changelog/114124.yaml b/docs/changelog/114124.yaml new file mode 100644 index 0000000000000..a7ad5d8edaa99 --- /dev/null +++ b/docs/changelog/114124.yaml @@ -0,0 +1,18 @@ +pr: 114124 +summary: The Korean dictionary for Nori has been updated +area: Analysis +type: breaking +issues: [] +breaking: + title: The Korean dictionary for Nori has been updated + area: Analysis + details: >- + Lucene 10 ships with an updated Korean dictionaryi (mecab-ko-dic-2.1.1). + For details see https://github.com/apache/lucene/issues/11452. Users + experiencing changes in search behaviour on existing data are advised to + reindex. + impact: >- + The change is small and should generally provide better analysis results. + Existing indices for full-text use cases should be reindex though. 
+ notable: false + From 6861f7f2604a452cef8526e0c0526927ff607650 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 4 Oct 2024 15:51:44 +0200 Subject: [PATCH 394/417] fix typos --- docs/changelog/114124.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/changelog/114124.yaml b/docs/changelog/114124.yaml index a7ad5d8edaa99..c812c6a468902 100644 --- a/docs/changelog/114124.yaml +++ b/docs/changelog/114124.yaml @@ -7,12 +7,12 @@ breaking: title: The Korean dictionary for Nori has been updated area: Analysis details: >- - Lucene 10 ships with an updated Korean dictionaryi (mecab-ko-dic-2.1.1). + Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex. impact: >- The change is small and should generally provide better analysis results. - Existing indices for full-text use cases should be reindex though. + Existing indices for full-text use cases should be reindexed though. 
notable: false From add9a9e6b06238afad468b41ab0e4efb1832e232 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Fri, 4 Oct 2024 17:02:11 +0300 Subject: [PATCH 395/417] Updating RankDocRetrieverBuilderIT for lucene_snapshot branch (#114098) --- .../retriever/RankDocRetrieverBuilderIT.java | 90 +++++++++---------- 1 file changed, 45 insertions(+), 45 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java index 0179dd54fa9b2..ce748a34f0b94 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java @@ -36,6 +36,7 @@ import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; +import org.elasticsearch.search.sort.ShardDocSortField; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -190,8 +191,10 @@ public void testRankDocsRetrieverBasicWithPagination() { SearchSourceBuilder source = new SearchSourceBuilder(); StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.queryStringQuery("quick").defaultField(TEXT_FIELD)) - .boost(10L); + standard0.queryBuilder = QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); StandardRetrieverBuilder standard1 = new 
StandardRetrieverBuilder(); // this one retrieves docs 2 and 6 due to prefilter standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); @@ -206,8 +209,8 @@ public void testRankDocsRetrieverBasicWithPagination() { null ); // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, rank, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 4, 7, 3 and with pagination, we'd just omit the first result + // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) + // so ideal rank would be: 6, 2, 1, 3, 4, 7 and with pagination, we'd just omit the first result source.retriever( new CompoundRetrieverWithRankDocs( rankWindowSize, @@ -228,9 +231,9 @@ public void testRankDocsRetrieverBasicWithPagination() { assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_7")); - assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_4")); + assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_7")); }); } @@ -243,8 +246,10 @@ public void testRankDocsRetrieverWithAggs() { SearchSourceBuilder source = new SearchSourceBuilder(); StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.queryStringQuery("quick").defaultField(TEXT_FIELD)) - .boost(10L); + standard0.queryBuilder = 
QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); // this one retrieves docs 2 and 6 due to prefilter standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); @@ -268,13 +273,15 @@ public void testRankDocsRetrieverWithAggs() { ) ) ); + source.size(1); source.aggregation(new TermsAggregationBuilder("topic").field(TOPIC_FIELD)); SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertNotNull(resp.getAggregations()); assertNotNull(resp.getAggregations().get("topic")); @@ -292,8 +299,10 @@ public void testRankDocsRetrieverWithCollapse() { SearchSourceBuilder source = new SearchSourceBuilder(); StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.queryStringQuery("quick").defaultField(TEXT_FIELD)) - .boost(10L); + standard0.queryBuilder = QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + 
.should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); // this one retrieves docs 2 and 6 due to prefilter standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); @@ -308,8 +317,8 @@ public void testRankDocsRetrieverWithCollapse() { null ); // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, rank, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 4, 7, 3 + // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) + // so ideal rank would be: 6, 2, 1, 3, 4, 7 // with collapsing on topic field we would have 6, 2, 1, 7 source.retriever( new CompoundRetrieverWithRankDocs( @@ -339,7 +348,6 @@ public void testRankDocsRetrieverWithCollapse() { assertThat(resp.getHits().getAt(1).field(TOPIC_FIELD).getValue().toString(), equalTo("astronomy")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); assertThat(resp.getHits().getAt(2).field(TOPIC_FIELD).getValue().toString(), equalTo("technology")); - assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getHits().length, equalTo(3)); assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(0).getId(), equalTo("doc_4")); assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(1).getId(), equalTo("doc_3")); assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(2).getId(), equalTo("doc_1")); @@ -348,17 +356,15 @@ public void testRankDocsRetrieverWithCollapse() { }); } - public void testRankDocsRetrieverWithCollapseAndAggs() { - // same as above, but we only want to bring back the top result from each 
subsearch - // so that would be 1, 2, and 7 - // and final rank would be (based on score): 2, 1, 7 - // aggs should still account for the same docs as the testRankDocsRetriever test, i.e. all but doc_5 + public void testRankDocsRetrieverWithNestedCollapseAndAggs() { final int rankWindowSize = 10; SearchSourceBuilder source = new SearchSourceBuilder(); StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); // this one retrieves docs 1 and 6 as doc_4 is collapsed to doc_1 - standard0.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.queryStringQuery("quick").defaultField(TEXT_FIELD)) - .boost(10L); + standard0.queryBuilder = QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); standard0.collapseBuilder = new CollapseBuilder(TOPIC_FIELD).setInnerHits( new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10) ); @@ -376,8 +382,8 @@ public void testRankDocsRetrieverWithCollapseAndAggs() { null ); // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, rank, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 4, 7, 3 + // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) + // so ideal rank would be: 6, 2, 1, 3, 4, 7 source.retriever( new CompoundRetrieverWithRankDocs( rankWindowSize, @@ -393,7 +399,7 @@ public void testRankDocsRetrieverWithCollapseAndAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - 
assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertNotNull(resp.getAggregations()); @@ -427,8 +433,8 @@ public void testRankDocsRetrieverWithNestedQuery() { null ); // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, rank, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 4, 3, 7 + // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) + // so ideal rank would be: 6, 2, 1, 3, 4, 7 source.retriever( new CompoundRetrieverWithRankDocs( rankWindowSize, @@ -460,8 +466,10 @@ public void testRankDocsRetrieverMultipleCompoundRetrievers() { SearchSourceBuilder source = new SearchSourceBuilder(); StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.queryStringQuery("quick").defaultField(TEXT_FIELD)) - .boost(10L); + standard0.queryBuilder = QueryBuilders.boolQuery() + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) + .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); // this one retrieves docs 2 and 6 due to prefilter standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); @@ -506,11 +514,11 @@ public void testRankDocsRetrieverMultipleCompoundRetrievers() { 
assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); + assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_2")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_7")); - assertThat(resp.getHits().getAt(5).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_3")); + assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_6")); + assertThat(resp.getHits().getAt(5).getId(), equalTo("doc_7")); }); } @@ -545,9 +553,9 @@ public void testRankDocsRetrieverDifferentNestedSorting() { assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_7")); + assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_6")); - assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_2")); + assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_7")); }); } @@ -673,22 +681,14 @@ private RankDoc[] getRankDocs(SearchResponse searchResponse) { for (int i = 0; i < size; i++) { var hit = searchResponse.getHits().getAt(i); long sortValue = (long) hit.getRawSortValues()[hit.getRawSortValues().length - 1]; - int doc = decodeDoc(sortValue); - int shardRequestIndex = decodeShardRequestIndex(sortValue); + int doc = ShardDocSortField.decodeDoc(sortValue); + int shardRequestIndex = ShardDocSortField.decodeShardRequestIndex(sortValue); docs[i] = new RankDoc(doc, hit.getScore(), shardRequestIndex); docs[i].rank = i + 1; } 
return docs; } - public static int decodeDoc(long value) { - return (int) value; - } - - public static int decodeShardRequestIndex(long value) { - return (int) (value >> 32); - } - record RankDocAndHitRatio(RankDoc rankDoc, float hitRatio) {} /** From dba248eb7e198c74dd5d2dbc715997310f3a0d2b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 4 Oct 2024 15:40:05 +0000 Subject: [PATCH 396/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-a4c0f741ccc --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ca44490e867a3..dc0c2af9edb8d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-f76fdb293e1 +lucene = 10.0.0-snapshot-a4c0f741ccc bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d6520533c5b3e..23b1f7573f55b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From adfd3777c012f97f1834c3d906847020a5369d4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 4 Oct 2024 20:29:33 +0200 Subject: [PATCH 397/417] Add Snowball upgrade changelog (#114146) --- docs/changelog/114146.yaml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 docs/changelog/114146.yaml diff --git a/docs/changelog/114146.yaml 
b/docs/changelog/114146.yaml new file mode 100644 index 0000000000000..be2096a64105c --- /dev/null +++ b/docs/changelog/114146.yaml @@ -0,0 +1,20 @@ +pr: 114146 +summary: Snowball stemmers have been upgraded +area: Analysis +type: breaking +issues: [] +breaking: + title: Snowball stemmers have been upgraded + area: Analysis + details: >- + Lucene 10 ships with an upgrade of its Snowball stemmers. + For details see https://github.com/apache/lucene/issues/13209. Users using + Snowball stemmers that are experiencing changes in search behaviour on + existing data are advised to reindex. + impact: >- + The upgrade should generally provide improved stemming results. Small changes + in token analysis can lead to mismatches with previously index data, so + existing indices using Snowball stemmers as part of their analysis chain + should be reindexed. + notable: false + From 1df381a2a558f8ef7c43024ea3dc17bda0c12f06 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 5 Oct 2024 06:12:14 +0000 Subject: [PATCH 398/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-a4c0f741ccc --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 23b1f7573f55b..131689519fab1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 4d4e435810a8d32abee3235ab19f074cb22b4cb6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 6 Oct 2024 06:12:53 +0000 Subject: [PATCH 399/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-a4c0f741ccc --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 
0f304b6835746..38e08b165e28c 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From d0951f613f5ec289ff3d760319aa023bb0b740ef Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 7 Oct 2024 06:13:03 +0000 Subject: [PATCH 400/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-a4c0f741ccc --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 38e08b165e28c..2729f247bcbd9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 64e66e8310301f11d8bda1c948c578c5f80eccb3 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Mon, 7 Oct 2024 11:30:04 +0300 Subject: [PATCH 401/417] muting RankDocsQueryBuilderTests testRankDocsQueryEarlyTerminate (#114198) --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 992996e811853..63f6a94ac3df7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -369,6 +369,9 @@ tests: - class: org.elasticsearch.aggregations.AggregationsClientYamlTestSuiteIT method: test {yaml=aggregations/stats_metric_fail_formatting/fail formatting} issue: https://github.com/elastic/elasticsearch/issues/114187 +- class: org.elasticsearch.search.retriever.rankdoc.RankDocsQueryBuilderTests + method: testRankDocsQueryEarlyTerminate + issue: https://github.com/elastic/elasticsearch/issues/114097 # Examples: # From e7a8877556629ed662dc5da6d4e8e941b0625d78 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 7 Oct 2024 13:53:49 +0200 Subject: [PATCH 402/417] Delete full cluster restart test removed in main This was 
restored due to a bad merge. --- .../upgrades/FullClusterRestartIT.java | 63 +------------------ 1 file changed, 1 insertion(+), 62 deletions(-) diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 85ad35e533dda..a38f47ddefb45 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -1899,68 +1899,7 @@ public void testRomanianAnalyzerBWC() throws Exception { assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); } } - - /** - * This test ensures that soft deletes are enabled a when upgrading a pre-8 cluster to 8.0+ - */ - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // This test can be removed in v9 - public void testEnableSoftDeletesOnRestore() throws Exception { - var originalClusterDidNotEnforceSoftDeletes = oldClusterHasFeature(RestTestLegacyFeatures.SOFT_DELETES_ENFORCED) == false; - - assumeTrue("soft deletes must be enabled on 8.0+", originalClusterDidNotEnforceSoftDeletes); - final String snapshot = "snapshot-" + index; - if (isRunningAgainstOldCluster()) { - final Settings.Builder settings = indexSettings(1, 1); - settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false); - createIndex(index, settings.build()); - ensureGreen(index); - int numDocs = randomIntBetween(0, 100); - indexRandomDocuments( - numDocs, - true, - true, - randomBoolean(), - i -> jsonBuilder().startObject().field("field", "value").endObject() - ); - // create repo - client().performRequest(newXContentRequest(HttpMethod.PUT, "/_snapshot/repo", (repoConfig, params) -> { - repoConfig.field("type", "fs"); - repoConfig.startObject("settings"); - repoConfig.field("compress", randomBoolean()); - 
repoConfig.field("location", repoDirectory.getRoot().getPath()); - repoConfig.endObject(); - return repoConfig; - })); - // create snapshot - Request createSnapshot = newXContentRequest( - HttpMethod.PUT, - "/_snapshot/repo/" + snapshot, - (builder, params) -> builder.field("indices", index) - ); - createSnapshot.addParameter("wait_for_completion", "true"); - client().performRequest(createSnapshot); - } else { - String restoredIndex = "restored-" + index; - // Restore - Request restoreRequest = newXContentRequest( - HttpMethod.POST, - "/_snapshot/repo/" + snapshot + "/_restore", - (restoreCommand, params) -> { - restoreCommand.field("indices", index); - restoreCommand.field("rename_pattern", index); - restoreCommand.field("rename_replacement", restoredIndex); - restoreCommand.startObject("index_settings").field("index.soft_deletes.enabled", true).endObject(); - return restoreCommand; - } - ); - restoreRequest.addParameter("wait_for_completion", "true"); - client().performRequest(restoreRequest); - ensureGreen(restoredIndex); - int numDocs = countOfIndexedRandomDocuments(); - assertTotalHits(numDocs, entityAsMap(client().performRequest(new Request("GET", "/" + restoredIndex + "/_search")))); - } - } - + public void testForbidDisableSoftDeletesOnRestore() throws Exception { final String snapshot = "snapshot-" + index; if (isRunningAgainstOldCluster()) { From 679914719a482de4c8a478d97cec36f025450d3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 7 Oct 2024 17:09:32 +0200 Subject: [PATCH 403/417] Remove smoke test section in MetadataCreateIndexServiceTests --- .../cluster/metadata/MetadataCreateIndexServiceTests.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 94f4e3a9f6a5d..05382de49087d 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -10,8 +10,6 @@ package org.elasticsearch.cluster.metadata; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; @@ -607,12 +605,6 @@ public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); Automaton patternAutomaton = new RegExp("\\.test-~(one.*)", RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); - assertTrue( - new CharacterRunAutomaton(Operations.determinize(patternAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)).run( - ".test-~(one.*)" - ) - ); - // TODO remove this smoke test ^^^ once the issue is fixed systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-~(one*)", "test")); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".pattern-test*", "test-1")); From cd46af1f9405f75c32a2e606f3c049472ac33384 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 7 Oct 2024 17:55:41 +0200 Subject: [PATCH 404/417] spotless --- .../java/org/elasticsearch/upgrades/FullClusterRestartIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index a38f47ddefb45..73f291da15ead 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ 
b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -1899,7 +1899,7 @@ public void testRomanianAnalyzerBWC() throws Exception { assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); } } - + public void testForbidDisableSoftDeletesOnRestore() throws Exception { final String snapshot = "snapshot-" + index; if (isRunningAgainstOldCluster()) { From f8fac7a1e3169077e50f1187f8f15c06e17b1482 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 7 Oct 2024 20:31:15 +0200 Subject: [PATCH 405/417] Address EngineTestCase compile errors --- .../org/elasticsearch/index/engine/EngineTestCase.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 0b5803e9887d6..4713adf6cf01d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -178,7 +178,7 @@ protected static void assertVisibleCount(Engine engine, int numDocs, boolean ref engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(numDocs)); } } @@ -970,7 +970,7 @@ protected static void assertVisibleCount(InternalEngine engine, int numDocs, boo engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); 
assertThat(totalHits, equalTo(numDocs)); } } @@ -1168,7 +1168,10 @@ public static void assertOpsOnReplica( assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } From 7987e8cfa5a85e060013d916f59ff8014794d97b Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 7 Oct 2024 21:01:02 +0200 Subject: [PATCH 406/417] Address more compile errors around TotalHitCountCollectorManager constructor --- .../index/engine/InternalEngineTests.java | 25 +++++++++++++------ .../search/SearchCancellationTests.java | 9 ++++--- ...ityIndexReaderWrapperIntegrationTests.java | 2 +- 3 files changed, 24 insertions(+), 12 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 50a5d730622ba..21aefd893de70 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -640,7 +640,7 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { recoverFromTranslog(recoveringEngine, translogHandler, Long.MAX_VALUE); recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(operations.get(operations.size() - 1) 
instanceof Engine.Delete ? 0 : 1)); } } @@ -2010,7 +2010,7 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValueDoc1)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2019,7 +2019,7 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValueDoc2)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2249,7 +2249,7 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValue)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2275,7 +2275,10 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion assertVisibleCount(engine, docDeleted ? 
0 : 1); if (docDeleted == false) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2361,7 +2364,10 @@ public void testNonInternalVersioningOnPrimary() throws IOException { if (docDeleted == false) { logger.info("searching for [{}]", lastFieldValue); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2378,7 +2384,7 @@ public void testVersioningPromotedReplica() throws IOException { final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); final long currentSeqNo = getSequenceID(replicaEngine, new Engine.Get(false, false, Term.toString(lastReplicaOp.uid()))).v1(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); if (totalHits > 0) { // last op wasn't delete assertThat(currentSeqNo, equalTo(finalReplicaSeqNo + opsOnPrimary)); @@ -2402,7 +2408,10 @@ public void testConcurrentExternalVersioningOnPrimary() throws IOException, Inte assertVisibleCount(engine, lastFieldValue == null ? 
0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index db91e01827d57..aa2e76f512cc8 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -106,14 +106,17 @@ public void testCancellableCollector() throws IOException { true ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(reader.numDocs())); searcher.addQueryCancellation(cancellation); - expectThrows(TaskCancelledException.class, () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager())); + expectThrows( + TaskCancelledException.class, + () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())) + ); searcher.removeQueryCancellation(cancellation); - Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits2, equalTo(reader.numDocs())); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index df64c4f87410a..4751f66cf548e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -193,7 +193,7 @@ protected IndicesAccessControl getIndicesAccessControl() { int expectedHitCount = valuesHitCount[i]; logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount); - Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(indexSearcher.getSlices())); assertThat(totalHits, equalTo(expectedHitCount)); assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount)); } From 43b979728b7831c7d856a58718143e0ea965e40d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 8 Oct 2024 06:12:41 +0000 Subject: [PATCH 407/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-a4c0f741ccc --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2729f247bcbd9..23c624eea46ae 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 84fdf5a479e2fcc6845772eca0665ad05c8b0935 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 9 Oct 2024 06:14:27 +0000 Subject: [PATCH 408/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-a4c0f741ccc --- 
gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 23c624eea46ae..b6c3001f76029 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From fc31076d096fa5f474ef117a2cf9f637f29e774e Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Tue, 8 Oct 2024 16:38:36 -0400 Subject: [PATCH 409/417] Fix bbq for Lucene 10 --- .../vectors/BinarizedByteVectorValues.java | 51 +++++-- .../vectors/ES816BinaryFlatVectorsScorer.java | 22 +-- .../ES816BinaryQuantizedVectorsReader.java | 38 ++--- .../ES816BinaryQuantizedVectorsWriter.java | 143 ++++++++++-------- .../vectors/OffHeapBinarizedVectorValues.java | 100 +++--------- ...RandomAccessBinarizedByteVectorValues.java | 70 --------- .../ES816BinaryFlatVectorsScorerTests.java | 43 +++++- ...S816BinaryQuantizedVectorsFormatTests.java | 28 ++-- ...HnswBinaryQuantizedVectorsFormatTests.java | 12 +- 9 files changed, 237 insertions(+), 270 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java index 73dd4273a794e..1929c36def141 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java @@ -19,23 +19,50 @@ */ package org.elasticsearch.index.codec.vectors; -import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.index.ByteVectorValues; import 
org.apache.lucene.search.VectorScorer; +import org.apache.lucene.util.VectorUtil; import java.io.IOException; /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */ -public abstract class BinarizedByteVectorValues extends DocIdSetIterator { +public abstract class BinarizedByteVectorValues extends ByteVectorValues { - public abstract float[] getCorrectiveTerms(); - - public abstract byte[] vectorValue() throws IOException; + public abstract float[] getCorrectiveTerms(int vectorOrd) throws IOException; /** Return the dimension of the vectors */ public abstract int dimension(); + /** Returns the centroid distance for the vector */ + public abstract float getCentroidDistance(int vectorOrd) throws IOException; + + /** Returns the vector magnitude for the vector */ + public abstract float getVectorMagnitude(int vectorOrd) throws IOException; + + /** Returns OOQ corrective factor for the given vector ordinal */ + public abstract float getOOQ(int targetOrd) throws IOException; + + /** + * Returns the norm of the target vector w the centroid corrective factor for the given vector + * ordinal + */ + public abstract float getNormOC(int targetOrd) throws IOException; + + /** + * Returns the target vector dot product the centroid corrective factor for the given vector + * ordinal + */ + public abstract float getODotC(int targetOrd) throws IOException; + + /** + * @return the quantizer used to quantize the vectors + */ + public abstract BinaryQuantizer getQuantizer(); + + public abstract float[] getCentroid() throws IOException; + /** * Return the number of vectors for this field. * @@ -43,11 +70,6 @@ public abstract class BinarizedByteVectorValues extends DocIdSetIterator { */ public abstract int size(); - @Override - public final long cost() { - return size(); - } - /** * Return a {@link VectorScorer} for the given query vector. 
* @@ -55,4 +77,13 @@ public final long cost() { * @return a {@link VectorScorer} instance or null */ public abstract VectorScorer scorer(float[] query) throws IOException; + + @Override + public abstract BinarizedByteVectorValues copy() throws IOException; + + float getCentroidDP() throws IOException { + // this only gets executed on-merge + float[] centroid = getCentroid(); + return VectorUtil.dotProduct(centroid, centroid); + } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java index 78fa282709098..cd360c6d6ff20 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java @@ -20,10 +20,10 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.simdvec.ESVectorUtil; @@ -45,9 +45,9 @@ public ES816BinaryFlatVectorsScorer(FlatVectorsScorer nonQuantizedDelegate) { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues + KnnVectorValues vectorValues ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues) { + if (vectorValues instanceof BinarizedByteVectorValues) { throw new UnsupportedOperationException( "getRandomVectorScorerSupplier(VectorSimilarityFunction,RandomAccessVectorValues) not implemented for 
binarized format" ); @@ -58,10 +58,10 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, float[] target ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues binarizedVectors) { + if (vectorValues instanceof BinarizedByteVectorValues binarizedVectors) { BinaryQuantizer quantizer = binarizedVectors.getQuantizer(); float[] centroid = binarizedVectors.getCentroid(); // FIXME: precompute this once? @@ -82,7 +82,7 @@ public RandomVectorScorer getRandomVectorScorer( @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, byte[] target ) throws IOException { return nonQuantizedDelegate.getRandomVectorScorer(similarityFunction, vectorValues, target); @@ -91,7 +91,7 @@ public RandomVectorScorer getRandomVectorScorer( RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues scoringVectors, - RandomAccessBinarizedByteVectorValues targetVectors + BinarizedByteVectorValues targetVectors ) { return new BinarizedRandomVectorScorerSupplier(scoringVectors, targetVectors, similarityFunction); } @@ -104,12 +104,12 @@ public String toString() { /** Vector scorer supplier over binarized vector values */ static class BinarizedRandomVectorScorerSupplier implements RandomVectorScorerSupplier { private final ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; BinarizedRandomVectorScorerSupplier( 
ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { this.queryVectors = queryVectors; @@ -149,14 +149,14 @@ public record BinaryQueryVector(byte[] vector, BinaryQuantizer.QueryFactors fact /** Vector scorer over binarized vector values */ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final BinaryQueryVector queryVector; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; private final float sqrtDimensions; public BinarizedRandomVectorScorer( BinaryQueryVector queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { super(targetVectors); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java index b0378fee6793d..21c4a5c449387 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java @@ -36,6 +36,7 @@ import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.Bits; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; @@ -78,7 +79,7 @@ public ES816BinaryQuantizedVectorsReader( ES816BinaryQuantizedVectorsFormat.META_EXTENSION ); boolean success = false; - try (ChecksumIndexInput meta = 
state.directory.openChecksumInput(metaFileName, state.context)) { + try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) { Throwable priorE = null; try { versionMeta = CodecUtil.checkIndexHeader( @@ -102,7 +103,7 @@ public ES816BinaryQuantizedVectorsReader( ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME, // Quantized vectors are accessed randomly from their node ID stored in the HNSW // graph. - state.context.withRandomAccess() + state.context.withReadAdvice(ReadAdvice.RANDOM) ); success = true; } finally { @@ -357,9 +358,9 @@ static FieldEntry create(IndexInput input, VectorEncoding vectorEncoding, Vector /** Binarized vector values holding row and quantized vector values */ protected static final class BinarizedVectorValues extends FloatVectorValues { private final FloatVectorValues rawVectorValues; - private final OffHeapBinarizedVectorValues quantizedVectorValues; + private final BinarizedByteVectorValues quantizedVectorValues; - BinarizedVectorValues(FloatVectorValues rawVectorValues, OffHeapBinarizedVectorValues quantizedVectorValues) { + BinarizedVectorValues(FloatVectorValues rawVectorValues, BinarizedByteVectorValues quantizedVectorValues) { this.rawVectorValues = rawVectorValues; this.quantizedVectorValues = quantizedVectorValues; } @@ -375,29 +376,28 @@ public int size() { } @Override - public float[] vectorValue() throws IOException { - return rawVectorValues.vectorValue(); + public float[] vectorValue(int ord) throws IOException { + return rawVectorValues.vectorValue(ord); } @Override - public int docID() { - return rawVectorValues.docID(); + public BinarizedVectorValues copy() throws IOException { + return new BinarizedVectorValues(rawVectorValues.copy(), quantizedVectorValues.copy()); } @Override - public int nextDoc() throws IOException { - int rawDocId = rawVectorValues.nextDoc(); - int quantizedDocId = quantizedVectorValues.nextDoc(); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public Bits 
getAcceptOrds(Bits acceptDocs) { + return rawVectorValues.getAcceptOrds(acceptDocs); } @Override - public int advance(int target) throws IOException { - int rawDocId = rawVectorValues.advance(target); - int quantizedDocId = quantizedVectorValues.advance(target); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public int ordToDoc(int ord) { + return rawVectorValues.ordToDoc(ord); + } + + @Override + public DocIndexIterator iterator() { + return rawVectorValues.iterator(); } @Override @@ -405,7 +405,7 @@ public VectorScorer scorer(float[] query) throws IOException { return quantizedVectorValues.scorer(query); } - protected OffHeapBinarizedVectorValues getQuantizedVectorValues() throws IOException { + protected BinarizedByteVectorValues getQuantizedVectorValues() throws IOException { return quantizedVectorValues; } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java index 92837a8ffce45..a7774b850b64c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; @@ -44,7 +45,6 @@ import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.VectorUtil; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import 
org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.core.SuppressForbidden; @@ -354,10 +354,11 @@ static DocsWithFieldSet writeBinarizedVectorAndQueryData( int queryCorrectionCount = binaryQuantizer.getSimilarity() != EUCLIDEAN ? 5 : 3; final ByteBuffer queryCorrectionsBuffer = ByteBuffer.allocate(Float.BYTES * queryCorrectionCount + Short.BYTES) .order(ByteOrder.LITTLE_ENDIAN); - for (int docV = floatVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = floatVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = floatVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write index vector BinaryQuantizer.QueryAndIndexResults r = binaryQuantizer.quantizeQueryAndIndex( - floatVectorValues.vectorValue(), + floatVectorValues.vectorValue(iterator.index()), toIndex, toQuery, centroid @@ -393,11 +394,12 @@ static DocsWithFieldSet writeBinarizedVectorAndQueryData( static DocsWithFieldSet writeBinarizedVectorData(IndexOutput output, BinarizedByteVectorValues binarizedByteVectorValues) throws IOException { DocsWithFieldSet docsWithField = new DocsWithFieldSet(); - for (int docV = binarizedByteVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = binarizedByteVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = binarizedByteVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write vector - byte[] binaryValue = binarizedByteVectorValues.vectorValue(); + byte[] binaryValue = binarizedByteVectorValues.vectorValue(iterator.index()); output.writeBytes(binaryValue, binaryValue.length); - float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(); + float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(iterator.index()); for (int i = 0; i < corrections.length; i++) { output.writeInt(Float.floatToIntBits(corrections[i])); } @@ -598,8 +600,9 @@ static int 
calculateCentroid(MergeState mergeState, FieldInfo fieldInfo, float[] if (vectorValues == null) { continue; } - for (int doc = vectorValues.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = vectorValues.nextDoc()) { - float[] vector = vectorValues.vectorValue(); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { + float[] vector = vectorValues.vectorValue(iterator.index()); // TODO Panama sum for (int j = 0; j < vector.length; j++) { centroid[j] += vector[j]; @@ -827,23 +830,31 @@ static class BinarizedFloatVectorValues extends BinarizedByteVectorValues { private final float[] centroid; private final FloatVectorValues values; private final BinaryQuantizer quantizer; - private int lastDoc; + private int lastOrd = -1; BinarizedFloatVectorValues(FloatVectorValues delegate, BinaryQuantizer quantizer, float[] centroid) { this.values = delegate; this.quantizer = quantizer; this.binarized = new byte[BQVectorUtils.discretize(delegate.dimension(), 64) / 8]; this.centroid = centroid; - lastDoc = -1; } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int ord) { + if (ord != lastOrd) { + throw new IllegalStateException( + "attempt to retrieve corrective terms for different ord " + ord + " than the quantization was done for: " + lastOrd + ); + } return corrections; } @Override - public byte[] vectorValue() throws IOException { + public byte[] vectorValue(int ord) throws IOException { + if (ord != lastOrd) { + binarize(ord); + lastOrd = ord; + } return binarized; } @@ -853,33 +864,43 @@ public int dimension() { } @Override - public int size() { - return values.size(); + public float getCentroidDistance(int vectorOrd) throws IOException { + throw new UnsupportedOperationException(); } @Override - public int docID() { - return values.docID(); + public float getVectorMagnitude(int vectorOrd) throws IOException { + throw 
new UnsupportedOperationException(); } @Override - public int nextDoc() throws IOException { - int doc = values.nextDoc(); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; + public float getOOQ(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); } @Override - public int advance(int target) throws IOException { - int doc = values.advance(target); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; + public float getNormOC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public float getODotC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public BinaryQuantizer getQuantizer() { + throw new UnsupportedOperationException(); + } + + @Override + public float[] getCentroid() throws IOException { + return centroid; + } + + @Override + public int size() { + return values.size(); } @Override @@ -887,22 +908,32 @@ public VectorScorer scorer(float[] target) throws IOException { throw new UnsupportedOperationException(); } - private void binarize() throws IOException { - if (lastDoc == docID()) return; - corrections = quantizer.quantizeForIndex(values.vectorValue(), binarized, centroid); + @Override + public BinarizedByteVectorValues copy() throws IOException { + return new BinarizedFloatVectorValues(values.copy(), quantizer, centroid); + } + + private void binarize(int ord) throws IOException { + corrections = quantizer.quantizeForIndex(values.vectorValue(ord), binarized, centroid); + } + + @Override + public DocIndexIterator iterator() { + return values.iterator(); + } + + @Override + public int ordToDoc(int ord) { + return values.ordToDoc(ord); } } static class BinarizedCloseableRandomVectorScorerSupplier implements CloseableRandomVectorScorerSupplier { private final RandomVectorScorerSupplier supplier; - private final RandomAccessVectorValues vectorValues; + private final KnnVectorValues 
vectorValues; private final Closeable onClose; - BinarizedCloseableRandomVectorScorerSupplier( - RandomVectorScorerSupplier supplier, - RandomAccessVectorValues vectorValues, - Closeable onClose - ) { + BinarizedCloseableRandomVectorScorerSupplier(RandomVectorScorerSupplier supplier, KnnVectorValues vectorValues, Closeable onClose) { this.supplier = supplier; this.onClose = onClose; this.vectorValues = vectorValues; @@ -932,7 +963,6 @@ public int totalVectorCount() { static final class NormalizedFloatVectorValues extends FloatVectorValues { private final FloatVectorValues values; private final float[] normalizedVector; - int curDoc = -1; NormalizedFloatVectorValues(FloatVectorValues values) { this.values = values; @@ -950,38 +980,25 @@ public int size() { } @Override - public float[] vectorValue() { - return normalizedVector; + public int ordToDoc(int ord) { + return values.ordToDoc(ord); } @Override - public VectorScorer scorer(float[] query) { - throw new UnsupportedOperationException(); - } - - @Override - public int docID() { - return values.docID(); + public float[] vectorValue(int ord) throws IOException { + System.arraycopy(values.vectorValue(ord), 0, normalizedVector, 0, normalizedVector.length); + VectorUtil.l2normalize(normalizedVector); + return normalizedVector; } @Override - public int nextDoc() throws IOException { - curDoc = values.nextDoc(); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; + public DocIndexIterator iterator() { + return values.iterator(); } @Override - public int advance(int target) throws IOException { - curDoc = values.advance(target); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; + public NormalizedFloatVectorValues copy() throws IOException { + return 
new NormalizedFloatVectorValues(values.copy()); } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java index 2a3c3aca60e54..72934750305ce 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java @@ -36,7 +36,7 @@ import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; /** Binarized vector values loaded from off-heap */ -public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues implements RandomAccessBinarizedByteVectorValues { +public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues { protected final int dimension; protected final int size; @@ -109,7 +109,12 @@ public float getCentroidDP() { } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); return correctiveValues; } @@ -173,11 +178,6 @@ public float[] getCentroid() { return centroid; } - @Override - public IndexInput getSlice() { - return slice; - } - @Override public int getVectorByteLength() { return numBytes; @@ -230,8 +230,6 @@ public static OffHeapBinarizedVectorValues load( /** Dense off-heap binarized vector values */ public static class DenseOffHeapVectorValues extends OffHeapBinarizedVectorValues { - private int doc = -1; - public DenseOffHeapVectorValues( int dimension, int size, @@ -245,30 +243,6 @@ public DenseOffHeapVectorValues( super(dimension, size, centroid, centroidDp, binaryQuantizer, similarityFunction, vectorsScorer, slice); } - @Override - public byte[] vectorValue() throws 
IOException { - return vectorValue(doc); - } - - @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 1); - } - - @Override - public int advance(int target) { - assert docID() < target; - if (target >= size) { - return doc = NO_MORE_DOCS; - } - return doc = target; - } - @Override public DenseOffHeapVectorValues copy() throws IOException { return new DenseOffHeapVectorValues( @@ -291,19 +265,25 @@ public Bits getAcceptOrds(Bits acceptDocs) { @Override public VectorScorer scorer(float[] target) throws IOException { DenseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.doc); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } + + @Override + public DocIndexIterator iterator() { + return createDenseIterator(); + } } /** Sparse off-heap binarized vector values */ @@ -333,27 +313,6 @@ private static class SparseOffHeapVectorValues extends OffHeapBinarizedVectorVal this.disi = configuration.getIndexedDISI(dataIn); } - @Override - public byte[] vectorValue() throws IOException { - return vectorValue(disi.index()); - } - - @Override - public int docID() { - return disi.docID(); - } - - @Override - public int nextDoc() throws IOException { - return disi.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - assert docID() < target; - return disi.advance(target); - } - @Override public SparseOffHeapVectorValues copy() throws IOException { return new SparseOffHeapVectorValues( @@ -393,19 +352,25 @@ public int length() { }; } + @Override + public DocIndexIterator iterator() { + return IndexedDISI.asDocIndexIterator(disi); + } + @Override public VectorScorer scorer(float[] 
target) throws IOException { SparseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.disi.index()); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } @@ -419,23 +384,8 @@ private static class EmptyOffHeapVectorValues extends OffHeapBinarizedVectorValu } @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 1); - } - - @Override - public int advance(int target) { - return doc = NO_MORE_DOCS; - } - - @Override - public byte[] vectorValue() { - throw new UnsupportedOperationException(); + public DocIndexIterator iterator() { + return createDenseIterator(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java deleted file mode 100644 index 2417353373ba5..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * @notice - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * Modifications copyright (C) 2024 Elasticsearch B.V. - */ -package org.elasticsearch.index.codec.vectors; - -import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; - -import java.io.IOException; - -/** - * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 - */ -public interface RandomAccessBinarizedByteVectorValues extends RandomAccessVectorValues.Bytes { - /** Returns the centroid distance for the vector */ - float getCentroidDistance(int vectorOrd) throws IOException; - - /** Returns the vector magnitude for the vector */ - float getVectorMagnitude(int vectorOrd) throws IOException; - - /** Returns OOQ corrective factor for the given vector ordinal */ - float getOOQ(int targetOrd) throws IOException; - - /** - * Returns the norm of the target vector w the centroid corrective factor for the given vector - * ordinal - */ - float getNormOC(int targetOrd) throws IOException; - - /** - * Returns the target vector dot product the centroid corrective factor for the given vector - * ordinal - */ - float getODotC(int targetOrd) throws IOException; - - /** - * @return the quantizer used to quantize the vectors - */ - BinaryQuantizer getQuantizer(); - - /** - * @return coarse grained centroids for the vectors - */ - float[] getCentroid() throws IOException; - - @Override - RandomAccessBinarizedByteVectorValues copy() throws IOException; - - default float getCentroidDP() throws IOException { - // this only gets executed on-merge - float[] centroid = getCentroid(); - return 
VectorUtil.dotProduct(centroid, centroid); - } -} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java index 4ac66a9f63a3f..dc41a58fa2b8a 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.VectorScorer; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.logging.LogConfigurator; @@ -61,7 +62,7 @@ public void testScore() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) throws IOException { return random().nextFloat(0f, 1000f); @@ -99,7 +100,7 @@ public float[] getCentroid() throws IOException { } @Override - public RandomAccessBinarizedByteVectorValues copy() throws IOException { + public BinarizedByteVectorValues copy() throws IOException { return null; } @@ -115,6 +116,16 @@ public int size() { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -209,7 +220,7 @@ public void testScoreEuclidean() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, 0f, 0f) 
); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 355.78073f; @@ -375,7 +386,7 @@ public float[] getCentroid() { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -389,6 +400,16 @@ public int size() { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -806,7 +827,7 @@ public void testScoreMIP() throws IOException { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 0f; @@ -1617,7 +1638,7 @@ public float[] getCentroid() { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -1727,6 +1748,16 @@ public int size() { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java index 0892436891ff1..42f2fbb383ac9 100644 --- 
a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.DirectoryReader; @@ -30,6 +30,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.IndexSearcher; @@ -58,7 +59,7 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816BinaryQuantizedVectorsFormat(); @@ -90,8 +91,8 @@ public void testSearch() throws Exception { float[] queryVector = randomVector(dims); Query q = new KnnFloatVectorQuery(fieldName, queryVector, k); TopDocs collectedDocs = searcher.search(q, k); - assertEquals(k, collectedDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation); + assertEquals(k, collectedDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation()); } } } @@ -148,7 +149,7 @@ public void testQuantizedVectorsWriteAndRead() throws IOException { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); 
assertEquals(vectorValues.size(), numVectors); - OffHeapBinarizedVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) + BinarizedByteVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) .getQuantizedVectorValues(); float[] centroid = qvectorValues.getCentroid(); assertEquals(centroid.length, dims); @@ -159,13 +160,18 @@ public void testQuantizedVectorsWriteAndRead() throws IOException { if (similarityFunction == VectorSimilarityFunction.COSINE) { vectorValues = new ES816BinaryQuantizedVectorsWriter.NormalizedFloatVectorValues(vectorValues); } - - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - float[] corrections = quantizer.quantizeForIndex(vectorValues.vectorValue(), expectedVector, centroid); - assertArrayEquals(expectedVector, qvectorValues.vectorValue()); - assertEquals(corrections.length, qvectorValues.getCorrectiveTerms().length); + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); + + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + float[] corrections = quantizer.quantizeForIndex( + vectorValues.vectorValue(docIndexIterator.index()), + expectedVector, + centroid + ); + assertArrayEquals(expectedVector, qvectorValues.vectorValue(docIndexIterator.index())); + assertEquals(corrections.length, qvectorValues.getCorrectiveTerms(docIndexIterator.index()).length); for (int i = 0; i < corrections.length; i++) { - assertEquals(corrections[i], qvectorValues.getCorrectiveTerms()[i], 0.00001f); + assertEquals(corrections[i], qvectorValues.getCorrectiveTerms(docIndexIterator.index())[i], 0.00001f); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java index f607de57e1fd5..ca96e093b7b28 100644 --- 
a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.TopDocs; @@ -55,7 +56,7 @@ public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816HnswBinaryQuantizedVectorsFormat(); @@ -91,12 +92,13 @@ public void testSingleVectorCase() throws Exception { try (IndexReader reader = DirectoryReader.open(w)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues("f"); + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); assert (vectorValues.size() == 1); - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - assertArrayEquals(vector, vectorValues.vectorValue(), 0.00001f); + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + assertArrayEquals(vector, vectorValues.vectorValue(docIndexIterator.index()), 0.00001f); } TopDocs td = r.searchNearestVectors("f", 
randomVector(vector.length), 1, null, Integer.MAX_VALUE); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); assertTrue(td.scoreDocs[0].score >= 0); } } From 7dafac99d03cff6577e75eae39a3e49f85e149c5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 9 Oct 2024 14:33:47 +0000 Subject: [PATCH 410/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-bc478d85a12 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9e1940bcf9924..20e683e617c60 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-a4c0f741ccc +lucene = 10.0.0-snapshot-bc478d85a12 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b6c3001f76029..70553c70c0342 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 88357287e8b9b1adf5623bde688d366248e6a2ae Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 9 Oct 2024 16:39:46 +0100 Subject: [PATCH 411/417] apply MADV_NORMAL advice to enable more aggressive readahead (#114410) --- distribution/src/config/jvm.options | 3 +++ 1 file changed, 3 insertions(+) diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index 
a523c3ec85ba1..f55d90933ed61 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -62,6 +62,9 @@ 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.setAsTypeCache 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.asTypeUncached +# Lucene 10: apply MADV_NORMAL advice to enable more aggressive readahead +-Dorg.apache.lucene.store.defaultReadAdvice=normal + ## heap dumps # generate a heap dump when an allocation from the Java heap fails; heap dumps From ec1b110959c375a1bbef627f39b10404d35fd40c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 10 Oct 2024 06:12:37 +0000 Subject: [PATCH 412/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-eadc07cc6a1 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 150 ++++++++++++------------ 2 files changed, 76 insertions(+), 76 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 20e683e617c60..fd5072cfc61cc 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 10.0.0-snapshot-bc478d85a12 +lucene = 10.0.0-snapshot-eadc07cc6a1 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 70553c70c0342..f89f217aa91a3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,129 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3d77894b58c87154120eb6a6da3d2971c043d2c1 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 10 Oct 2024 17:57:25 +0200 
Subject: [PATCH 413/417] Remove empty queue conditional from slicing logic (#114513) With recent changes in Lucene around not forking execution when not necessary, we have removed the search worker thread pool in #111099. The worker thread pool had unlimited queue, and the fear was that we couuld have much more queueing on the search thread pool if we use it to parallelize execution across segments, because every shard would take up to a thread per slice when executing the query phase. We have then introduced an additional conditional to stop parallelizing when there is a queue. That is perhaps a bit extreme, as it's a decision made when creating the searcher, while a queue may no longer be there once the search is executing. This has caused some benchmarks regressions, hence this commit removes the additional queue dependent conditional in order to perform additional benchmarks without it. --- .../main/java/org/elasticsearch/search/DefaultSearchContext.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 1521b17a81766..fe6c542e629ec 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -291,7 +291,6 @@ static int determineMaximumNumberOfSlices( ToLongFunction fieldCardinality ) { return executor instanceof ThreadPoolExecutor tpe - && tpe.getQueue().isEmpty() && isParallelCollectionSupportedForResults(resultsType, request.source(), fieldCardinality, enableQueryPhaseParallelCollection) ? 
tpe.getMaximumPoolSize() : 1; From 34ed72110d5cf98fcd9c032e0d61926c527f4301 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 11 Oct 2024 06:12:54 +0000 Subject: [PATCH 414/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-eadc07cc6a1 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f89f217aa91a3..eca278b830533 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2816,127 +2816,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 4721a67771b520f6221f10fcde67845dc6c395ee Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 12 Oct 2024 06:13:05 +0000 Subject: [PATCH 415/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-eadc07cc6a1 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4efc487594013..cdb88115516f3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2821,127 +2821,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 3c5086abf721fda5b5e90db27e2b1204ba09392b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 13 Oct 2024 06:15:57 +0000 Subject: [PATCH 416/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-eadc07cc6a1 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cdb88115516f3..e70718c0d96dd 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2821,127 +2821,127 @@ - + - + - + - + - + - + - + - + 
- + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 8574e5161dd89d19822d47a39bf8c2e5ff549c35 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 14 Oct 2024 06:12:53 +0000 Subject: [PATCH 417/417] [Automated] Update Lucene snapshot to 10.0.0-snapshot-eadc07cc6a1 --- gradle/verification-metadata.xml | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e70718c0d96dd..596af502b0bee 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2821,127 +2821,127 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - +