diff --git a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml index 1f69b8faa7ab4..bb0e82df98ede 100644 --- a/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml +++ b/.buildkite/pipelines/lucene-snapshot/build-snapshot.yml @@ -1,8 +1,10 @@ steps: - trigger: apache-lucene-build-snapshot - label: Trigger pipeline to build lucene snapshot + label: Trigger pipeline to build lucene 10 snapshot key: lucene-build - if: build.env("LUCENE_BUILD_ID") == null || build.env("LUCENE_BUILD_ID") == "" + if: (build.env("LUCENE_BUILD_ID") == null || build.env("LUCENE_BUILD_ID") == "") + build: + branch: branch_10_0 - wait - label: Upload and update lucene snapshot command: .buildkite/scripts/lucene-snapshot/upload-snapshot.sh diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index 49c3396488d82..a3ae332736fa9 100644 --- a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -62,7 +62,6 @@ steps: matrix: setup: BWC_VERSION: - - 7.17.13 - 8.9.1 - 8.10.0 agents: diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java index 569e8909e1e12..b294fe97c7e7c 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -19,7 +19,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import 
org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.core.IOUtils; @@ -217,19 +217,17 @@ public float squareDistanceScalar() { return 1 / (1f + adjustedDistance); } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } - RandomVectorScorer luceneScorer(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) - throws IOException { + RandomVectorScorer luceneScorer(QuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorer(sim, values, queryVec); } diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 58ccf69406ff2..5388f942be8d7 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -59,10 +59,6 @@ org.apache.lucene.util.Version#parseLeniently(java.lang.String) org.apache.lucene.index.NoMergePolicy#INSTANCE @ explicit use of NoMergePolicy risks forgetting to configure 
NoMergeScheduler; use org.elasticsearch.common.lucene.Lucene#indexWriterConfigWithNoMerging() instead. -@defaultMessage Spawns a new thread which is solely under lucenes control use ThreadPool#relativeTimeInMillis instead -org.apache.lucene.search.TimeLimitingCollector#getGlobalTimerThread() -org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter() - @defaultMessage Don't interrupt threads use FutureUtils#cancel(Future) instead java.util.concurrent.Future#cancel(boolean) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index edb97a2968bc8..7e2cfe24188b9 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.11.1 +lucene = 10.0.0-snapshot-524ea208c87 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index fb99ef498df17..bdb0704fcd880 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.11.1 -:lucene_version_path: 9_11_1 +:lucene_version: 10.0.0 +:lucene_version_path: 10_0_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/changelog/111465.yaml b/docs/changelog/111465.yaml new file mode 100644 index 0000000000000..2a8df287427a9 --- /dev/null +++ b/docs/changelog/111465.yaml @@ -0,0 +1,5 @@ +pr: 111465 +summary: Add range and regexp Intervals +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/112826.yaml b/docs/changelog/112826.yaml new file mode 100644 index 0000000000000..65c05b4d6035a --- /dev/null +++ b/docs/changelog/112826.yaml @@ -0,0 +1,6 @@ +pr: 112826 +summary: "Multi term intervals: increase max_expansions" +area: Search +type: enhancement +issues: + - 110491 diff --git a/docs/changelog/113333.yaml b/docs/changelog/113333.yaml new file mode 100644 index 
0000000000000..c6a3584845729 --- /dev/null +++ b/docs/changelog/113333.yaml @@ -0,0 +1,5 @@ +pr: 113333 +summary: Upgrade to Lucene 9.12 +area: Search +type: upgrade +issues: [] diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc index 02980a4ed8a8c..0d3e76f71d238 100644 --- a/docs/plugins/analysis-nori.asciidoc +++ b/docs/plugins/analysis-nori.asciidoc @@ -244,11 +244,11 @@ Which responds with: "end_offset": 3, "type": "word", "position": 1, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JKS(Subject case marker)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JKS(Subject case marker)" }, { "token": "깊", @@ -268,11 +268,11 @@ Which responds with: "end_offset": 6, "type": "word", "position": 3, - "leftPOS": "E(Verbal endings)", + "leftPOS": "ETM(Adnominal form transformative ending)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "E(Verbal endings)" + "rightPOS": "ETM(Adnominal form transformative ending)" }, { "token": "나무", @@ -292,11 +292,11 @@ Which responds with: "end_offset": 10, "type": "word", "position": 5, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JX(Auxiliary postpositional particle)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JX(Auxiliary postpositional particle)" } ] }, diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 5273537389e3d..881970787f5a6 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -1430,7 +1430,8 @@ PUT /persian_example "decimal_digit", "arabic_normalization", "persian_normalization", - "persian_stop" + "persian_stop", + "persian_stem" ] } } diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc 
b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index 2cf01b77d57ab..5f98807387280 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -40,14 +40,14 @@ POST _analyze "start_offset": 0, "end_offset": 8, "type": "word", - "position": 0 + "position": 1 }, { "token": "/one/two/three", "start_offset": 0, "end_offset": 14, "type": "word", - "position": 0 + "position": 2 } ] } @@ -144,14 +144,14 @@ POST my-index-000001/_analyze "start_offset": 7, "end_offset": 18, "type": "word", - "position": 0 + "position": 1 }, { "token": "/three/four/five", "start_offset": 7, "end_offset": 23, "type": "word", - "position": 0 + "position": 2 } ] } @@ -178,14 +178,14 @@ If we were to set `reverse` to `true`, it would produce the following: [[analysis-pathhierarchy-tokenizer-detailed-examples]] === Detailed examples -A common use-case for the `path_hierarchy` tokenizer is filtering results by -file paths. If indexing a file path along with the data, the use of the -`path_hierarchy` tokenizer to analyze the path allows filtering the results +A common use-case for the `path_hierarchy` tokenizer is filtering results by +file paths. If indexing a file path along with the data, the use of the +`path_hierarchy` tokenizer to analyze the path allows filtering the results by different parts of the file path string. This example configures an index to have two custom analyzers and applies -those analyzers to multifields of the `file_path` text field that will +those analyzers to multifields of the `file_path` text field that will store filenames. One of the two analyzers uses reverse tokenization. Some sample documents are then indexed to represent some file paths for photos inside photo folders of two different users. 
@@ -264,8 +264,8 @@ POST file-path-test/_doc/5 -------------------------------------------------- -A search for a particular file path string against the text field matches all -the example documents, with Bob's documents ranking highest due to `bob` also +A search for a particular file path string against the text field matches all +the example documents, with Bob's documents ranking highest due to `bob` also being one of the terms created by the standard analyzer boosting relevance for Bob's documents. @@ -301,7 +301,7 @@ GET file-path-test/_search With the reverse parameter for this tokenizer, it's also possible to match from the other end of the file path, such as individual file names or a deep level subdirectory. The following example shows a search for all files named -`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field +`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field configured to use the reverse parameter in the mapping. @@ -342,7 +342,7 @@ POST file-path-test/_analyze It's also useful to be able to filter with file paths when combined with other -types of searches, such as this example looking for any files paths with `16` +types of searches, such as this example looking for any files paths with `16` that also must be in Alice's photo directory. [source,console] diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index 2d4110bdcb431..8ae8f59c22982 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -13,16 +13,10 @@ There are several thread pools, but the important ones include: [[search-threadpool]] `search`:: - For coordination of count/search operations at the shard level whose computation - is offloaded to the search_worker thread pool. Used also by fetch and other search + For count/search operations at the shard level. 
Used also by fetch and other search related operations Thread pool type is `fixed` with a size of `int((`<>`pass:[ * ]3) / 2) + 1`, and queue_size of `1000`. -`search_worker`:: - For the heavy workload of count/search operations that may be executed concurrently - across segments within the same shard when possible. Thread pool type is `fixed` - with a size of `int((`<>`pass:[ * ]3) / 2) + 1`, and unbounded queue_size . - [[search-throttled]]`search_throttled`:: For count/search/suggest/get operations on `search_throttled indices`. Thread pool type is `fixed` with a size of `1`, and queue_size of `100`. diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc index 1e3380389d861..9b3d1bb5c9f2a 100644 --- a/docs/reference/query-dsl/intervals-query.asciidoc +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -73,7 +73,9 @@ Valid rules include: * <> * <> * <> +* <> * <> +* <> * <> * <> -- @@ -122,8 +124,9 @@ unstemmed ones. ==== `prefix` rule parameters The `prefix` rule matches terms that start with a specified set of characters. -This prefix can expand to match at most 128 terms. If the prefix matches more -than 128 terms, {es} returns an error. You can use the +This prefix can expand to match at most `indices.query.bool.max_clause_count` +<> terms. If the prefix matches more terms, +{es} returns an error. You can use the <> option in the field mapping to avoid this limit. @@ -149,7 +152,8 @@ separate `analyzer` is specified. ==== `wildcard` rule parameters The `wildcard` rule matches terms using a wildcard pattern. This pattern can -expand to match at most 128 terms. If the pattern matches more than 128 terms, +expand to match at most `indices.query.bool.max_clause_count` +<> terms. If the pattern matches more terms, {es} returns an error. `pattern`:: @@ -178,12 +182,45 @@ The `pattern` is normalized using the search analyzer from this field, unless `analyzer` is specified separately. 
-- +[[intervals-regexp]] +==== `regexp` rule parameters + +The `regexp` rule matches terms using a regular expression pattern. +This pattern can expand to match at most `indices.query.bool.max_clause_count` +<> terms. +If the pattern matches more terms, {es} returns an error. + +`pattern`:: +(Required, string) Regexp pattern used to find matching terms. +For a list of operators supported by the +`regexp` pattern, see <>. + +WARNING: Avoid using wildcard patterns, such as `.*` or `.*?+`. This can +increase the iterations needed to find matching terms and slow search +performance. +-- +`analyzer`:: +(Optional, string) <> used to normalize the `pattern`. +Defaults to the top-level ``'s analyzer. + +-- +`use_field`:: ++ +-- +(Optional, string) If specified, match intervals from this field rather than the +top-level ``. + +The `pattern` is normalized using the search analyzer from this field, unless +`analyzer` is specified separately. +-- + [[intervals-fuzzy]] ==== `fuzzy` rule parameters The `fuzzy` rule matches terms that are similar to the provided term, within an edit distance defined by <>. If the fuzzy expansion matches more than -128 terms, {es} returns an error. +`indices.query.bool.max_clause_count` +<> terms, {es} returns an error. `term`:: (Required, string) The term to match @@ -214,6 +251,41 @@ The `term` is normalized using the search analyzer from this field, unless `analyzer` is specified separately. -- +[[intervals-range]] +==== `range` rule parameters + +The `range` rule matches terms contained within a provided range. +This range can expand to match at most `indices.query.bool.max_clause_count` +<> terms. +If the range matches more terms, {es} returns an error. + +`gt`:: +(Optional, string) Greater than: match terms greater than the provided term. + +`gte`:: +(Optional, string) Greater than or equal to: match terms greater than or +equal to the provided term. + +`lt`:: +(Optional, string) Less than: match terms less than the provided term. 
+ +`lte`:: +(Optional, string) Less than or equal to: match terms less than or +equal to the provided term. + +NOTE: It is required to provide one of `gt` or `gte` params. +It is required to provide one of `lt` or `lte` params. + + +`analyzer`:: +(Optional, string) <> used to normalize the `pattern`. +Defaults to the top-level ``'s analyzer. + +`use_field`:: +(Optional, string) If specified, match intervals from this field rather than the +top-level ``. + + [[intervals-all_of]] ==== `all_of` rule parameters diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 3fed14231808c..5f1a0ccfdd6b4 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1298,7 +1298,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "query" : [ { "type" : "DocAndScoreQuery", - "description" : "DocAndScore[100]", + "description" : "DocAndScoreQuery[0,...][0.008961825,...],0.008961825", "time_in_nanos" : 444414, "breakdown" : { "set_min_competitive_score_count" : 0, diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 472a65f9c6f24..9a59331bc1b4d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2814,124 +2814,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + + + + + + diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java index e2aea6b3ebd9f..4ed60b2f5e8b2 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java @@ -13,7 +13,7 @@ import 
org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -39,7 +39,7 @@ static Optional instance() { Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ); @@ -52,9 +52,5 @@ Optional getInt7SQVectorScorerSupplier( * @param queryVector the query vector * @return an optional containing the vector scorer, or empty */ - Optional getInt7SQVectorScorer( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ); + Optional getInt7SQVectorScorer(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector); } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a22d787980252..6248902c32e7a 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -25,7 +25,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - 
RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { throw new UnsupportedOperationException("should not reach here"); @@ -34,7 +34,7 @@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { throw new UnsupportedOperationException("should not reach here"); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a65fe582087d9..a863d9e3448ca 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.simdvec.internal.Int7SQVectorScorer; import org.elasticsearch.simdvec.internal.Int7SQVectorScorerSupplier.DotProductSupplier; @@ -38,7 +38,7 @@ private VectorScorerFactoryImpl() {} public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { input = FilterIndexInput.unwrapOnlyTest(input); @@ -57,7 +57,7 @@ public Optional getInt7SQVectorScorerSupplier( @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues 
values, float[] queryVector ) { return Int7SQVectorScorer.create(sim, values, queryVector); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index 0b41436ce2242..e02df124ad0f0 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -11,18 +11,14 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; public final class Int7SQVectorScorer { // Unconditionally returns an empty optional on <= JDK 21, since the scorer is only supported on JDK 22+ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { return Optional.empty(); } diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java index f6d874cd3e728..198e10406056e 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java @@ -12,7 +12,7 @@ import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import 
org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; import java.io.IOException; @@ -31,12 +31,12 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS final int maxOrd; final float scoreCorrectionConstant; final MemorySegmentAccessInput input; - final RandomAccessQuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds + final QuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds final ScalarQuantizedVectorSimilarity fallbackScorer; protected Int7SQVectorScorerSupplier( MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant, ScalarQuantizedVectorSimilarity fallbackScorer ) { @@ -104,11 +104,7 @@ public float score(int node) throws IOException { public static final class EuclideanSupplier extends Int7SQVectorScorerSupplier { - public EuclideanSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public EuclideanSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant, BITS)); } @@ -127,11 +123,7 @@ public EuclideanSupplier copy() { public static final class DotProductSupplier extends Int7SQVectorScorerSupplier { - public DotProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public DotProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(DOT_PRODUCT, scoreCorrectionConstant, BITS)); } @@ -151,11 +143,7 @@ public DotProductSupplier copy() { public static final class MaxInnerProductSupplier 
extends Int7SQVectorScorerSupplier { - public MaxInnerProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public MaxInnerProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant, BITS)); } diff --git a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index c9659ea1af9a8..3d0e1e71a3744 100644 --- a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -35,11 +35,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr byte[] scratch; /** Return an optional whose value, if present, is the scorer. Otherwise, an empty optional is returned. 
*/ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { checkDimensions(queryVector.length, values.dimension()); var input = values.getSlice(); if (input == null) { @@ -63,12 +59,7 @@ public static Optional create( }; } - Int7SQVectorScorer( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - byte[] queryVector, - float queryCorrection - ) { + Int7SQVectorScorer(MemorySegmentAccessInput input, QuantizedByteVectorValues values, byte[] queryVector, float queryCorrection) { super(values); this.input = input; assert queryVector.length == values.getVectorByteLength(); @@ -105,7 +96,7 @@ final void checkOrdinal(int ord) { } public static final class DotProductScorer extends Int7SQVectorScorer { - public DotProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public DotProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -122,7 +113,7 @@ public float score(int node) throws IOException { } public static final class EuclideanScorer extends Int7SQVectorScorer { - public EuclideanScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public EuclideanScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -136,7 +127,7 @@ public float score(int node) throws IOException { } public static final class MaxInnerProductScorer extends Int7SQVectorScorer { - public MaxInnerProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float corr) { + public 
MaxInnerProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float corr) { super(in, values, query, corr); } diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java index e5d963995d748..0f967127f6f2c 100644 --- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -237,6 +237,8 @@ void testRandomScorerImpl(long maxChunkSize, Function floatArr try (Directory dir = new MMapDirectory(createTempDir("testRandom"), maxChunkSize)) { for (var sim : List.of(COSINE, DOT_PRODUCT, EUCLIDEAN, MAXIMUM_INNER_PRODUCT)) { + // Use the random supplier for COSINE, which returns values in the normalized range + floatArraySupplier = sim == COSINE ? 
FLOAT_ARRAY_RANDOM_FUNC : floatArraySupplier; final int dims = randomIntBetween(1, 4096); final int size = randomIntBetween(2, 100); final float[][] vectors = new float[size][]; @@ -429,14 +431,13 @@ public Optional call() { } } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index b16c6eaaaa1d1..f4f7e787d2b7b 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -33,3 +33,7 @@ dependencies { artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } + +tasks.named("yamlRestCompatTestTransform").configure { task -> + task.replaceValueInMatch("tokens.0.token", "absenț", "romanian") +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 1c71c64311517..6b622444980d3 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -9,6 +9,7 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicStemFilter; import org.apache.lucene.analysis.bg.BulgarianStemFilter; @@ -38,8 +39,9 @@ import org.apache.lucene.analysis.lv.LatvianStemFilter; import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream; import org.apache.lucene.analysis.no.NorwegianLightStemFilter; -import org.apache.lucene.analysis.no.NorwegianLightStemmer; +import org.apache.lucene.analysis.no.NorwegianLightStemFilterFactory; import org.apache.lucene.analysis.no.NorwegianMinimalStemFilter; +import org.apache.lucene.analysis.no.NorwegianMinimalStemFilterFactory; import org.apache.lucene.analysis.pt.PortugueseLightStemFilter; import org.apache.lucene.analysis.pt.PortugueseMinimalStemFilter; import org.apache.lucene.analysis.pt.PortugueseStemFilter; @@ -62,14 +64,11 @@ import org.tartarus.snowball.ext.EstonianStemmer; import org.tartarus.snowball.ext.FinnishStemmer; import org.tartarus.snowball.ext.FrenchStemmer; -import org.tartarus.snowball.ext.German2Stemmer; import org.tartarus.snowball.ext.GermanStemmer; import org.tartarus.snowball.ext.HungarianStemmer; import org.tartarus.snowball.ext.IrishStemmer; import org.tartarus.snowball.ext.ItalianStemmer; -import org.tartarus.snowball.ext.KpStemmer; import org.tartarus.snowball.ext.LithuanianStemmer; -import org.tartarus.snowball.ext.LovinsStemmer; import org.tartarus.snowball.ext.NorwegianStemmer; import org.tartarus.snowball.ext.PortugueseStemmer; import org.tartarus.snowball.ext.RomanianStemmer; @@ -80,6 +79,7 @@ import org.tartarus.snowball.ext.TurkishStemmer; import java.io.IOException; +import java.util.Collections; public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { @@ -87,27 +87,13 @@ public class 
StemmerTokenFilterFactory extends AbstractTokenFilterFactory { private static final TokenStream EMPTY_TOKEN_STREAM = new EmptyTokenStream(); - private String language; + private final String language; StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { super(name, settings); this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter"))); // check that we have a valid language by trying to create a TokenStream create(EMPTY_TOKEN_STREAM).close(); - if ("lovins".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "lovins_deprecation", - "The [lovins] stemmer is deprecated and will be removed in a future version." - ); - } - if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "dutch_kp_deprecation", - "The [dutch_kp] stemmer is deprecated and will be removed in a future version." - ); - } } @Override @@ -135,8 +121,17 @@ public TokenStream create(TokenStream tokenStream) { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new KpStemmer()); - + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "dutch_kp_deprecation", + "The [dutch_kp] stemmer is deprecated and will be removed in a future version." 
+ ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; // English stemmers } else if ("english".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -145,7 +140,17 @@ public TokenStream create(TokenStream tokenStream) { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new LovinsStemmer()); + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "lovins_deprecation", + "The [lovins] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { @@ -185,7 +190,8 @@ public TokenStream create(TokenStream tokenStream) { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new German2Stemmer()); + // TODO Lucene 10 upgrade: how about bw comp for users relying on german2 stemmer that is now folded into german stemmer? 
+ return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); } else if ("minimal_german".equalsIgnoreCase(language) || "minimalGerman".equalsIgnoreCase(language)) { @@ -231,10 +237,13 @@ public TokenStream create(TokenStream tokenStream) { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { - return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); + NorwegianLightStemFilterFactory factory = new NorwegianLightStemFilterFactory(Collections.singletonMap("variant", "nn")); + return factory.create(tokenStream); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { - return new NorwegianMinimalStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); - + NorwegianMinimalStemFilterFactory factory = new NorwegianMinimalStemFilterFactory( + Collections.singletonMap("variant", "nn") + ); + return factory.create(tokenStream); // Persian stemmers } else if ("persian".equalsIgnoreCase(language)) { return new PersianStemFilter(tokenStream); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index b406fa8335779..0d936666e92cd 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -278,7 +278,7 @@ public void testPhrasePrefix() throws IOException { boolQuery().should(matchPhrasePrefixQuery("field1", "test")).should(matchPhrasePrefixQuery("field1", "bro")) 
).highlighter(highlight().field("field1").order("score").preTags("").postTags("")), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); for (int i = 0; i < 2; i++) { assertHighlight( resp, diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index c03bdb3111050..7674b95af4851 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -948,7 +948,7 @@ text: absenţa analyzer: romanian - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } - do: indices.analyze: @@ -957,7 +957,7 @@ text: absenţa analyzer: my_analyzer - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } --- "russian": diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index ed2ce47d11dc3..cb74d62137815 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -439,13 +439,13 @@ private static CharacterRunAutomaton buildAutomaton(List includePatterns ? 
includeAutomaton : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CharacterRunAutomaton(finalAutomaton); + return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton patternsToAutomaton(List patterns) { final List automata = patterns.stream().map(s -> { final String regex = s.replace(".", "\\.").replace("*", ".*"); - return new RegExp(regex).toAutomaton(); + return new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); }).toList(); if (automata.isEmpty()) { return null; diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 8e7ecfa49f144..777ddc28fefdc 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1706,7 +1706,7 @@ public void testSegmentsSortedOnTimestampDesc() throws Exception { assertResponse( prepareSearch("metrics-foo").addFetchField(new FieldAndFormat(DEFAULT_TIMESTAMP_FIELD, "epoch_millis")).setSize(totalDocs), resp -> { - assertEquals(totalDocs, resp.getHits().getTotalHits().value); + assertEquals(totalDocs, resp.getHits().getTotalHits().value()); SearchHit[] hits = resp.getHits().getHits(); assertEquals(totalDocs, hits.length); @@ -2027,7 +2027,7 @@ static void indexDocs(String dataStream, int numDocs) { static void verifyDocs(String dataStream, long expectedNumHits, List expectedIndices) { assertResponse(prepareSearch(dataStream).setSize((int) expectedNumHits), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); 
Arrays.stream(resp.getHits().getHits()).forEach(hit -> assertTrue(expectedIndices.contains(hit.getIndex()))); }); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 686e253d1d173..a2557a4de6e6d 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -545,7 +545,7 @@ public void testTrimId() throws Exception { var searchRequest = new SearchRequest(dataStreamName); searchRequest.source().trackTotalHits(true); assertResponse(client().search(searchRequest), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numBulkRequests * numDocsPerBulk)); String id = searchResponse.getHits().getHits()[0].getId(); assertThat(id, notNullValue()); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 6942cc3733d1e..f8c8d2bd359f3 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -256,8 +256,8 @@ public void testGeoIpDatabasesDownload() throws Exception { res -> { try { TotalHits totalHits = res.getHits().getTotalHits(); - assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation); - assertEquals(size, totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation()); + assertEquals(size, totalHits.value()); assertEquals(size, 
res.getHits().getHits().length); List data = new ArrayList<>(); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java index 570c2a5f3783a..df6780aba7222 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java @@ -81,7 +81,7 @@ public void testBasic() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'] + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -91,7 +91,7 @@ public void testFunction() throws Exception { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertNoFailuresAndResponse(buildRequest("doc['foo'] + abs(1)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -102,7 +102,7 @@ public void testBasicUsingDotValue() throws Exception { prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'].value + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -125,7 +125,7 @@ public void testScore() throws Exception { assertResponse(req, rsp -> { assertNoFailures(rsp); SearchHits hits = rsp.getHits(); - 
assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals("1", hits.getAt(0).getId()); assertEquals("3", hits.getAt(1).getId()); assertEquals("2", hits.getAt(2).getId()); @@ -148,25 +148,25 @@ public void testDateMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getMonth() + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -182,25 +182,25 @@ public void testDateObjectMethods() throws Exception { prepareIndex("test").setId("2").setSource("id", 2, "date0", 
"2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.year"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -238,7 +238,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, 
hits.getAt(2).field("foo").getValue(), 0.0D); @@ -246,7 +246,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -254,7 +254,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -262,7 +262,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -270,7 +270,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -278,7 +278,7 @@ public void 
testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -286,7 +286,7 @@ public void testMultiValueMethods() throws Exception { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -295,7 +295,7 @@ public void testMultiValueMethods() throws Exception { // make sure count() works for missing assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -304,7 +304,7 @@ public void testMultiValueMethods() throws Exception { // make sure .empty works in the same way assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 
5.0 : 2.0"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -342,7 +342,7 @@ public void testSparseField() throws Exception { ); assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); @@ -378,7 +378,7 @@ public void testParams() throws Exception { String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)"; assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -501,7 +501,7 @@ public void testSpecialValueVariable() throws Exception { ); assertResponse(req, rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); Stats stats = rsp.getAggregations().get("int_agg"); assertEquals(39.0, stats.getMax(), 0.0001); @@ -655,22 +655,22 @@ public void testGeo() throws Exception { refresh(); // access .lat assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .lon 
assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // call haversin assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); }); } @@ -693,14 +693,14 @@ public void testBoolean() throws Exception { ); // access .value assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 1 : 0"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -708,7 +708,7 @@ public void testBoolean() throws Exception { // ternary operator // vip's have a 50% discount assertNoFailuresAndResponse(buildRequest("doc['vip'] ? 
doc['price']/2 : doc['price']"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -727,7 +727,7 @@ public void testFilterScript() throws Exception { Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script))); assertNoFailuresAndResponse(builder, rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java index 0952ff8fe856f..bb714d4674ed6 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java @@ -17,6 +17,8 @@ import org.apache.lucene.search.SortField; import org.elasticsearch.script.DoubleValuesScript; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.function.Function; /** @@ -37,12 +39,20 @@ public DoubleValuesScript newInstance() { return new DoubleValuesScript() { @Override public double execute() { - return exprScript.evaluate(new DoubleValues[0]); + try { + return exprScript.evaluate(new DoubleValues[0]); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override public double evaluate(DoubleValues[] functionValues) { - return 
exprScript.evaluate(functionValues); + try { + return exprScript.evaluate(functionValues); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index b306f104d7ba5..0082192493d53 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -14,7 +14,6 @@ import org.apache.lucene.expressions.js.JavascriptCompiler; import org.apache.lucene.expressions.js.VariableContext; import org.apache.lucene.search.DoubleValuesSource; -import org.elasticsearch.SpecialPermission; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; @@ -24,7 +23,6 @@ import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; -import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.DoubleValuesScript; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.FilterScript; @@ -36,9 +34,8 @@ import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.search.lookup.SearchLookup; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.io.IOException; +import java.io.UncheckedIOException; import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; @@ -156,36 +153,13 @@ public String getType() { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { - // classloader 
created here - final SecurityManager sm = System.getSecurityManager(); - SpecialPermission.check(); - Expression expr = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Expression run() { - try { - // snapshot our context here, we check on behalf of the expression - AccessControlContext engineContext = AccessController.getContext(); - ClassLoader loader = getClass().getClassLoader(); - if (sm != null) { - loader = new ClassLoader(loader) { - @Override - protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - try { - engineContext.checkPermission(new ClassPermission(name)); - } catch (SecurityException e) { - throw new ClassNotFoundException(name, e); - } - return super.loadClass(name, resolve); - } - }; - } - // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here - return JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS, loader); - } catch (ParseException e) { - throw convertToScriptException("compile error", scriptSource, scriptSource, e); - } - } - }); + Expression expr; + try { + // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here + expr = JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS); + } catch (ParseException e) { + throw convertToScriptException("compile error", scriptSource, scriptSource, e); + } if (contexts.containsKey(context) == false) { throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } @@ -233,7 +207,11 @@ public Double execute() { placeholder.setValue(((Number) value).doubleValue()); } }); - return expr.evaluate(functionValuesArray); + try { + return expr.evaluate(functionValuesArray); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } }; }; diff --git 
a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 3efcfde684ebc..a3c0c60d75436 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -138,7 +138,7 @@ protected void assertEqualInstances(SearchTemplateResponse expectedInstance, Sea SearchResponse expectedResponse = expectedInstance.getResponse(); SearchResponse newResponse = newInstance.getResponse(); - assertEquals(expectedResponse.getHits().getTotalHits().value, newResponse.getHits().getTotalHits().value); + assertEquals(expectedResponse.getHits().getTotalHits().value(), newResponse.getHits().getTotalHits().value()); assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index fed598e46fbd9..cbb0e19d64a6e 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -74,11 +74,6 @@ public void testMapBasic() throws IOException { Map state = new HashMap<>(); Scorable scorer = new Scorable() { - @Override - public int docID() { - return 0; - } - @Override public float score() { return 0.5f; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 01a9e995450aa..7edd6d5303252 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -85,7 +85,7 @@ public void testBasics() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } @@ -134,7 +134,7 @@ public void testWeightScript() throws IOException { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java index 19173c650c24a..1c6ffe75e3fd2 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java @@ -43,7 +43,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(20f)); } @@ -52,7 +52,7 @@ public void testRankFeaturesTermQuery() throws IOException { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)), 
searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(2000f)); } @@ -67,7 +67,7 @@ public void testRankFeaturesTermQuery() throws IOException { .minimumShouldMatch(1) ), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); for (SearchHit hit : searchResponse.getHits().getHits()) { if (hit.getId().equals("all")) { assertThat(hit.getScore(), equalTo(50f)); @@ -83,7 +83,7 @@ public void testRankFeaturesTermQuery() throws IOException { ); assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index 4fc4fc69e0ee8..97c97a643e9c8 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -203,7 +203,7 @@ private SearchRequestBuilder prepareTokenCountFieldMapperSearch() { } private void assertSearchReturns(SearchResponse result, String... 
ids) { - assertThat(result.getHits().getTotalHits().value, equalTo((long) ids.length)); + assertThat(result.getHits().getTotalHits().value(), equalTo((long) ids.length)); assertThat(result.getHits().getHits().length, equalTo(ids.length)); List foundIds = new ArrayList<>(); for (SearchHit hit : result.getHits()) { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 6693d24fe78e2..5904169308fab 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -21,6 +21,7 @@ import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; @@ -270,7 +271,11 @@ public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext conte @Override public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext context) { - return toIntervalsSource(Intervals.prefix(term), new PrefixQuery(new Term(name(), term)), context); + return toIntervalsSource( + Intervals.prefix(term, IndexSearcher.getMaxClauseCount()), + new PrefixQuery(new Term(name(), term)), + context + ); } @Override @@ -285,23 +290,47 @@ public IntervalsSource fuzzyIntervals( new Term(name(), term), maxDistance, prefixLength, - 128, + IndexSearcher.getMaxClauseCount(), transpositions, MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE ); - IntervalsSource fuzzyIntervals = Intervals.multiterm(fuzzyQuery.getAutomata(), term); + IntervalsSource fuzzyIntervals = 
Intervals.multiterm(fuzzyQuery.getAutomata(), IndexSearcher.getMaxClauseCount(), term); return toIntervalsSource(fuzzyIntervals, fuzzyQuery, context); } @Override public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { return toIntervalsSource( - Intervals.wildcard(pattern), + Intervals.wildcard(pattern, IndexSearcher.getMaxClauseCount()), new MatchAllDocsQuery(), // wildcard queries can be expensive, what should the approximation be? context ); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + return toIntervalsSource( + Intervals.regexp(pattern, IndexSearcher.getMaxClauseCount()), + new MatchAllDocsQuery(), // regexp queries can be expensive, what should the approximation be? + context + ); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + return toIntervalsSource( + Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper, IndexSearcher.getMaxClauseCount()), + new MatchAllDocsQuery(), // range queries can be expensive, what should the approximation be? 
+ context + ); + } + @Override public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, SearchExecutionContext queryShardContext) throws IOException { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java index bce6ffb5e0ea3..f277d28eed922 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java @@ -468,8 +468,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? 
new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index d16034c5de2fd..a992f68d93d9e 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TwoPhaseIterator; @@ -266,7 +267,7 @@ public boolean isCacheable(LeafReaderContext ctx) { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0); if (scorer == null) { return Explanation.noMatch("No matching phrase"); } @@ -286,15 +287,26 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public RuntimePhraseScorer scorer(LeafReaderContext context) throws IOException { - final Scorer approximationScorer = approximationWeight != null ? 
approximationWeight.scorer(context) : null; - if (approximationScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier approximationSupplier = approximationWeight != null ? approximationWeight.scorerSupplier(context) : null; + if (approximationSupplier == null) { return null; } - final DocIdSetIterator approximation = approximationScorer.iterator(); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); - final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); - return new RuntimePhraseScorer(this, approximation, leafSimScorer, valueFetcher, field, in); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximationScorer = approximationSupplier.get(leadCost); + final DocIdSetIterator approximation = approximationScorer.iterator(); + final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); + final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); + return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in); + } + + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -310,7 +322,7 @@ public Matches matches(LeafReaderContext context, int doc) throws IOException { Weight innerWeight = in.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1); return innerWeight.matches(context, doc); } - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0L); if (scorer == null) { return null; } @@ -336,14 +348,12 @@ private class RuntimePhraseScorer extends Scorer { private float freq; private RuntimePhraseScorer( - Weight weight, DocIdSetIterator approximation, LeafSimScorer scorer, CheckedIntFunction, IOException> 
valueFetcher, String field, Query query ) { - super(weight); this.scorer = scorer; this.valueFetcher = valueFetcher; this.field = field; diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index 922b92263d712..1eb6083cfe453 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -89,8 +89,8 @@ private void assertPhraseQuery(MapperService mapperService) throws IOException { SearchExecutionContext context = createSearchExecutionContext(mapperService, newSearcher(reader)); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "brown fox"); TopDocs docs = context.searcher().search(queryBuilder.toQuery(context), 1); - assertThat(docs.totalHits.value, equalTo(1L)); - assertThat(docs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(docs.totalHits.value(), equalTo(1L)); + assertThat(docs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(docs.scoreDocs[0].doc, equalTo(0)); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java index 4c20802a45058..6970dd6739ecf 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import 
org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; @@ -152,30 +153,56 @@ public void testPhrasePrefixQuery() throws IOException { assertNotEquals(new MatchAllDocsQuery(), SourceConfirmedTextQuery.approximate(delegate)); } - public void testTermIntervals() throws IOException { + public void testTermIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource termIntervals = ft.termIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(termIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); assertEquals(Intervals.term(new BytesRef("foo")), ((SourceIntervalsSource) termIntervals).getIntervalsSource()); } - public void testPrefixIntervals() throws IOException { + public void testPrefixIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(prefixIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); - assertEquals(Intervals.prefix(new BytesRef("foo")), ((SourceIntervalsSource) prefixIntervals).getIntervalsSource()); + assertEquals( + Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) prefixIntervals).getIntervalsSource() + ); } - public void testWildcardIntervals() throws IOException { + public void testWildcardIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); assertThat(wildcardIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); - assertEquals(Intervals.wildcard(new BytesRef("foo")), ((SourceIntervalsSource) wildcardIntervals).getIntervalsSource()); + assertEquals( + Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) 
wildcardIntervals).getIntervalsSource() + ); + } + + public void testRegexpIntervals() { + MappedFieldType ft = new MatchOnlyTextFieldType("field"); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertThat(regexpIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); + assertEquals( + Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) regexpIntervals).getIntervalsSource() + ); } - public void testFuzzyIntervals() throws IOException { + public void testFuzzyIntervals() { MappedFieldType ft = new MatchOnlyTextFieldType("field"); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); assertThat(fuzzyIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); } + + public void testRangeIntervals() { + MappedFieldType ft = new MatchOnlyTextFieldType("field"); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertThat(rangeIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + ((SourceIntervalsSource) rangeIntervals).getIntervalsSource() + ); + } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java index 84139409e8bc6..a49e0c2a3e38d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java @@ -61,7 +61,7 @@ public class SourceConfirmedTextQueryTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = context -> docID -> { 
sourceFetchCount.incrementAndGet(); - return Collections.singletonList(context.reader().document(docID).get("body")); + return Collections.singletonList(context.reader().storedFields().document(docID).get("body")); }; public void testTerm() throws Exception { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java index 0fef801b22009..2befcfb576017 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java @@ -41,7 +41,7 @@ public class SourceIntervalsSourceTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = - context -> docID -> Collections.singletonList(context.reader().document(docID).get("body")); + context -> docID -> Collections.singletonList(context.reader().storedFields().document(docID).get("body")); public void testIntervals() throws IOException { final FieldType ft = new FieldType(TextField.TYPE_STORED); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java index ad8e252e3fd63..9c0e5ce071dc6 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -115,7 +115,7 @@ public void testParentWithMultipleBuckets() { logger.info("bucket={}", bucket.getKey()); Children childrenBucket = bucket.getAggregations().get("to_comment"); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - logger.info("total_hits={}", 
topHits.getHits().getTotalHits().value); + logger.info("total_hits={}", topHits.getHits().getTotalHits().value()); for (SearchHit searchHit : topHits.getHits()) { logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); } @@ -129,7 +129,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(2L)); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(2L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(2L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("e")); assertThat(topHits.getHits().getAt(1).getId(), equalTo("f")); @@ -141,7 +141,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); categoryBucket = categoryTerms.getBucketByKey("c"); @@ -152,7 +152,7 @@ public void testParentWithMultipleBuckets() { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); } ); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index 872165014f5a4..cce0ef06cbf62 100644 --- 
a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -107,7 +107,7 @@ public void testMultiLevelChild() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -117,7 +117,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -127,7 +127,7 @@ public void testMultiLevelChild() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -135,7 +135,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -143,7 +143,7 @@ public void testMultiLevelChild() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -161,7 +161,7 @@ public void test2744() throws IOException { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } ); @@ -182,7 +182,7 @@ public void testSimpleChildQuery() throws Exception { // TEST FETCHING _parent from child assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -195,7 +195,7 @@ public void testSimpleChildQuery() throws Exception { boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -208,7 +208,7 @@ public void testSimpleChildQuery() throws Exception { // HAS CHILD 
assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")), response -> { assertHitCount(response, 1L); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); }); @@ -307,8 +307,8 @@ public void testHasParentFilter() throws Exception { ).setSize(numChildDocsPerParent), response -> { Set childIds = parentToChildrenEntry.getValue(); - assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); - for (int i = 0; i < response.getHits().getTotalHits().value; i++) { + assertThat(response.getHits().getTotalHits().value(), equalTo((long) childIds.size())); + for (int i = 0; i < response.getHits().getTotalHits().value(); i++) { assertThat(childIds.remove(response.getHits().getAt(i).getId()), is(true)); assertThat(response.getHits().getAt(i).getScore(), is(1.0f)); } @@ -341,7 +341,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -349,7 +349,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -357,7 +357,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( 
prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -367,7 +367,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -375,7 +375,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -383,7 +383,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -426,7 +426,7 @@ public void testScopedFacet() throws Exception { ) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -458,7 +458,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -472,7 +472,7 @@ public void testDeletedParent() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\"")); } @@ -647,7 +647,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -667,7 +667,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -687,7 +687,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -707,7 +707,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ) ).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("17")); @@ -768,7 +768,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); } ); @@ -778,7 +778,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false)) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); } ); @@ -801,7 +801,7 @@ public void testHasChildInnerHitsHighlighting() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits(); assertThat(searchHits.length, equalTo(1)); @@ -888,7 +888,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("p_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("p000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("p001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("p002")); @@ -903,7 +903,7 @@ public void testSimpleQueryRewrite() throws Exception { .addSort("c_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(500L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(500L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("c000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("c001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("c002")); @@ -932,7 +932,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), 
equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -943,7 +943,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c3")); assertThat(response.getHits().getAt(1).getId(), equalTo("c4")); } @@ -961,7 +961,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -972,7 +972,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); assertThat(response.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); } @@ -996,7 +996,7 @@ public void testHasChildQueryWithMinimumScore() throws Exception { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)).setMinScore(3), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); assertThat(response.getHits().getAt(0).getScore(), equalTo(3.0f)); } @@ -1411,7 +1411,7 @@ public void testParentChildQueriesViaScrollApi() throws Exception { 10, (respNum, response) -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); } ); } @@ -1469,7 +1469,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = NONE assertResponse(minMaxQuery(ScoreMode.None, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1479,7 +1479,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("4")); @@ -1487,7 +1487,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1495,7 +1495,7 @@ 
public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.None, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1505,7 +1505,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1515,7 +1515,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1523,7 +1523,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.None, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1533,7 +1533,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = SUM 
assertResponse(minMaxQuery(ScoreMode.Total, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1543,7 +1543,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1551,7 +1551,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); }); @@ -1559,7 +1559,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Total, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Total, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1569,7 +1569,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 3), response -> 
{ - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1579,7 +1579,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1587,7 +1587,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1597,7 +1597,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = MAX assertResponse(minMaxQuery(ScoreMode.Max, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1607,7 +1607,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1615,7 +1615,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1623,7 +1623,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Max, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Max, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1633,7 +1633,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1643,7 +1643,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); 
assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1651,7 +1651,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1661,7 +1661,7 @@ public void testMinMaxChildren() throws Exception { // Score mode = AVG assertResponse(minMaxQuery(ScoreMode.Avg, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1671,7 +1671,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1679,7 +1679,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); 
assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1687,7 +1687,7 @@ public void testMinMaxChildren() throws Exception { assertHitCount(minMaxQuery(ScoreMode.Avg, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1697,7 +1697,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1707,7 +1707,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1715,7 +1715,7 @@ public void testMinMaxChildren() throws Exception { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); }); diff 
--git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java index 0ae10b297f709..6d6072b2992ca 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java @@ -128,7 +128,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getAt(0).getId(), equalTo("c1")); assertThat(innerHits.getAt(1).getId(), equalTo("c2")); @@ -148,7 +148,7 @@ public void testSimpleParentChild() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getAt(0).getId(), equalTo("c4")); assertThat(innerHits.getAt(1).getId(), equalTo("c5")); @@ -280,7 +280,7 @@ public void testRandomParentChild() throws Exception { assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); @@ -290,7 +290,7 @@ public void testRandomParentChild() throws Exception { 
offset1 += child1InnerObjects[parent]; inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child2InnerObjects[parent])); for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); @@ -347,12 +347,12 @@ public void testInnerHitsOnHasParent() throws Exception { SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); searchHit = response.getHits().getAt(1); assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); } ); @@ -394,11 +394,11 @@ public void testParentChildMultipleLayers() throws Exception { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("3")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("5")); } ); @@ -417,11 +417,11 @@ public void testParentChildMultipleLayers() throws Exception { 
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("4")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("6")); } ); @@ -482,34 +482,34 @@ public void testRoyals() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls"); - assertThat(innerHits.getTotalHits().value, equalTo(4L)); + assertThat(innerHits.getTotalHits().value(), equalTo(4L)); assertThat(innerHits.getAt(0).getId(), equalTo("earl1")); assertThat(innerHits.getAt(1).getId(), equalTo("earl2")); assertThat(innerHits.getAt(2).getId(), equalTo("earl3")); assertThat(innerHits.getAt(3).getId(), equalTo("earl4")); SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1")); innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2")); innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3")); innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons"); - 
assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4")); innerHits = response.getHits().getAt(0).getInnerHits().get("princes"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("prince")); innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("king")); } ); @@ -532,12 +532,12 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { response -> { assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); } @@ -549,7 +549,7 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { 
assertResponse(prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC), response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2")); }); diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index 258cbe743d7d3..60412179807a5 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -102,7 +102,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag public void collect(int docId, long owningBucketOrd) throws IOException { if (parentDocs.get(docId) && globalOrdinals.advanceExact(docId)) { int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; collectionStrategy.add(owningBucketOrd, globalOrdinal); } } @@ -134,11 +134,6 @@ protected void prepareSubAggs(long[] ordsToCollect) throws IOException { public float score() { return 1f; } - - @Override - public int docID() { - return childDocsIter.docID(); - } }); final Bits liveDocs = ctx.reader().getLiveDocs(); @@ -150,7 +145,7 @@ public int docID() { continue; } int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != 
-1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; /* * Check if we contain every ordinal. It's almost certainly be * faster to replay all the matching ordinals and filter them down diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 9ecf4ed821e2a..6b00e94431bef 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -20,8 +20,8 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -137,12 +137,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } for (LeafReaderContext ctx : 
this.context.searcher().getIndexReader().leaves()) { diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 03a1677e60f47..707fcc822665f 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -107,7 +107,7 @@ public void testParentChild() throws IOException { // verify for each children for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexReader, aggregation -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId("child0_" + parent))), indexReader, aggregation -> { assertEquals( "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation, 1, diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index 91ec0e3c67691..ca90b0e588b18 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -104,7 +104,7 @@ public void testParentChild() throws IOException { }); for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexReader, child -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(parent))), indexReader, child -> { assertEquals((long) expectedParentChildRelations.get(parent).v1(), 
child.getDocCount()); assertEquals( expectedParentChildRelations.get(parent).v2(), diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index d4fe49ec8c773..9244f815cd957 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -54,6 +54,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; @@ -341,13 +342,13 @@ static void assertLateParsingQuery(Query query, String type, String id) throws I BooleanQuery booleanQuery = (BooleanQuery) lateParsingQuery.getInnerQuery(); assertThat(booleanQuery.clauses().size(), equalTo(2)); // check the inner ids query, we have to call rewrite to get to check the type it's executed against - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); - TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).getQuery(); - assertEquals(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(id)), termsQuery); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(booleanQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); + TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).query(); + assertEquals(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(id))), termsQuery); // check the type filter - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.FILTER)); - assertEquals(new TermQuery(new Term("join_field", type)), 
booleanQuery.clauses().get(1).getQuery()); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.FILTER)); + assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).query()); } @Override diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 255131b51a57a..393c7b6157077 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -110,74 +110,93 @@ public Explanation explain(LeafReaderContext leafReaderContext, int docId) throw } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext); - if (approximation == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext leafReaderContext) throws IOException { + final ScorerSupplier approximationSupplier = candidateMatchesWeight.scorerSupplier(leafReaderContext); + if (approximationSupplier == null) { return null; } - final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + ScorerSupplier verifiedDocsScorer; if (scoreMode.needsScores()) { - return new BaseScorer(this, approximation) { - - float score; - - @Override - boolean matchDocId(int docId) throws IOException { - Query query = percolatorQueries.apply(docId); - if (query != null) { - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST) - .add(nonNestedDocsFilter, Occur.FILTER) - .build(); - } - TopDocs topDocs = percolatorIndexSearcher.search(query, 1); - if (topDocs.scoreDocs.length > 0) { - score = topDocs.scoreDocs[0].score; - return true; - } else { - return false; + verifiedDocsScorer = null; + } else { + verifiedDocsScorer = 
verifiedMatchesWeight.scorerSupplier(leafReaderContext); + } + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximation = approximationSupplier.get(leadCost); + final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + if (scoreMode.needsScores()) { + return new BaseScorer(approximation) { + + float score; + + @Override + boolean matchDocId(int docId) throws IOException { + Query query = percolatorQueries.apply(docId); + if (query != null) { + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + TopDocs topDocs = percolatorIndexSearcher.search(query, 1); + if (topDocs.scoreDocs.length > 0) { + score = topDocs.scoreDocs[0].score; + return true; + } else { + return false; + } + } else { + return false; + } } - } else { - return false; - } - } - @Override - public float score() { - return score; - } - }; - } else { - ScorerSupplier verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext); - Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); - return new BaseScorer(this, approximation) { + @Override + public float score() { + return score; + } + }; + } else { + Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); + return new BaseScorer(approximation) { + + @Override + public float score() throws IOException { + return 0f; + } - @Override - public float score() throws IOException { - return 0f; + boolean matchDocId(int docId) throws IOException { + // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. 
+ // If docId also appears in the verifiedDocsBits then that means during indexing + // we were able to extract all query terms and for this candidate match + // and we determined based on the nature of the query that it is safe to skip + // the MemoryIndex verification. + if (verifiedDocsBits.get(docId)) { + return true; + } + Query query = percolatorQueries.apply(docId); + if (query == null) { + return false; + } + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + return Lucene.exists(percolatorIndexSearcher, query); + } + }; } + } - boolean matchDocId(int docId) throws IOException { - // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. - // If docId also appears in the verifiedDocsBits then that means during indexing - // we were able to extract all query terms and for this candidate match - // and we determined based on the nature of the query that it is safe to skip - // the MemoryIndex verification. 
- if (verifiedDocsBits.get(docId)) { - return true; - } - Query query = percolatorQueries.apply(docId); - if (query == null) { - return false; - } - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST).add(nonNestedDocsFilter, Occur.FILTER).build(); - } - return Lucene.exists(percolatorIndexSearcher, query); - } - }; - } + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -265,8 +284,7 @@ abstract static class BaseScorer extends Scorer { final Scorer approximation; - BaseScorer(Weight weight, Scorer approximation) { - super(weight); + BaseScorer(Scorer approximation) { this.approximation = approximation; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 409b6fd70c3c7..d6422efdfed26 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -294,7 +294,7 @@ Tuple createCandidateQuery(IndexReader indexReader) throw List extractedTerms = t.v1(); Map> encodedPointValuesByField = t.v2(); // `1 + ` is needed to take into account the EXTRACTION_FAILED should clause - boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery + boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= IndexSearcher .getMaxClauseCount(); List subQueries = new ArrayList<>(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index c363746856681..8413b564c2041 100644 --- 
a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -91,7 +91,7 @@ public void process(HitContext hitContext) throws IOException { query = percolatorIndexSearcher.rewrite(query); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC)); - if (topDocs.totalHits.value == 0) { + if (topDocs.totalHits.value() == 0) { // This hit didn't match with a percolate query, // likely to happen when percolating multiple documents continue; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index da4b10956dcf8..0e9aa6de3a0c0 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.percolator; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanTermQuery; @@ -26,12 +25,15 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.query.DateRangeIncludingNowQuery; import org.elasticsearch.lucene.queries.BlendedTermQuery; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collections; @@ -162,7 +164,7 @@ public QueryVisitor getSubVisitor(Occur occur, Query parent) { int minimumShouldMatchValue = 0; if (parent instanceof BooleanQuery bq) { if (bq.getMinimumNumberShouldMatch() == 0 - && bq.clauses().stream().anyMatch(c -> c.getOccur() == Occur.MUST || c.getOccur() == Occur.FILTER)) { + && bq.clauses().stream().anyMatch(c -> c.occur() == Occur.MUST || c.occur() == Occur.FILTER)) { return QueryVisitor.EMPTY_VISITOR; } minimumShouldMatchValue = bq.getMinimumNumberShouldMatch(); @@ -198,11 +200,15 @@ public void consumeTerms(Query query, Term... termsToConsume) { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - PrefixCodedTerms.TermIterator ti = q.getTermData().iterator(); + BytesRefIterator bytesRefIterator = q.getBytesRefIterator(); BytesRef term; Set qe = new HashSet<>(); - while ((term = ti.next()) != null) { - qe.add(new QueryExtraction(new Term(field, term))); + try { + while ((term = bytesRefIterator.next()) != null) { + qe.add(new QueryExtraction(new Term(field, term))); + } + } catch (IOException e) { + throw new UncheckedIOException(e); } this.terms.add(new Result(true, qe, 1)); } else { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 31e893ace72fd..ff321303b56c0 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.memory.MemoryIndex; @@ -56,6 
+57,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermInSetQuery; @@ -246,15 +248,13 @@ public void testDuel() throws Exception { queryFunctions.add( () -> new TermInSetQuery( field1, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); queryFunctions.add( () -> new TermInSetQuery( field2, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); // many iterations with boolean queries, which are the most complex queries to deal with when nested @@ -647,7 +647,7 @@ public void testRangeQueries() throws Exception { v ); TopDocs topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); @@ -655,7 +655,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); @@ -663,7 +663,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, 
Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(2, topDocs.scoreDocs[0].doc); @@ -671,7 +671,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3, topDocs.scoreDocs[0].doc); @@ -679,7 +679,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(4, topDocs.scoreDocs[0].doc); @@ -690,7 +690,7 @@ public void testRangeQueries() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(5, topDocs.scoreDocs[0].doc); } @@ -836,14 +836,14 @@ public void testPercolateMatchAll() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); 
assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); assertEquals(4, topDocs.scoreDocs[2].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); @@ -875,7 +875,7 @@ public void testFunctionScoreQuery() throws Exception { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -931,15 +931,15 @@ public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -947,10 +947,10 @@ public void testPercolateSmallAndLargeDocument() throws Exception { } // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery: - int origMaxClauseCount 
= BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try (Directory directory = new ByteBuffersDirectory()) { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { Document document = new Document(); for (int i = 0; i < maxClauseCount; i++) { @@ -970,22 +970,22 @@ public void testPercolateSmallAndLargeDocument() throws Exception { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); } } finally { - BooleanQuery.setMaxClauseCount(origMaxClauseCount); + IndexSearcher.setMaxClauseCount(origMaxClauseCount); } } @@ -1032,7 +1032,7 @@ public void testDuplicatedClauses() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, 
topDocs.scoreDocs[1].doc); } @@ -1066,7 +1066,7 @@ public void testDuplicatedClauses2() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1074,7 +1074,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1082,7 +1082,7 @@ public void testDuplicatedClauses2() throws Exception { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1117,7 +1117,7 @@ public void testMsmAndRanges_disjunction() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1141,7 +1141,7 @@ private 
void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100); try { - assertThat(topDocs.totalHits.value, equalTo(controlTopDocs.totalHits.value)); + assertThat(topDocs.totalHits.value(), equalTo(controlTopDocs.totalHits.value())); assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); for (int j = 0; j < topDocs.scoreDocs.length; j++) { assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); @@ -1164,12 +1164,13 @@ private void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex logger.error("topDocs.scoreDocs[{}].doc={}", i, topDocs.scoreDocs[i].doc); logger.error("topDocs.scoreDocs[{}].score={}", i, topDocs.scoreDocs[i].score); } + StoredFields storedFields = shardSearcher.storedFields(); for (int i = 0; i < controlTopDocs.scoreDocs.length; i++) { logger.error("controlTopDocs.scoreDocs[{}].doc={}", i, controlTopDocs.scoreDocs[i].doc); logger.error("controlTopDocs.scoreDocs[{}].score={}", i, controlTopDocs.scoreDocs[i].score); // Additional stored information that is useful when debugging: - String queryToString = shardSearcher.doc(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); + String queryToString = storedFields.document(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); logger.error("controlTopDocs.scoreDocs[{}].query_to_string={}", i, queryToString); TermsEnum tenum = MultiTerms.getTerms(shardSearcher.getIndexReader(), fieldType.queryTermsField.name()).iterator(); @@ -1289,7 +1290,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { float _score[] = new float[] { boost }; DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); CheckedFunction leaf = queryStore.getQueries(context); @@ -1313,7 +1314,7 @@ protected boolean match(int doc) { } 
} }; - return new Scorer(this) { + Scorer scorer = new Scorer() { @Override public int docID() { @@ -1335,6 +1336,7 @@ public float getMaxScore(int upTo) throws IOException { return _score[0]; } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index 075d4d429fb39..04a8105b5fb82 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -118,7 +118,7 @@ public void testPercolateQuery() throws Exception { ) ); TopDocs topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); assertThat(topDocs.scoreDocs.length, equalTo(1)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); Explanation explanation = shardSearcher.explain(query, 0); @@ -137,7 +137,7 @@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); explanation = shardSearcher.explain(query, 1); @@ -166,7 +166,7 @@ public void testPercolateQuery() throws Exception { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(4L)); + assertThat(topDocs.totalHits.value(), equalTo(4L)); query = new PercolateQuery( "_name", @@ -178,7 +178,7 @@ public void testPercolateQuery() throws Exception { new MatchNoDocsQuery("") ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); 
assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); explanation = shardSearcher.explain(query, 3); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 100cda66acdcc..f72c68c6fd2e3 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -417,10 +418,10 @@ public void testExtractTermsAndRanges() throws Exception { } public void testCreateCandidateQuery() throws Exception { - int origMaxClauseCount = BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); addQueryFieldMappings(); MemoryIndex memoryIndex = new MemoryIndex(false); @@ -435,8 +436,8 @@ public void testCreateCandidateQuery() throws Exception { Tuple t = fieldType.createCandidateQuery(indexReader); assertTrue(t.v2()); assertEquals(2, t.v1().clauses().size()); - assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); - assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class)); + assertThat(t.v1().clauses().get(0).query(), instanceOf(CoveringQuery.class)); + assertThat(t.v1().clauses().get(1).query(), instanceOf(TermQuery.class)); // Now push it over the edge, so that it falls back using TermInSetQuery memoryIndex.addField("field2", "value", 
new WhitespaceAnalyzer()); @@ -444,12 +445,12 @@ public void testCreateCandidateQuery() throws Exception { t = fieldType.createCandidateQuery(indexReader); assertFalse(t.v2()); assertEquals(3, t.v1().clauses().size()); - TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).getQuery(); - assertEquals(maxClauseCount - 1, terms.getTermData().size()); - assertThat(t.v1().clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field: { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertThat(hits[0].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index a9c3e09e7f4ed..81427060615ea 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -82,7 +82,7 @@ public void testExtractQueryMetadata_termQuery() { } public void testExtractQueryMetadata_termsQuery() { - TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); + TermInSetQuery termsQuery = new TermInSetQuery("_field", List.of(new BytesRef("_term1"), new BytesRef("_term2"))); Result result = analyze(termsQuery); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java index a76ddf13e4595..8b94337141243 100644 --- 
a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java @@ -70,7 +70,7 @@ public void testReindexFromRemoteGivenIndexExists() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -85,7 +85,7 @@ public void testReindexFromRemoteGivenSameIndexNames() throws Exception { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -114,7 +114,7 @@ public void testReindexManyTimesFromRemoteGivenSameIndexNames() throws Exception final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } } @@ -146,7 +146,7 @@ public void testReindexFromRemoteGivenSimpleDateMathIndexName() throws Interrupt final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == 
TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -162,7 +162,7 @@ public void testReindexFromRemoteGivenComplexDateMathIndexName() throws Interrup final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java index 4b960e97ce0e0..d046ba881b5d4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; @@ -96,7 +95,7 @@ static CharacterRunAutomaton buildRemoteWhitelist(List whitelist) { return new CharacterRunAutomaton(Automata.makeEmpty()); } Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); - automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (Operations.isTotal(automaton)) { throw new IllegalArgumentException( "Refusing to start because whitelist " diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java 
b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java index b924f8c311115..01459e2ff61bb 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java @@ -97,8 +97,8 @@ class Fields { HITS_PARSER.declareField(constructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.START_OBJECT) { final TotalHits totalHits = SearchHits.parseTotalHitsFragment(p); - assert totalHits.relation == TotalHits.Relation.EQUAL_TO; - return totalHits.value; + assert totalHits.relation() == TotalHits.Relation.EQUAL_TO; + return totalHits.value(); } else { // For BWC with nodes pre 7.0 return p.longValue(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index 593d4b41df712..6c77186089644 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -242,7 +242,7 @@ public void testIndexedTermVectors() throws IOException { withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), reader -> { LeafReader leaf = reader.leaves().get(0).reader(); - Terms terms = leaf.getTermVector(0, "field"); + Terms terms = leaf.termVectors().get(0, "field"); TermsEnum iterator = terms.iterator(); BytesRef term; Set foundTerms = new HashSet<>(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java 
b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java index 61abd64e98a96..d4c4ccfaa442d 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java @@ -130,7 +130,7 @@ private void assertHighlightOneDoc( } TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.collectionToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, hiliteAnalyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java index 4594e8d71c6fb..b9f4943b1dab6 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java @@ -27,7 +27,6 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index return new SmbDirectoryWrapper( setPreload( new MMapDirectory(location, lockFactory), - lockFactory, new HashSet<>(indexSettings.getValue(IndexModule.INDEX_STORE_PRE_LOAD_SETTING)) ) ); diff --git a/qa/ccs-common-rest/build.gradle b/qa/ccs-common-rest/build.gradle index e5e8c5a489d5b..6121f7dcd4f82 100644 --- a/qa/ccs-common-rest/build.gradle +++ b/qa/ccs-common-rest/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' restResources { restApi { - include '_common', 'bulk', 
'count', 'cluster', 'field_caps', 'get', 'knn_search', 'index', 'indices', 'msearch', + include 'capabilities', '_common', 'bulk', 'count', 'cluster', 'field_caps', 'get', 'knn_search', 'index', 'indices', 'msearch', 'search', 'async_search', 'graph', '*_point_in_time', 'info', 'scroll', 'clear_scroll', 'search_mvt', 'eql', 'sql' } restTests { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index ed1cf905f7e9d..c205fe60d3bc9 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -54,8 +54,10 @@ tasks.named("precommit").configure { dependsOn 'enforceYamlTestConvention' } -tasks.named("yamlRestCompatTestTransform").configure({task -> - task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") - task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") - task.skipTestsByFilePattern("indices.create/synthetic_source*.yml", "@UpdateForV9 -> tests do not pass after bumping API version to 9 [ES-9597]") +tasks.named("yamlRestCompatTestTransform").configure ({ task -> + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") + task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") + task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") + task.skipTestsByFilePattern("indices.create/synthetic_source*.yml", "@UpdateForV9 -> tests do not pass after bumping API version to 9 [ES-9597]") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml 
index 99bd001bd95e2..6a5f34b5207ce 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/230_interval_query.yml @@ -476,3 +476,53 @@ setup: - match: { hits.hits.0._id: "6" } - match: { hits.hits.1._id: "5" } +--- +"Test regexp": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ range_regexp_interval_queries ] + test_runner_features: capabilities + reason: "Support for range and regexp interval queries capability required" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cold + - regexp: + pattern: ou.*ide + - match: { hits.total.value: 3 } + + +--- +"Test range": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ range_regexp_interval_queries ] + test_runner_features: capabilities + reason: "Support for range and regexp interval queries capability required" + - do: + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cold + - range: + gte: out + lte: ouu + - match: { hits.total.value: 3 } + diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index dc79961ae78cd..81ca84a06f815 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -212,7 +212,6 @@ dfs knn vector profiling: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } - 
match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } @@ -235,6 +234,47 @@ dfs knn vector profiling: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } +--- +dfs knn vector profiling description: + - requires: + cluster_features: ["lucene_10_upgrade"] + reason: "the profile description changed with Lucene 10" + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, -20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { hits.total.value: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } + --- dfs knn vector profiling with vector_operations_count: - requires: @@ -276,7 +316,6 @@ dfs knn vector profiling with vector_operations_count: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } @@ -300,7 +339,6 @@ dfs knn vector profiling with vector_operations_count: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } - --- dfs profile for search with dfs_query_then_fetch: - requires: diff --git a/server/build.gradle b/server/build.gradle index 5c12d47da8102..963b3cfb2e747 100644 --- a/server/build.gradle +++ 
b/server/build.gradle @@ -44,6 +44,7 @@ dependencies { api "org.apache.lucene:lucene-core:${versions.lucene}" api "org.apache.lucene:lucene-analysis-common:${versions.lucene}" api "org.apache.lucene:lucene-backward-codecs:${versions.lucene}" + api "org.apache.lucene:lucene-facet:${versions.lucene}" api "org.apache.lucene:lucene-grouping:${versions.lucene}" api "org.apache.lucene:lucene-highlighter:${versions.lucene}" api "org.apache.lucene:lucene-join:${versions.lucene}" diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index c56bc201e7f86..8bedf436e3698 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -571,7 +571,7 @@ public void testSearchQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); @@ -601,7 +601,7 @@ public void testSearchDfsQueryThenFetch() throws Exception { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index cc6329a973b37..e8160a311bedb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -306,8 +306,8 @@ public void onFailure(Exception e) { prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)), expected -> assertNoFailuresAndResponse(prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()), all -> { - assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); - logger.info("total: {}", expected.getHits().getTotalHits().value); + assertEquals(expected + " vs. 
" + all, expected.getHits().getTotalHits().value(), all.getHits().getTotalHits().value()); + logger.info("total: {}", expected.getHits().getTotalHits().value()); }) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index e1bf5bce6f3ae..8391ab270b1d1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -253,7 +253,7 @@ public void assertNested(String index, int numDocs) { // now, do a nested query assertNoFailuresAndResponse( prepareSearch(index).setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocs)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java index 8b8b62da98f97..2fd6ee9a16808 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java @@ -141,11 +141,11 @@ public void afterBulk(long executionId, BulkRequest request, Exception failure) assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L)); if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), 
lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 37904e9f639ac..4ed19065f32f2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -136,11 +136,11 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) final boolean finalRejectedAfterAllRetries = rejectedAfterAllRetries; assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index 75f914f76dd77..b204f614cbae7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -86,7 +86,7 @@ public void testSingleBulkRequest() { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) 1)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) 1)); }); assertFalse(refCounted.hasReferences()); @@ -199,7 +199,7 @@ public void testMultipleBulkPartsWithBackoff() { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docs)); }); } } @@ -289,7 +289,7 @@ public void testBulkLevelBulkFailureAfterFirstIncrementalRequest() throws Except assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(hits.get())); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(hits.get())); }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index 91674b7ce9050..339567f409c02 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -81,7 +81,7 @@ public void 
testMappingValidationIndexExists() { ); indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); @@ -135,7 +135,7 @@ public void testMappingValidationIndexExistsWithComponentTemplate() throws IOExc // Now make sure nothing was actually changed: indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java index 274cf90ec9529..f17196c3d97f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java @@ -45,9 +45,9 @@ public void testIndexWithWriteDelayEnabled() throws Exception { try { logger.debug("running search"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if 
(response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index 66323e687eefb..e47925cef913b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -612,7 +612,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards - shardsRemoved)); assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); }); // create a PIT when some shards are missing @@ -637,7 +637,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertThat(resp.pointInTimeId(), equalTo(pointInTimeResponseOneNodeDown.getPointInTimeId())); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); @@ -661,7 +661,7 @@ public void testMissingShardsWithPointInTime() throws Exception { assertThat(resp.getSuccessfulShards(), equalTo(numShards)); assertThat(resp.getFailedShards(), equalTo(0)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, greaterThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), greaterThan((long) numDocs)); }); // ensure that when using the 
previously created PIT, we'd see the same number of documents as before regardless of the @@ -681,7 +681,7 @@ public void testMissingShardsWithPointInTime() throws Exception { } assertNotNull(resp.getHits().getTotalHits()); // we expect less documents as the newly indexed ones should not be part of the PIT - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index d1a68c68e7de5..a1395f81eb091 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -143,7 +143,7 @@ public void testLocalClusterAlias() throws ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -162,7 +162,7 @@ public void testLocalClusterAlias() throws ExecutionException, InterruptedExcept randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -221,7 +221,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce ); searchRequest.indices(""); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, 
searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -241,7 +241,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -280,7 +280,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { ? originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(1, longTerms.getBuckets().size()); @@ -296,7 +296,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException { false ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(2, longTerms.getBuckets().size()); @@ -432,7 +432,7 @@ public void testSearchIdle() throws Exception { () -> assertResponse( prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setPreFilterShardSize(randomIntBetween(1, 3)), - resp -> 
assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + resp -> assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)) ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index 848c5cacda1b9..b70da34c8fe3f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -396,7 +396,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); logger.info("--> checking filtering alias for one index"); @@ -406,7 +406,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); logger.info("--> checking filtering alias for two indices and one complete index"); @@ -416,7 +416,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for one index"); @@ -426,17 +426,17 @@ public void testSearchingFilteringAliasesTwoIndices() throws 
Exception { ); assertResponse( prepareSearch("foos", "aliasToTest1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); assertResponse( prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); @@ -446,7 +446,7 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.termQuery("name", "something")), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); } @@ -508,7 +508,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -517,7 +517,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception 
{ ); assertResponse( prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); assertResponse( @@ -526,7 +526,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -535,7 +535,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter13", "filter1", "filter23").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -544,7 +544,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -553,7 +553,7 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { ); assertResponse( prepareSearch("filter23", "filter13", "test1", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); } @@ -608,7 +608,7 @@ 
public void testDeletingByQueryFilteringAliases() throws Exception { logger.info("--> checking counts before delete"); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); } @@ -1399,7 +1399,7 @@ private void checkAliases() { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java index 4e7c22f0d8847..f7dae8a92c2d6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java @@ -44,7 +44,7 @@ public void testBroadcastOperations() throws IOException { for (int i = 0; i < 5; i++) { // test successful assertResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index eb10877f5892d..97994a38c277c 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -152,7 +152,7 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { // test successful assertNoFailuresAndResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); @@ -164,7 +164,7 @@ public void testIndexActions() throws Exception { countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length, equalTo(0) ); - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 5da9788e3079f..4d1ed9bce6440 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -115,7 +115,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -139,7 +139,7 @@ public void 
testFinalPipelineOfNewDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals(true, response.getHits().getAt(0).getSourceAsMap().get("final")); }); } @@ -163,7 +163,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -187,7 +187,7 @@ public void testDefaultPipelineOfRerouteDestinationIsInvoked() { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertTrue(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index 7b7433e3aa4c3..cb280d5577fae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -107,7 +107,7 @@ public void testMaxDocsLimit() throws Exception { indicesAdmin().prepareRefresh("test").get(); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); if (randomBoolean()) { indicesAdmin().prepareFlush("test").get(); @@ -117,7 +117,7 @@ public void testMaxDocsLimit() throws Exception { ensureGreen("test"); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index 81a0e0ede7cd3..1194218c68ff1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -46,7 +46,7 @@ public void testDynamicTemplateCopyTo() throws Exception { AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) recordCount)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) recordCount)); assertThat(((Terms) response.getAggregations().get("test")).getBuckets().size(), equalTo(recordCount + 1)); assertThat(((Terms) response.getAggregations().get("test_raw")).getBuckets().size(), equalTo(recordCount)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 03afabaae1d0d..902dd911ddcd3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -115,7 +115,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I assertResponse( prepareSearch("index").setQuery(termQuery("_id", response.getHits().getHits()[i].getId())).setExplain(true), dupIdResponse -> { - assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L)); + assertThat(dupIdResponse.getHits().getTotalHits().value(), greaterThan(1L)); logger.info("found a duplicate id:"); for (SearchHit hit : dupIdResponse.getHits()) { logger.info("Doc {} was found on shard {}", hit.getId(), hit.getShard().getShardId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java index 62c5f934ec8b6..37fbc95d56506 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java @@ -57,9 +57,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with all types"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. 
" @@ -77,9 +77,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { try { logger.debug("running search with a specific type"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 7db810fc70ac1..52492ba7ce657 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -149,7 +149,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 5); @@ -161,7 +161,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); @@ -174,7 +174,7 @@ public void testQueryRewrite() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, 
"index", 6, 9); @@ -217,7 +217,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 0, 1); @@ -229,7 +229,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 1, 1); @@ -241,7 +241,7 @@ public void testQueryRewriteMissingValues() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 2, 1); @@ -286,7 +286,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 0, 1); @@ -299,7 +299,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); 
assertCacheState(client, "index", 1, 1); @@ -312,7 +312,7 @@ public void testQueryRewriteDates() throws Exception { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 2, 1); @@ -364,7 +364,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 0, 1); @@ -381,7 +381,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 1, 1); @@ -395,7 +395,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 2, 1); @@ -440,7 +440,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); 
assertCacheState(client, "index", 0, 0); @@ -453,7 +453,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -468,7 +468,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -483,7 +483,7 @@ public void testCanCache() throws Exception { .addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -497,7 +497,7 @@ public void testCanCache() throws Exception { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 2); @@ -512,7 +512,7 @@ public void testCanCache() throws Exception { .addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); 
assertCacheState(client, "index", 0, 4); @@ -543,7 +543,7 @@ public void testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 0, 1); @@ -555,20 +555,20 @@ public void testCacheWithFilteredAlias() { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 1, 1); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 1, 2); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 2, 2); } @@ -591,7 +591,7 @@ public void testProfileDisableCache() throws Exception { client.prepareSearch("index").setRequestCache(true).setProfile(profile).setQuery(QueryBuilders.termQuery("k", "hello")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); if (profile == false) { diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index a6b168af5268d..cbb0a67edcb83 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -229,7 +229,7 @@ public void testCloseWhileRelocatingShards() throws Exception { for (String index : acknowledgedCloses) { assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { - long docsCount = response.getHits().getTotalHits().value; + long docsCount = response.getHits().getTotalHits().value(); assertEquals( "Expected " + docsPerIndex.get(index) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 942f86017c617..77c4f8a26f478 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -344,7 +344,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).setTrackTotalHits(true).addSort("id", SortOrder.ASC), response -> { logSearchResponse(numberOfShards, numberOfDocs, finalI, response); - iterationHitCount[finalI] = response.getHits().getTotalHits().value; + iterationHitCount[finalI] = response.getHits().getTotalHits().value(); if (iterationHitCount[finalI] != numberOfDocs) { error[0] = true; } @@ -391,7 +391,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, boolean[] errorOccurred = new boolean[1]; for (int i = 0; i < iterations; i++) { 
assertResponse(prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()), response -> { - if (response.getHits().getTotalHits().value != numberOfDocs) { + if (response.getHits().getTotalHits().value() != numberOfDocs) { errorOccurred[0] = true; } }); @@ -421,7 +421,7 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat logger.info( "iteration [{}] - returned documents: {} (expected {})", iteration, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), numberOfDocs ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index fb1fabfd198e6..2c56f75b051eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -240,7 +240,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), response -> { var hits = response.getHits(); - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + if (hits.getTotalHits().value() != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { hitIds[hit] = hit + 1; @@ -254,7 +254,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { } set.forEach(value -> logger.error("Missing id [{}]", value)); } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + assertThat(hits.getTotalHits().value(), equalTo(indexer.totalIndexedDocs())); logger.info("--> DONE search test round {}", idx + 1); } ); @@ -364,9 +364,9 @@ public void indexShardStateChanged( for (Client client : clients()) { 
assertNoFailuresAndResponse(client.prepareSearch("test").setPreference("_local").setSize(0), response -> { if (expectedCount[0] < 0) { - expectedCount[0] = response.getHits().getTotalHits().value; + expectedCount[0] = response.getHits().getTotalHits().value(); } else { - assertEquals(expectedCount[0], response.getHits().getTotalHits().value); + assertEquals(expectedCount[0], response.getHits().getTotalHits().value()); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 45dce5789b9bc..199c9a9fb4c8c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -296,7 +296,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1).setQuery(QueryBuilders.matchAllQuery()), response -> { logger.info("--> search all on index_* should find two"); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); // Let's make sure that, even though 2 docs are available, only one is returned according to the size we set in the request // Therefore the reduce phase has taken place, which proves that the QUERY_AND_FETCH search type wasn't erroneously forced. 
assertThat(response.getHits().getHits().length, equalTo(1)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java index 7bccf3db1284e..68bc6656cec7f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java @@ -160,7 +160,7 @@ private void verifyRoutedSearches(String index, Map> routing + "] shards for routing [" + routing + "] and got hits [" - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + "]" ); @@ -168,7 +168,7 @@ private void verifyRoutedSearches(String index, Map> routing response.getTotalShards() + " was not in " + expectedShards + " for " + index, expectedShards.contains(response.getTotalShards()) ); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); @@ -188,7 +188,7 @@ private void verifyBroadSearches(String index, Map> routingT prepareSearch().setQuery(QueryBuilders.termQuery("_routing", routing)).setIndices(index).setSize(100), response -> { assertEquals(expectedShards, response.getTotalShards()); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java index ee1aac60da9c1..f63f09764621b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -64,7 +64,7 @@ public void testTopHitsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); } @@ -81,7 +81,7 @@ public void testAggsTimeout() { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertEquals(searchResponse.getHits().getHits().length, 0); StringTerms terms = searchResponse.getAggregations().get("terms"); assertEquals(1, terms.getBuckets().size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index d023c9de87ca5..4a407ae66f7ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -115,7 +115,7 @@ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exceptio histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode)) ), response -> { - assertThat(response.getHits().getTotalHits().value, Matchers.equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), Matchers.equalTo(0L)); Histogram values = 
response.getAggregations().get("values"); assertThat(values, notNullValue()); assertThat(values.getBuckets().isEmpty(), is(true)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 5a21b600cacd4..1a6e1519d4402 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -293,7 +293,7 @@ public void testDuelTerms() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); final Terms longTerms = response.getAggregations().get("long"); final Terms doubleTerms = response.getAggregations().get("double"); @@ -413,7 +413,7 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index a820e6e8d1747..2bd19c9d32d44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -57,7 +57,7 @@ public void testWrapperQueryIsRewritten() throws IOException { metadata.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); builder.setMetadata(metadata); assertResponse(client().prepareSearch("test").setSize(0).addAggregation(builder), response -> { - 
assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); InternalFilters filters = response.getAggregations().get("titles"); assertEquals(1, filters.getBuckets().size()); assertEquals(2, filters.getBuckets().get(0).getDocCount()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 1787b4f784574..20bc07afec32d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -974,7 +974,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateHistogram("date_histo").field("value").fixedInterval(DateHistogramInterval.HOUR)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); @@ -1011,7 +1011,7 @@ public void testSingleValueWithTimeZone() throws Exception { .format("yyyy-MM-dd:HH-mm-ssZZZZZ") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -1175,7 +1175,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { assertThat( "Expected 24 buckets for one day aggregation with hourly interval", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), equalTo(2L) ); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 0afc479474814..5d6b1a3230c37 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -78,7 +78,7 @@ public void testSingleValueWithPositiveOffset() throws Exception { dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -99,7 +99,7 @@ public void testSingleValueWithNegativeOffset() throws Exception { dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -128,7 +128,7 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception { .fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(24L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(24L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java 
index 6e9a9305eaf4e..6dac2db6f682a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -578,7 +578,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(dateRange("date_range").field("value").addRange("0-1", 0, 1)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -722,7 +722,7 @@ public void testRangeWithFormatStringValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00:16:40-00:50:00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00:50:00-01:06:40", 3000000L, 4000000L); @@ -739,7 +739,7 @@ public void testRangeWithFormatStringValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -753,7 +753,7 @@ public void testRangeWithFormatStringValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 
3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); @@ -788,7 +788,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -799,7 +799,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -810,7 +810,7 @@ public void testRangeWithFormatNumericValue() throws Exception { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -827,7 +827,7 @@ public void testRangeWithFormatNumericValue() throws Exception { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -841,7 +841,7 @@ public void testRangeWithFormatNumericValue() throws Exception { dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 1b70b859426d5..96807ed119866 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -159,7 +159,7 @@ public void testEmptyAggregation() throws Exception { 
histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index b030370215cd3..439583de910c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -247,7 +247,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery()))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -455,7 +455,7 @@ public void testEmptyAggregationWithOtherBucket() throws Exception { .subAggregation(filters("filters", new KeyedFilter("foo", matchAllQuery())).otherBucket(true).otherBucketKey("bar")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 843e50a5a7e21..907f943e68422 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -413,7 +413,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 2edd567221bef..ad65e6468b812 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -915,7 +915,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(histogram("sub_histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index d05c541578d57..dd0cc4aa081df 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -346,7 +346,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(nested("nested", "nested"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 8b63efd92a648..1cfd6e00af7ab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -866,7 +866,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index 5e2a44285e8fa..29bf8a8a0b45a 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -97,7 +97,7 @@ public void testEmptyAggregation() throws Exception { histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -130,7 +130,7 @@ public void testUnmapped() throws Exception { assertResponse( prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ExtendedStats stats = response.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 762bc5bdfaf39..ff4150556c011 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -112,7 +112,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = 
histo.getBuckets().get(1); @@ -138,7 +138,7 @@ public void testUnmapped() throws Exception { .field("value") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 12ed0a5c1a8e0..fe6dc7abf66a8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -116,7 +116,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -143,7 +143,7 @@ public void testUnmapped() throws Exception { .percentiles(0, 10, 15, 100) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 52425ae1d9f17..4c8fed2c16ddc 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -358,7 +358,7 @@ public void testMap() { prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -407,7 +407,7 @@ public void testMapWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -467,7 +467,7 @@ public void testInitMutatesParams() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -522,7 +522,7 @@ public void testMapCombineWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -586,7 +586,7 @@ public void testInitMapCombineWithParams() { .reduceScript(reduceScript) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -655,7 +655,7 @@ public void testInitMapCombineReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -714,7 +714,7 @@ public void testInitMapCombineReduceGetProperty() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Global global = response.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -773,7 +773,7 @@ public void testMapCombineReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -824,7 +824,7 @@ public void testInitMapReduceWithParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -869,7 +869,7 @@ public void testMapReduceWithParams() { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -928,7 +928,7 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -964,7 +964,7 @@ public void testInitMapCombineReduceWithParamsStored() { .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -1025,7 +1025,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); assertThat(aggregation, notNullValue()); assertThat(aggregation, instanceOf(Histogram.class)); @@ -1099,7 +1099,7 @@ public void testEmptyAggregation() throws Exception { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Bucket bucket = histo.getBuckets().get(1); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index fbe70ec2a40d6..1169f8bbdbf18 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -56,7 +56,7 @@ public void testEmptyAggregation() throws Exception { ), response -> { assertShardExecutionState(response, 0); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 2a8be6b4244dd..b3ad5c578e618 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -82,7 +82,7 @@ public void testEmptyAggregation() throws Exception { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 2877f8882d6d6..d6cceb2013701 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -105,7 +105,7 @@ public void testEmptyAggregation() throws Exception { .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value"))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -146,7 +146,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 })).field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index bbcf7b191fe1b..b4072bcf226ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -111,7 +111,7 @@ public void testEmptyAggregation() throws Exception { 
.subAggregation(randomCompression(percentiles("percentiles").field("value")).percentiles(10, 15)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -132,7 +132,7 @@ public void testUnmapped() throws Exception { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(0, 10, 15, 100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 7ac8e3c7a35b4..80c47d6180db0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -328,7 +328,7 @@ public void testBasics() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); higestSortValue += 10; assertThat((Long) hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); @@ -348,7 +348,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) 
.addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -381,7 +381,7 @@ public void testIssue11119() throws Exception { .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -413,7 +413,7 @@ public void testBreadthFirstWithScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -444,7 +444,7 @@ public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -501,7 +501,7 @@ public void testPagination() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); 
TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(controlHits.getTotalHits().value)); + assertThat(hits.getTotalHits().value(), equalTo(controlHits.getTotalHits().value())); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { logger.info( @@ -543,7 +543,7 @@ public void testSortByBucket() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); assertThat(hits.getAt(1).getSortValues()[0], equalTo(higestSortValue - 1)); @@ -578,7 +578,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("b")); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("6")); @@ -586,7 +586,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("c")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(3L)); + assertThat(hits.getTotalHits().value(), equalTo(3L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("9")); @@ -594,7 +594,7 @@ public void testFieldCollapsing() throws Exception { assertThat(key(bucket), equalTo("a")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); + 
assertThat(hits.getTotalHits().value(), equalTo(2L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("2")); } @@ -630,7 +630,7 @@ public void testFetchFeatures() throws IOException { for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(1)); SearchHit hit = hits.getAt(0); @@ -682,7 +682,7 @@ public void testEmptyIndex() throws Exception { TopHits hits = response.getAggregations().get("hits"); assertThat(hits, notNullValue()); assertThat(hits.getName(), equalTo("hits")); - assertThat(hits.getHits().getTotalHits().value, equalTo(0L)); + assertThat(hits.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -744,7 +744,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); TopHits topHits = bucket.getAggregations().get("top-comments"); SearchHits searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(1)); @@ -753,7 +753,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(2L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), 
equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(2)); @@ -765,7 +765,7 @@ public void testTopHitsInNestedSimple() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(4)); @@ -789,7 +789,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toComments.getDocCount(), equalTo(4L)); TopHits topComments = toComments.getAggregations().get("top-comments"); - assertThat(topComments.getHits().getTotalHits().value, equalTo(4L)); + assertThat(topComments.getHits().getTotalHits().value(), equalTo(4L)); assertThat(topComments.getHits().getHits().length, equalTo(4)); assertThat(topComments.getHits().getAt(0).getId(), equalTo("2")); @@ -816,7 +816,7 @@ public void testTopHitsInSecondLayerNested() throws Exception { assertThat(toReviewers.getDocCount(), equalTo(7L)); TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers"); - assertThat(topReviewers.getHits().getTotalHits().value, equalTo(7L)); + assertThat(topReviewers.getHits().getTotalHits().value(), equalTo(7L)); assertThat(topReviewers.getHits().getHits().length, equalTo(7)); assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1")); @@ -899,7 +899,7 @@ public void testNestedFetchFeatures() { assertThat(nested.getDocCount(), equalTo(4L)); SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); SearchHit searchHit = hits.getAt(0); 
assertThat(searchHit.getId(), equalTo("1")); assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments")); @@ -960,7 +960,7 @@ public void testTopHitsInNested() throws Exception { TopHits hits = nested.getAggregations().get("comments"); SearchHits searchHits = hits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(numNestedDocs)); + assertThat(searchHits.getTotalHits().value(), equalTo(numNestedDocs)); for (int j = 0; j < 3; j++) { assertThat(searchHits.getAt(j).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(j).getNestedIdentity().getOffset(), equalTo(0)); @@ -1064,7 +1064,7 @@ public void testNoStoredFields() throws Exception { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); for (SearchHit hit : hits) { assertThat(hit.getSourceAsMap(), nullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 3dee7a8d6e92f..6e00c1e5a8d90 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -67,7 +67,7 @@ protected Collection> nodePlugins() { public void testUnmapped() throws Exception { assertResponse(prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(count("count").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ValueCount valueCount = response.getAggregations().get("count"); 
assertThat(valueCount, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 3263be081a6f7..2cd22c6a65222 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -72,14 +72,14 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setPreference(preference + Integer.toString(counter++)) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { refresh(); assertResponse( client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterRefresh -> { logger.info( "hits count mismatch on any shard search failed, post explicit refresh hits are {}", - searchResponseAfterRefresh.getHits().getTotalHits().value + searchResponseAfterRefresh.getHits().getTotalHits().value() ); ensureGreen(); assertResponse( @@ -88,7 +88,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterGreen -> logger.info( "hits count mismatch on any shard search failed, post explicit wait for green hits are {}", - searchResponseAfterGreen.getHits().getTotalHits().value + searchResponseAfterGreen.getHits().getTotalHits().value() ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index cab70ba7d7339..0d06856ca1088 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -77,7 +77,7 @@ public void run() { try { while (stop.get() == false) { assertResponse(prepareSearch().setSize(numDocs), response -> { - if (response.getHits().getTotalHits().value != numDocs) { + if (response.getHits().getTotalHits().value() != numDocs) { // if we did not search all shards but had no serious failures that is potentially fine // if only the hit-count is wrong. this can happen if the cluster-state is behind when the // request comes in. It's a small window but a known limitation. @@ -86,7 +86,7 @@ public void run() { .allMatch(ssf -> ssf.getCause() instanceof NoShardAvailableActionException)) { nonCriticalExceptions.add( "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numDocs + " was expected. " @@ -100,7 +100,7 @@ public void run() { final SearchHits sh = response.getHits(); assertThat( "Expected hits to be the same size the actual hits array", - sh.getTotalHits().value, + sh.getTotalHits().value(), equalTo((long) (sh.getHits().length)) ); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 1745ad82931ba..4b59d5b9a78d5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -126,7 +126,7 @@ public void testDfsQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); 
SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -169,7 +169,7 @@ public void testDfsQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -208,7 +208,7 @@ public void testQueryThenFetch() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -237,7 +237,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(60)); for (int i = 0; i < 60; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -248,7 +248,7 @@ public void testQueryThenFetchWithFrom() throws Exception { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(40)); for (int i = 0; i < 40; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -271,7 +271,7 @@ public void 
testQueryThenFetchWithSort() throws Exception { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -301,7 +301,7 @@ public void testSimpleFacets() throws Exception { .aggregation(AggregationBuilders.filter("test1", termQuery("name", "test1"))); assertNoFailuresAndResponse(client().search(new SearchRequest("test").source(sourceBuilder)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(100L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(100L)); Global global = response.getAggregations().get("global"); Filter all = global.getAggregations().get("all"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 223ee81e84a92..5233a0cd564ef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -685,7 +685,7 @@ public void testDateMathIndexes() throws ExecutionException, InterruptedExceptio assertNotNull(localClusterSearchInfo); Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER); assertNotNull(remoteClusterSearchInfo); - assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value, greaterThan(2L)); + assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value(), greaterThan(2L)); for (var hit : response.getHits()) { assertThat(hit.getIndex(), anyOf(equalTo("datemath-2001-01-01-14"), equalTo("remotemath-2001-01-01-14"))); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 2cb2e186b257e..91cc344614c23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -139,7 +139,7 @@ private void hitExecute(FetchContext context, HitContext hitContext) throws IOEx hitField = new DocumentField(NAME, new ArrayList<>(1)); hitContext.hit().setDocumentField(NAME, hitField); } - Terms terms = hitContext.reader().getTermVector(hitContext.docId(), field); + Terms terms = hitContext.reader().termVectors().get(hitContext.docId(), field); if (terms != null) { TermsEnum te = terms.iterator(); Map tv = new HashMap<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 66d44a818b797..e39f8df9bad36 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -151,7 +151,7 @@ public void testSimpleNested() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(2)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -171,7 +171,7 @@ public void testSimpleNested() throws Exception { assertThat(response.getHits().getAt(0).getShard(), notNullValue()); 
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getHits().length, equalTo(3)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -196,7 +196,7 @@ public void testSimpleNested() throws Exception { ), response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(1)); HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message"); assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick")); @@ -264,7 +264,7 @@ public void testRandomNested() throws Exception { SearchHit searchHit = response.getHits().getAt(i); assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field1InnerObjects[i])); for (int j = 0; j < field1InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1")); @@ -273,7 +273,7 @@ public void testRandomNested() throws Exception { } inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field2InnerObjects[i])); for (int j = 0; j < field2InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); 
assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2")); @@ -378,13 +378,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -409,13 +409,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); 
assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -436,7 +436,7 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -460,13 +460,13 @@ public void testNestedMultipleLayers() throws Exception { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -538,7 +538,7 @@ public void testNestedDefinedAsObject() throws Exception { response -> { assertHitCount(response, 1); 
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1")); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), @@ -613,7 +613,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit parent = response.getHits().getAt(0); assertThat(parent.getId(), equalTo("1")); SearchHits inner = parent.getInnerHits().get("comments.messages"); - assertThat(inner.getTotalHits().value, equalTo(2L)); + assertThat(inner.getTotalHits().value(), equalTo(2L)); assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}")); assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}")); } @@ -629,7 +629,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(2L)); + assertThat(messages.getTotalHits().value(), equalTo(2L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2)); @@ -651,7 +651,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + 
assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); @@ -685,7 +685,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); @@ -786,22 +786,22 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC), response -> { assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getAt(0).getId(), equalTo("0")); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1")); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1)); 
assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2")); for (int i = 2; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(String.valueOf(i))); - assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3")); } @@ -844,7 +844,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -865,7 +865,7 @@ public void testNestedSource() throws Exception { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -891,7 +891,7 @@ public void testNestedSource() throws Exception { ), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0)); } ); @@ -901,7 +901,7 @@ public void testNestedSource() throws Exception { .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty()); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 0ce4f34463b03..0805d0f366b0f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3340,7 +3340,7 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti new 
SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1)); } ); @@ -3412,7 +3412,7 @@ public void testKeywordFieldHighlighting() throws IOException { .highlighter(new HighlightBuilder().field("*")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("keyword_field"); assertThat(highlightField.fragments()[0].string(), equalTo("some text")); } @@ -3569,7 +3569,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { .should(QueryBuilders.termQuery("field", "hello")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertHighlight(response, 0, "field", 0, 1, equalTo("hello world")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index d1eb1ab533ab7..16e5e42e00c9f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -191,26 +191,26 @@ public void testStoredFields() throws Exception { indicesAdmin().prepareRefresh().get(); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field1"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); // field2 is not stored, check that it is not extracted from source. assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); assertThat(response.getHits().getAt(0).getFields().get("field2"), nullValue()); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -218,7 +218,7 @@ public void testStoredFields() throws Exception { assertResponse( prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); 
+ assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -226,20 +226,20 @@ public void testStoredFields() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), nullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -247,7 +247,7 @@ public void testStoredFields() throws Exception { 
assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), notNullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -311,7 +311,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); @@ -342,7 +342,7 @@ public void testScriptDocAndFields() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", Map.of("factor", 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat(fields, equalTo(singleton("sNum1"))); @@ -429,7 +429,7 @@ public void testIdBasedScriptFields() throws Exception { .setSize(numDocs) .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); @@ -638,7 +638,7 @@ public void testStoredFieldsWithoutSource() throws Exception { .addStoredField("boolean_field") .addStoredField("binary_field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -681,7 +681,7 @@ public void testSearchFieldsMetadata() throws Exception { .get(); assertResponse(prepareSearch("my-index").addStoredField("field1").addStoredField("_routing"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field("field1"), nullValue()); assertThat(response.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1")); }); @@ -749,7 +749,7 @@ public void testGetFieldsComplexField() throws Exception { String field = "field1.field2.field3.field4"; assertResponse(prepareSearch("my-index").addStoredField(field), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(response.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); assertThat(response.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2")); @@ -866,7 +866,7 @@ public void testDocValueFields() throws Exception { builder.addDocValueField("*_field"); } assertResponse(builder, 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -906,7 +906,7 @@ public void testDocValueFields() throws Exception { assertThat(response.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -955,7 +955,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("double_field", "#.0") .addDocValueField("date_field", "epoch_millis"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 36e75435bb5de..76384253282de 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -250,7 +250,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, 
equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -276,7 +276,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -300,7 +300,7 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -373,7 +373,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -386,7 +386,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); 
assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -405,7 +405,7 @@ public void testBoostModeSettingWorks() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("2")); assertThat(sh.getAt(1).getId(), equalTo("1")); } @@ -461,7 +461,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -481,7 +481,7 @@ public void testParseGeoPoint() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); } @@ -528,7 +528,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -546,7 +546,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -564,7 +564,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - 
assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0 + 0.5, 1.e-5)); logger.info( @@ -588,7 +588,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo((2.0 + 0.5) / 2, 1.e-5)); } @@ -606,7 +606,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -624,7 +624,7 @@ public void testCombineModes() throws Exception { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0, 1.e-5)); } @@ -1131,7 +1131,7 @@ public void testMultiFieldOptions() throws Exception { assertResponse(client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))), response -> { assertSearchHits(response, "1", "2"); SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); }); List lonlat = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 
7fb06c0b83015..a85d133450bec 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -144,7 +144,7 @@ public void testExplainScript() throws InterruptedException, IOException, Execut ), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(20L)); + assertThat(hits.getTotalHits().value(), equalTo(20L)); int idCounter = 19; for (SearchHit hit : hits.getHits()) { assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index a0fe7e661020d..a38c9dc916056 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -145,9 +145,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -167,9 +167,9 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -224,9 +224,9 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept protected 
void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) { assertNoFailures(searchResponse); - assertThat((int) searchResponse.getHits().getTotalHits().value, is(numMatchingDocs)); + assertThat((int) searchResponse.getHits().getTotalHits().value(), is(numMatchingDocs)); int pos = 0; - for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value; hitId--) { + for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value(); hitId--) { assertThat(searchResponse.getHits().getAt(pos).getId(), equalTo(Integer.toString(hitId))); pos++; } @@ -242,7 +242,7 @@ public void testWithEmptyFunctions() throws IOException, ExecutionException, Int assertNoFailuresAndResponse( client().search(new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text")))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); termQueryScore[0] = response.getHits().getAt(0).getScore(); } ); @@ -259,7 +259,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore)); } ); @@ -269,7 +269,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(2f)) ) ), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 025d224923dc0..a2d017abc2eb7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -135,7 +135,7 @@ public void testRescorePhrase() throws Exception { 5 ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -415,7 +415,7 @@ private static void assertEquivalent(String query, SearchResponse plain, SearchR assertNoFailures(rescored); SearchHits leftHits = plain.getHits(); SearchHits rightHits = rescored.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); @@ -841,7 +841,7 @@ public void testRescorePhaseWithInvalidSort() throws Exception { .setTrackScores(true) .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); assertThat(response.getHits().getHits().length, equalTo(5)); for (SearchHit hit : response.getHits().getHits()) { assertThat(hit.getScore(), equalTo(101f)); @@ -888,7 +888,7 @@ public void testRescoreAfterCollapse() throws Exception { .addRescorer(new 
QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore"))) .setCollapse(new CollapseBuilder("group")); assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); assertThat(resp.getHits().getHits().length, equalTo(3)); SearchHit hit1 = resp.getHits().getAt(0); @@ -968,7 +968,7 @@ public void testRescoreAfterCollapseRandom() throws Exception { .setSize(Math.min(numGroups, 10)); long expectedNumHits = numHits; assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); for (int pos = 0; pos < resp.getHits().getHits().length; pos++) { SearchHit hit = resp.getHits().getAt(pos); assertThat(hit.getId(), equalTo(sortedGroups[pos].id())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 7fdb31a468998..22e27d78531a6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -268,7 +268,7 @@ public void testSeedReportedInExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); SearchHit firstHit = response.getHits().getAt(0); assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); } @@ -283,12 +283,12 @@ public void testNoDocs() throws Exception { prepareSearch("test").setQuery( functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) ), - response -> assertEquals(0, 
response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); assertNoFailuresAndResponse( prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())), - response -> assertEquals(0, response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 9b574cb54a116..2fde645f0036b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -426,7 +426,7 @@ public void testExplain() throws Exception { .setExplain(true), response -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); Explanation explanation = response.getHits().getHits()[0].getExplanation(); assertThat(explanation.getValue(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(explanation.toString(), startsWith("0.36464313 = Score based on 2 child docs in range from 0 to 1")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 6993f24b895e0..e6cd89c09b979 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -147,10 +147,10 @@ public void testProfileMatchesRegular() throws Exception { ); } - if (vanillaResponse.getHits().getTotalHits().value != profileResponse.getHits().getTotalHits().value) { + if 
(vanillaResponse.getHits().getTotalHits().value() != profileResponse.getHits().getTotalHits().value()) { Set vanillaSet = new HashSet<>(Arrays.asList(vanillaResponse.getHits().getHits())); Set profileSet = new HashSet<>(Arrays.asList(profileResponse.getHits().getHits())); - if (vanillaResponse.getHits().getTotalHits().value > profileResponse.getHits().getTotalHits().value) { + if (vanillaResponse.getHits().getTotalHits().value() > profileResponse.getHits().getTotalHits().value()) { vanillaSet.removeAll(profileSet); fail("Vanilla hits were larger than profile hits. Non-overlapping elements were: " + vanillaSet.toString()); } else { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java index f263ececfdc7d..26b040e2309c2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java @@ -133,7 +133,7 @@ public void testExists() throws Exception { response ), count, - response.getHits().getTotalHits().value + response.getHits().getTotalHits().value() ); } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 96042e198ef43..0fd2bd6f94770 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -347,7 +347,7 @@ public void testPhraseType() { ).type(MatchQueryParser.Type.PHRASE) ) ), - response -> assertThat(response.getHits().getTotalHits().value, greaterThan(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), greaterThan(1L)) ); 
assertSearchHitsWithoutFailures( @@ -428,8 +428,8 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio matchResp -> { assertThat( "field: " + field + " query: " + builder.toString(), - multiMatchResp.getHits().getTotalHits().value, - equalTo(matchResp.getHits().getTotalHits().value) + multiMatchResp.getHits().getTotalHits().value(), + equalTo(matchResp.getHits().getTotalHits().value()) ); SearchHits hits = multiMatchResp.getHits(); if (field.startsWith("missing")) { @@ -451,7 +451,7 @@ public void testEquivalence() { var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); final int numDocs; try { - numDocs = (int) response.getHits().getTotalHits().value; + numDocs = (int) response.getHits().getTotalHits().value(); } finally { response.decRef(); } @@ -944,7 +944,7 @@ private static void assertEquivalent(String query, SearchResponse left, SearchRe assertNoFailures(right); SearchHits leftHits = left.getHits(); SearchHits rightHits = right.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index e25e330e072a6..c8fe9498b156f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -263,7 +263,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { } private void assertHits(SearchHits hits, String... 
ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 45b98686e0484..cffba49d5941c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -10,7 +10,7 @@ package org.elasticsearch.search.query; import org.apache.lucene.analysis.pattern.PatternReplaceCharFilter; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.tests.analysis.MockTokenizer; @@ -264,7 +264,7 @@ public void testConstantScoreQuery() throws Exception { MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num))); final long[] constantScoreTotalHits = new long[1]; assertResponse(prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num), response -> { - constantScoreTotalHits[0] = response.getHits().getTotalHits().value; + constantScoreTotalHits[0] = response.getHits().getTotalHits().value(); SearchHits hits = response.getHits(); for (SearchHit searchHit : hits) { assertThat(searchHit, hasScore(1.0f)); @@ -277,7 +277,7 @@ public void testConstantScoreQuery() throws Exception { ).setSize(num), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(constantScoreTotalHits[0])); + assertThat(hits.getTotalHits().value(), equalTo(constantScoreTotalHits[0])); if (constantScoreTotalHits[0] > 1) { float expected = hits.getAt(0).getScore(); for (SearchHit 
searchHit : hits) { @@ -1693,7 +1693,7 @@ public void testQueryStringParserCache() throws Exception { assertResponse( prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); first[0] = response.getHits().getAt(0).getScore(); } @@ -1704,7 +1704,7 @@ public void testQueryStringParserCache() throws Exception { prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); float actual = response.getHits().getAt(0).getScore(); assertThat(finalI + " expected: " + first[0] + " actual: " + actual, Float.compare(first[0], actual), equalTo(0)); @@ -1917,7 +1917,9 @@ public Map> getTokenizers() { } /** - * Test correct handling {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexReader, MultiTermQuery)}. That rewrite method is e.g. + * Test correct handling + * {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexSearcher, MultiTermQuery)}. + * That rewrite method is e.g. * set for fuzzy queries with "constant_score" rewrite nested inside a `span_multi` query and would cause NPEs due to an unset * {@link AttributeSource}. 
*/ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 2fe7931d64c81..43cd4ad30353c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -583,7 +583,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java index 13a7d1fa59496..97aa428822fae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java @@ -75,7 +75,7 @@ public void testSimpleSearch() throws ExecutionException, InterruptedException { assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.RUNNING), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.FAILED), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(testClusterInfo.get("total_docs"))); + assertThat(response.getHits().getTotalHits().value(), equalTo(testClusterInfo.get("total_docs"))); }); } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java index 26af82cf021f2..4a966677cce2b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java @@ -223,8 +223,8 @@ public void testRankDocsRetrieverBasicWithPagination() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_4")); @@ -272,8 +272,8 @@ public void testRankDocsRetrieverWithAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertNotNull(resp.getAggregations()); assertNotNull(resp.getAggregations().get("topic")); @@ -330,8 +330,8 @@ public void testRankDocsRetrieverWithCollapse() { ElasticsearchAssertions.assertResponse(req, resp -> { 
assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); @@ -392,8 +392,8 @@ public void testRankDocsRetrieverWithCollapseAndAggs() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertNotNull(resp.getAggregations()); assertNotNull(resp.getAggregations().get("topic")); @@ -444,8 +444,8 @@ public void testRankDocsRetrieverWithNestedQuery() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); @@ -503,8 +503,8 @@ public 
void testRankDocsRetrieverMultipleCompoundRetrievers() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_2")); @@ -541,8 +541,8 @@ public void testRankDocsRetrieverDifferentNestedSorting() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_7")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java index 43197b77b2c1e..25b43a2dc946e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java @@ -78,8 +78,8 @@ public void testRewrite() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); 
assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_0")); }); } @@ -91,8 +91,8 @@ public void testRewriteCompound() { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 35990fa3755b1..9a7ce2c5c28ab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -123,17 +123,17 @@ public void testSimplePreference() { assertResponse( prepareSearch().setQuery(matchAllQuery()), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("_local"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( 
prepareSearch().setQuery(matchAllQuery()).setPreference("1234"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 33b554a508e2b..06ce330213af8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -51,15 +51,15 @@ public void testNodeSelection() { // Before we've gathered stats for all nodes, we should try each node once. Set nodeIds = new HashSet<>(); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertEquals(3, nodeIds.size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 2c96c27a0d12d..f59be6bb75928 
100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -122,7 +122,7 @@ public void testCustomScriptBinaryField() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sbinaryData").getValues().get(0), equalTo(16)); } @@ -175,7 +175,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); @@ -196,7 +196,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); } @@ -214,7 +214,7 @@ public void testCustomScriptBoost() throws Exception { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index d3da4639a3927..ac5738a9b67b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -44,7 +44,7 @@ public void testDuelQueryThenFetch() throws Exception { prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), control -> { SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(sh.getHits().length, equalTo(context.numDocs)); SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) @@ -55,7 +55,7 @@ public void testDuelQueryThenFetch() throws Exception { try { assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); int counter = 0; @@ -69,7 +69,7 @@ public void testDuelQueryThenFetch() throws Exception { searchScrollResponse.decRef(); searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(10)).get(); assertNoFailures(searchScrollResponse); - 
assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); if (searchScrollResponse.getHits().getHits().length == 0) { break; } @@ -241,7 +241,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int try { while (true) { assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getTotalHits().value(), scroll.getHits().getTotalHits().value()); assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); if (scroll.getHits().getHits().length == 0) { break; @@ -255,7 +255,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int scroll.decRef(); scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll(TimeValue.timeValueMinutes(10)).get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); + assertEquals(control.getHits().getTotalHits().value(), scrollDocs); } catch (AssertionError e) { logger.info("Control:\n{}", control); logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 7c3dde22ce9d0..7ac24b77a4b6d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -89,7 +89,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, 
equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -98,7 +98,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -107,7 +107,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -145,7 +145,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -155,7 +155,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -166,7 +166,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -176,7 +176,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(0)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -262,7 +262,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); 
assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -278,7 +278,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -289,7 +289,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -300,7 +300,7 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -381,7 +381,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", 
SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -397,7 +397,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -408,7 +408,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -419,7 +419,7 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { 
assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -535,7 +535,7 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), searchResponse -> { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -601,7 +601,7 @@ public void testInvalidScrollKeepAlive() throws IOException { assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { assertNotNull(searchResponse.getScrollId()); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); Exception ex = expectThrows( Exception.class, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 7c459f91a1ac0..353858e9d6974 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -150,7 +150,7 @@ public void testWithNullStrings() throws InterruptedException { .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0, null }), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo(2L)); 
assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index a62a042a3cab5..e87c4790aa665 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -555,7 +555,7 @@ public void testStrictlyCountRequest() throws Exception { assertNoFailuresAndResponse( prepareSearch("test_count_1", "test_count_2").setTrackTotalHits(true).setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(11L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(11L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 979cb9e8a8c4c..e079994003751 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -117,7 +117,7 @@ public void testWithPreferenceAndRoutings() throws Exception { setupIndex(totalDocs, numShards); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = 
randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -129,7 +129,7 @@ public void testWithPreferenceAndRoutings() throws Exception { }); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -147,7 +147,7 @@ public void testWithPreferenceAndRoutings() throws Exception { .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) ); assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()) @@ -166,7 +166,7 @@ private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String f SearchResponse searchResponse = request.slice(sliceBuilder).get(); try { totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); int numSliceResults = searchResponse.getHits().getHits().length; String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { @@ -238,7 +238,7 @@ private void assertSearchSlicesWithPointInTime( SearchResponse searchResponse = request.get(); try { - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) 
searchResponse.getHits().getTotalHits().value(); while (true) { int numHits = searchResponse.getHits().getHits().length; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 3be427e37d60c..d1841ebaf8071 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -286,7 +286,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut assertNoFailuresAndResponse( prepareSearch("test").setQuery(matchAllQuery()).setSize(size).addSort("dense_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = denseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -307,7 +307,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut .setSize(size) .addSort("sparse_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) sparseBytes.size())); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) sparseBytes.size())); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = sparseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -818,7 +818,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), 
equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -828,7 +828,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -838,7 +838,7 @@ public void testSortMissingNumbers() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -884,7 +884,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -896,7 +896,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - 
assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -908,7 +908,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -920,7 +920,7 @@ public void testSortMissingStrings() throws IOException { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -1183,7 +1183,7 @@ public void testSortMVField() throws Exception { refresh(); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1197,7 +1197,7 @@ public void testSortMVField() throws Exception { }); 
assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1214,7 +1214,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1232,7 +1232,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1250,7 +1250,7 @@ public void testSortMVField() throws Exception { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1264,7 +1264,7 @@ public void testSortMVField() throws Exception { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1277,7 +1277,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1290,7 +1290,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1303,7 +1303,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), 
equalTo(Integer.toString(2))); @@ -1316,7 +1316,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1329,7 +1329,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1342,7 +1342,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1355,7 +1355,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f)); }); 
assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1368,7 +1368,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1381,7 +1381,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1394,7 +1394,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); 
assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1407,7 +1407,7 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("07")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1719,8 +1719,8 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order), singleShardResponse -> { assertThat( - multiShardResponse.getHits().getTotalHits().value, - equalTo(singleShardResponse.getHits().getTotalHits().value) + multiShardResponse.getHits().getTotalHits().value(), + equalTo(singleShardResponse.getHits().getTotalHits().value()) ); assertThat(multiShardResponse.getHits().getHits().length, equalTo(singleShardResponse.getHits().getHits().length)); for (int i = 0; i < multiShardResponse.getHits().getHits().length; i++) { @@ -1747,14 +1747,14 @@ public void testCustomFormat() throws Exception { ); assertNoFailuresAndResponse(prepareSearch("test").addSort(SortBuilders.fieldSort("ip")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues()); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues()); }); assertNoFailuresAndResponse( prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }), response -> { - 
assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java index ae0d2cbeb841f..fc5d40ae18c14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -362,7 +362,7 @@ public void testDocumentsWithNullValue() throws Exception { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -373,7 +373,7 @@ public void testDocumentsWithNullValue() throws Exception { .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) .addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -391,7 +391,7 
@@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -409,7 +409,7 @@ public void testDocumentsWithNullValue() throws Exception { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("2")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 6351d8d906389..ec9c680e17fc3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -64,12 +64,12 @@ public void testInnerHits() { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); 
assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java index 2952150c2cb22..f90056c6ae859 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java @@ -54,10 +54,10 @@ public void testCustomBM25Similarity() throws Exception { .get(); assertResponse(prepareSearch().setQuery(matchQuery("field1", "quick brown fox")), bm25SearchResponse -> { - assertThat(bm25SearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(bm25SearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float bm25Score = bm25SearchResponse.getHits().getHits()[0].getScore(); assertResponse(prepareSearch().setQuery(matchQuery("field2", "quick brown fox")), booleanSearchResponse -> { - assertThat(booleanSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(booleanSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float defaultScore = booleanSearchResponse.getHits().getHits()[0].getScore(); assertThat(bm25Score, not(equalTo(defaultScore))); }); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 507fef10a5f44..56672957dd571 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -import org.elasticsearch.index.codec.Elasticsearch814Codec; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.plugins.internal.RestExtension; @@ -455,7 +454,10 @@ org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat, org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat; - provides org.apache.lucene.codecs.Codec with Elasticsearch814Codec; + provides org.apache.lucene.codecs.Codec + with + org.elasticsearch.index.codec.Elasticsearch814Codec, + org.elasticsearch.index.codec.Elasticsearch816Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index be6d714c939de..998bc175dc6b6 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -230,6 +230,7 @@ static TransportVersion def(int id) { public static final TransportVersion ADD_DATA_STREAM_OPTIONS = def(8_754_00_0); public static final TransportVersion CCS_REMOTE_TELEMETRY_STATS = def(8_755_00_0); public static final TransportVersion ESQL_CCS_EXECUTION_INFO = def(8_756_00_0); + public static final TransportVersion REGEX_AND_RANGE_INTERVAL_QUERIES = def(8_757_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index b45bce2d14d85..e668624440351 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -13,6 +13,7 @@ import org.apache.lucene.backward_codecs.lucene50.Lucene50PostingsFormat; import org.apache.lucene.backward_codecs.lucene84.Lucene84PostingsFormat; import org.apache.lucene.backward_codecs.lucene90.Lucene90PostingsFormat; +import org.apache.lucene.backward_codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.KnnVectorsReader; @@ -20,7 +21,7 @@ import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; -import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; @@ -31,6 +32,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PointValues; @@ -272,7 +274,7 @@ void analyzeDocValues(SegmentReader reader, IndexDiskUsageStats stats) throws IO } case SORTED_SET -> { SortedSetDocValues sortedSet = iterateDocValues(maxDocs, () -> docValuesReader.getSortedSet(field), dv -> { - while (dv.nextOrd() != 
SortedSetDocValues.NO_MORE_ORDS) { + for (int i = 0; i < dv.docValueCount(); i++) { cancellationChecker.logEvent(); } }); @@ -304,6 +306,9 @@ private static void readProximity(Terms terms, PostingsEnum postings) throws IOE private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef term) throws IOException { if (term != null && termsEnum.seekExact(term)) { final TermState termState = termsEnum.termState(); + if (termState instanceof final Lucene912PostingsFormat.IntBlockTermState blockTermState) { + return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); + } if (termState instanceof final ES812PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); } @@ -540,13 +545,14 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I if (field.getVectorDimension() > 0) { switch (field.getVectorEncoding()) { case BYTE -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file ByteVectorValues vectorValues = vectorReader.getByteVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -554,22 +560,23 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } case FLOAT32 -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file FloatVectorValues vectorValues = vectorReader.getFloatVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -577,11 +584,11 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java index 8ac2033e2ff19..dda589a458f88 100644 --- a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java @@ -54,7 +54,7 @@ SearchSortValuesAndFormats getBottomSortValues() { } synchronized void consumeTopDocs(TopFieldDocs topDocs, DocValueFormat[] sortValuesFormat) { - totalHits += topDocs.totalHits.value; + totalHits += topDocs.totalHits.value(); if (validateShardSortFields(topDocs.fields) == false) { return; } diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java index d41a2561646b8..b52d76aac4132 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -57,8 +57,8 @@ public void consumeResult(SearchPhaseResult result, Runnable next) { return; } // set the relation to the first non-equal relation - relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, 
result.queryResult().getTotalHits().relation); - totalHits.add(result.queryResult().getTotalHits().value); + relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, result.queryResult().getTotalHits().relation()); + totalHits.add(result.queryResult().getTotalHits().value()); terminatedEarly.compareAndSet(false, (result.queryResult().terminatedEarly() != null && result.queryResult().terminatedEarly())); timedOut.compareAndSet(false, result.queryResult().searchTimedOut()); next.run(); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index a6acb3ee2a52e..6605ad26fb3ad 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -878,8 +878,8 @@ TotalHits getTotalHits() { void add(TopDocsAndMaxScore topDocs, boolean timedOut, Boolean terminatedEarly) { if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { - totalHits += topDocs.topDocs.totalHits.value; - if (topDocs.topDocs.totalHits.relation == Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += topDocs.topDocs.totalHits.value(); + if (topDocs.topDocs.totalHits.relation() == Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 021ad8127a2d0..6a881163914e4 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -701,7 +701,7 @@ String jvmVendor() { } String javaVersion() { - return Constants.JAVA_VERSION; + return Runtime.version().toString(); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java 
b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 3acd577aa42e3..700472a0809b6 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -89,7 +89,7 @@ import java.util.Objects; public class Lucene { - public static final String LATEST_CODEC = "Lucene99"; + public static final String LATEST_CODEC = "Lucene912"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -242,7 +242,7 @@ public static void checkSegmentInfoIntegrity(final Directory directory) throws I @Override protected Object doBody(String segmentFileName) throws IOException { - try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) { + try (IndexInput input = directory.openInput(segmentFileName, IOContext.READONCE)) { CodecUtil.checksumEntireFile(input); } return null; @@ -393,8 +393,8 @@ public static ScoreDoc readScoreDoc(StreamInput in) throws IOException { private static final Class GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass(); public static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws IOException { - out.writeVLong(totalHits.value); - out.writeEnum(totalHits.relation); + out.writeVLong(totalHits.value()); + out.writeEnum(totalHits.relation()); } public static void writeTopDocs(StreamOutput out, TopDocsAndMaxScore topDocs) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 625438ebdff97..cbceef120b877 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; 
+import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.core.Nullable; import java.io.IOException; @@ -177,6 +178,11 @@ public boolean seekExact(BytesRef text) throws IOException { } } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) { + return () -> this.seekExact(bytesRef); + } + @Override public int docFreq() throws IOException { return currentDocFreq; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java index 5bc52253939af..9460aba0a99cb 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java @@ -14,7 +14,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -38,8 +37,6 @@ public static Automaton caseInsensitivePrefix(String s) { Automaton a = Operations.concatenate(list); // since all elements in the list should be deterministic already, the concatenation also is, so no need to determinized assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); - assert a.isDeterministic(); return a; } @@ -100,7 +97,7 @@ public static Automaton toCaseInsensitiveWildcardAutomaton(Term wildcardquery) { i += length; } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } protected static Automaton toCaseInsensitiveString(BytesRef br) { @@ -117,7 +114,6 @@ public static Automaton toCaseInsensitiveString(String s) { Automaton a = Operations.concatenate(list); // concatenating deterministic automata should result in a deterministic 
automaton. No need to determinize here. assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); return a; } @@ -132,7 +128,6 @@ public static Automaton toCaseInsensitiveChar(int codepoint) { if (altCase != codepoint) { result = Operations.union(case1, Automata.makeChar(altCase)); // this automaton should always be deterministic, no need to determinize - result = MinimizationOperations.minimize(result, 0); assert result.isDeterministic(); } else { result = case1; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java index b6f102a98203f..65688b69f5aa0 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java @@ -20,12 +20,12 @@ public CaseInsensitivePrefixQuery(Term term) { super(term, caseInsensitivePrefix(term.text())); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary) { + super(term, caseInsensitivePrefix(term.text()), isBinary); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { + super(term, caseInsensitivePrefix(term.text()), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java index 
91700e5ffe6c1..6368acf383120 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java @@ -26,8 +26,8 @@ public CaseInsensitiveWildcardQuery(Term term) { super(term, toCaseInsensitiveWildcardAutomaton(term)); } - public CaseInsensitiveWildcardQuery(Term term, int determinizeWorkLimit, boolean isBinary, RewriteMethod rewriteMethod) { - super(term, toCaseInsensitiveWildcardAutomaton(term), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitiveWildcardQuery(Term term, boolean isBinary, RewriteMethod rewriteMethod) { + super(term, toCaseInsensitiveWildcardAutomaton(term), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 25fa926ada2c8..e2ac58caccd57 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -123,7 +123,7 @@ public static Query applyMinimumShouldMatch(BooleanQuery query, @Nullable String } int optionalClauses = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 13fae303909f5..299739fc3ba8a 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -19,7 +19,7 @@ import org.apache.lucene.queries.spans.SpanOrQuery; import 
org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.AttributeSource; @@ -42,7 +42,7 @@ public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap private final boolean hardLimit; public SpanBooleanQueryRewriteWithMaxClause() { - this(BooleanQuery.getMaxClauseCount(), true); + this(IndexSearcher.getMaxClauseCount(), true); } public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) { @@ -59,10 +59,11 @@ public boolean isHardLimit() { } @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public SpanQuery rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { @Override - public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public Query rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { + IndexReader reader = indexSearcher.getIndexReader(); Collection queries = collectTerms(reader, query); if (queries.size() == 0) { return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); @@ -99,7 +100,7 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu + query.toString() + " ] " + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " - + BooleanQuery.getMaxClauseCount() + + IndexSearcher.getMaxClauseCount() + "]" ); } else { @@ -112,6 +113,6 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu return queries; } }; - return (SpanQuery) delegate.rewrite(reader, query); + return (SpanQuery) delegate.rewrite(indexSearcher, query); } } diff --git 
a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index f8d0c81466dcc..54cd4c9946f62 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -207,7 +208,7 @@ public final class XMoreLikeThis { /** * Return a Query with no more than this many terms. * - * @see BooleanQuery#getMaxClauseCount + * @see IndexSearcher#getMaxClauseCount * @see #setMaxQueryTerms */ public static final int DEFAULT_MAX_QUERY_TERMS = 25; @@ -468,7 +469,7 @@ private void addToQuery(PriorityQueue q, BooleanQuery.Builder query) try { query.add(tq, BooleanClause.Occur.SHOULD); - } catch (BooleanQuery.TooManyClauses ignore) { + } catch (IndexSearcher.TooManyClauses ignore) { break; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index ff82160be0325..5a0c216c4e717 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -272,44 +272,65 @@ class CustomBoostFactorWeight extends Weight { this.needsScores = needsScores; } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + private 
ScorerSupplier functionScorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - final long leadCost = subQueryScorer.iterator().cost(); - final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; - final Bits[] docSets = new Bits[functions.length]; - for (int i = 0; i < functions.length; i++) { - ScoreFunction function = functions[i]; - leafFunctions[i] = function.getLeafScoreFunction(context); - if (filterWeights[i] != null) { - ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); - docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); - } else { - docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; + final Bits[] docSets = new Bits[functions.length]; + for (int i = 0; i < functions.length; i++) { + ScoreFunction function = functions[i]; + leafFunctions[i] = function.getLeafScoreFunction(context); + if (filterWeights[i] != null) { + ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); + docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); + } else { + docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + } + } + return new FunctionFactorScorer( + subQueryScorer, + scoreMode, + functions, + maxBoost, + leafFunctions, + docSets, + combineFunction, + needsScores + ); } - } - return new FunctionFactorScorer( - this, - subQueryScorer, - scoreMode, - functions, - maxBoost, - leafFunctions, - docSets, - combineFunction, - needsScores - ); + + @Override + public long cost() { + return 
subQueryScorerSupplier.cost(); + } + }; } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer scorer = functionScorer(context); - if (scorer != null && minScore != null) { - scorer = new MinScoreScorer(this, scorer, minScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier scorerSupplier = functionScorerSupplier(context); + + if (scorerSupplier == null || minScore == null) { + return scorerSupplier; } - return scorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(scorerSupplier.get(leadCost), minScore); + } + + @Override + public long cost() { + return scorerSupplier.cost(); + } + }; } @Override @@ -356,7 +377,8 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } else if (singleFunction && functionsExplanations.size() == 1) { factorExplanation = functionsExplanations.get(0); } else { - FunctionFactorScorer scorer = functionScorer(context); + + FunctionFactorScorer scorer = (FunctionFactorScorer) functionScorerSupplier(context).get(1L); int actualDoc = scorer.iterator().advance(doc); assert (actualDoc == doc); double score = scorer.computeScore(doc, expl.getValue().floatValue()); @@ -391,7 +413,6 @@ static class FunctionFactorScorer extends FilterScorer { private final boolean needsScores; private FunctionFactorScorer( - CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, ScoreFunction[] functions, @@ -401,7 +422,7 @@ private FunctionFactorScorer( CombineFunction scoreCombiner, boolean needsScores ) throws IOException { - super(scorer, w); + super(scorer); this.scoreMode = scoreMode; this.functions = functions; this.leafFunctions = leafFunctions; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java index 
3d23f66b09d82..0fd46447b3ea9 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import java.io.IOException; @@ -25,12 +24,11 @@ public final class MinScoreScorer extends Scorer { private float curScore; private final float boost; - public MinScoreScorer(Weight weight, Scorer scorer, float minScore) { - this(weight, scorer, minScore, 1f); + public MinScoreScorer(Scorer scorer, float minScore) { + this(scorer, minScore, 1f); } - public MinScoreScorer(Weight weight, Scorer scorer, float minScore, float boost) { - super(weight); + public MinScoreScorer(Scorer scorer, float minScore, float boost) { this.in = scorer; this.minScore = minScore; this.boost = boost; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 4222b5dff98ab..d38243f5348c4 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -27,14 +27,8 @@ public class ScriptScoreFunction extends ScoreFunction { static final class CannedScorer extends Scorable { - protected int docid; protected float score; - @Override - public int docID() { - return docid; - } - @Override public float score() { return score; @@ -70,14 +64,13 @@ public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOEx if (script.needs_termStats()) { assert termStatsFactory != null; - leafScript._setTermStats(termStatsFactory.apply(ctx, scorer::docID)); + 
leafScript._setTermStats(termStatsFactory.apply(ctx, leafScript::docId)); } return new LeafScoreFunction() { private double score(int docId, float subQueryScore, ScoreScript.ExplanationHolder holder) throws IOException { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore; double result = leafScript.execute(holder); @@ -97,7 +90,6 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE Explanation exp; if (leafScript instanceof ExplainableScoreScript) { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); } else { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java index 5e3f8e8e62714..e58b2fffed001 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -38,6 +39,7 @@ import java.util.HashSet; import java.util.Objects; import java.util.Set; +import java.util.function.IntSupplier; /** * A query that uses a script to compute documents' scores. 
@@ -104,30 +106,40 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new Weight(this) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - if (minScore == null) { - final BulkScorer subQueryBulkScorer = subQueryWeight.bulkScorer(context); - if (subQueryBulkScorer == null) { - return null; - } - return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); - } else { - return super.bulkScorer(context); - } - } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - Scorer scriptScorer = new ScriptScorer(this, makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); - if (minScore != null) { - scriptScorer = new MinScoreScorer(this, scriptScorer, minScore); - } - return scriptScorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + Scorer scriptScorer = new ScriptScorer(makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); + if (minScore != null) { + scriptScorer = new MinScoreScorer(scriptScorer, minScore); + } + return scriptScorer; + } + + @Override + public BulkScorer bulkScorer() throws IOException { + if (minScore == null) { + final BulkScorer subQueryBulkScorer = subQueryScorerSupplier.bulkScorer(); + return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); + } else { + return super.bulkScorer(); + } + } + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } 
@Override @@ -138,7 +150,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } ExplanationHolder explanationHolder = new ExplanationHolder(); Scorer scorer = new ScriptScorer( - this, makeScoreScript(context), subQueryWeight.scorer(context), subQueryScoreMode, @@ -231,14 +242,12 @@ private static class ScriptScorer extends Scorer { private final ExplanationHolder explanation; ScriptScorer( - Weight weight, ScoreScript scoreScript, Scorer subQueryScorer, ScoreMode subQueryScoreMode, float boost, ExplanationHolder explanation ) { - super(weight); this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); @@ -292,19 +301,27 @@ private static class ScriptScorable extends Scorable { private final ScoreScript scoreScript; private final Scorable subQueryScorer; private final float boost; + private final IntSupplier docIDSupplier; - ScriptScorable(ScoreScript scoreScript, Scorable subQueryScorer, ScoreMode subQueryScoreMode, float boost) { + ScriptScorable( + ScoreScript scoreScript, + Scorable subQueryScorer, + ScoreMode subQueryScoreMode, + float boost, + IntSupplier docIDSupplier + ) { this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); } this.subQueryScorer = subQueryScorer; this.boost = boost; + this.docIDSupplier = docIDSupplier; } @Override public float score() throws IOException { - int docId = docID(); + int docId = docIDSupplier.getAsInt(); scoreScript.setDocument(docId); float score = (float) scoreScript.execute(null); if (score < 0f || Float.isNaN(score)) { @@ -320,10 +337,6 @@ public float score() throws IOException { return score * boost; } - @Override - public int docID() { - return subQueryScorer.docID(); - } } /** @@ -350,9 +363,18 @@ public int score(LeafCollector collector, Bits acceptDocs, int min, int max) thr private LeafCollector wrapCollector(LeafCollector collector) { return new 
FilterLeafCollector(collector) { + + private int docID; + @Override public void setScorer(Scorable scorer) throws IOException { - in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost)); + in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost, () -> docID)); + } + + @Override + public void collect(int doc) throws IOException { + this.docID = doc; + super.collect(doc); } }; } diff --git a/server/src/main/java/org/elasticsearch/common/regex/Regex.java b/server/src/main/java/org/elasticsearch/common/regex/Regex.java index d5b2e8497fc0b..aaaab78b71736 100644 --- a/server/src/main/java/org/elasticsearch/common/regex/Regex.java +++ b/server/src/main/java/org/elasticsearch/common/regex/Regex.java @@ -69,7 +69,7 @@ public static Automaton simpleMatchToAutomaton(String pattern) { previous = i + 1; } automata.add(Automata.makeString(pattern.substring(previous))); - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** @@ -113,7 +113,7 @@ public static Automaton simpleMatchToAutomaton(String... 
patterns) { prefixAutomaton.add(Automata.makeAnyString()); automata.add(Operations.concatenate(prefixAutomaton)); } - return Operations.union(automata); + return Operations.determinize(Operations.union(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 232ce34b153ab..defaddb25eb47 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -254,7 +254,7 @@ public static KeyStoreWrapper load(Path configDir) throws IOException { } Directory directory = new NIOFSDirectory(configDir); - try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME)) { final int formatVersion; try { formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, CURRENT_VERSION); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 2ef96123e63d8..c4b03c712c272 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -280,8 +280,8 @@ public static Function, Map> filter(String[] include = matchAllAutomaton; } else { Automaton includeA = Regex.simpleMatchToAutomaton(includes); - includeA = makeMatchDotsInFieldNames(includeA); - include = new CharacterRunAutomaton(includeA, MAX_DETERMINIZED_STATES); + includeA = Operations.determinize(makeMatchDotsInFieldNames(includeA), MAX_DETERMINIZED_STATES); + include = new CharacterRunAutomaton(includeA); } Automaton excludeA; @@ -289,9 +289,9 @@ 
public static Function, Map> filter(String[] excludeA = Automata.makeEmpty(); } else { excludeA = Regex.simpleMatchToAutomaton(excludes); - excludeA = makeMatchDotsInFieldNames(excludeA); + excludeA = Operations.determinize(makeMatchDotsInFieldNames(excludeA), MAX_DETERMINIZED_STATES); } - CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA, MAX_DETERMINIZED_STATES); + CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA); // NOTE: We cannot use Operations.minus because of the special case that // we want all sub properties to match as soon as an object matches diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 749946e05b745..0c6cf2c8a0761 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.DocIdSetIterator; @@ -449,7 +450,7 @@ OnDiskState loadBestOnDiskState(boolean checkClean) throws IOException { // resources during test execution checkIndex.setThreadCount(1); checkIndex.setInfoStream(printStream); - checkIndex.setChecksumsOnly(true); + checkIndex.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); isClean = checkIndex.checkIndex().clean; } @@ -705,10 +706,11 @@ private static void consumeFromType( final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + final StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { logger.trace("processing doc {}", docIdSetIterator.docID()); - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final BytesArray documentData = new BytesArray(document.getBinaryValue(DATA_FIELD_NAME)); if (document.getField(PAGE_FIELD_NAME) == null) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 7eed5f2b7759d..4ff7ef60cc0a2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -18,7 +18,6 @@ import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.Client; @@ -451,7 +450,7 @@ public boolean match(String setting) { } public static Type defaultStoreType(final boolean allowMmap) { - if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (allowMmap && Constants.JRE_IS_64BIT) { return Type.HYBRIDFS; } else { return Type.NIOFS; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 5f9cea7966560..ea9ecd2d9c8f5 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -15,6 +15,7 @@ import 
org.elasticsearch.core.UpdateForV9; import java.lang.reflect.Field; +import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -48,29 +49,38 @@ private static IndexVersion def(int id, Version luceneVersion) { return new IndexVersion(id, luceneVersion); } + // TODO: this is just a hack to allow to keep the V7 IndexVersion constants, during compilation. Remove + private static Version parseUnchecked(String version) { + try { + return Version.parse(version); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + @UpdateForV9 // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_0_0 = def(7_00_00_99, Version.LUCENE_8_0_0); - - public static final IndexVersion V_7_1_0 = def(7_01_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_0 = def(7_02_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_1 = def(7_02_01_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_3_0 = def(7_03_00_99, Version.LUCENE_8_1_0); - public static final IndexVersion V_7_4_0 = def(7_04_00_99, Version.LUCENE_8_2_0); - public static final IndexVersion V_7_5_0 = def(7_05_00_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_5_2 = def(7_05_02_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_6_0 = def(7_06_00_99, Version.LUCENE_8_4_0); - public static final IndexVersion V_7_7_0 = def(7_07_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_8_0 = def(7_08_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_9_0 = def(7_09_00_99, Version.LUCENE_8_6_0); - public static final IndexVersion V_7_10_0 = def(7_10_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_11_0 = def(7_11_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_12_0 = def(7_12_00_99, 
Version.LUCENE_8_8_0); - public static final IndexVersion V_7_13_0 = def(7_13_00_99, Version.LUCENE_8_8_2); - public static final IndexVersion V_7_14_0 = def(7_14_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_15_0 = def(7_15_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_16_0 = def(7_16_00_99, Version.LUCENE_8_10_1); - public static final IndexVersion V_7_17_0 = def(7_17_00_99, Version.LUCENE_8_11_1); + + public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_1_0 = def(7_01_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); + public static final IndexVersion V_7_4_0 = def(7_04_00_99, parseUnchecked("8.2.0")); + public static final IndexVersion V_7_5_0 = def(7_05_00_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_5_2 = def(7_05_02_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); + public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_8_0 = def(7_08_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); + public static final IndexVersion V_7_10_0 = def(7_10_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_11_0 = def(7_11_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); + public static final IndexVersion V_7_13_0 = def(7_13_00_99, parseUnchecked("8.8.2")); + public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_15_0 = def(7_15_00_99, 
parseUnchecked("8.9.0")); + public static final IndexVersion V_7_16_0 = def(7_16_00_99, parseUnchecked("8.10.1")); + public static final IndexVersion V_7_17_0 = def(7_17_00_99, parseUnchecked("8.11.1")); public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); @@ -116,6 +126,8 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion LENIENT_UPDATEABLE_SYNONYMS = def(8_513_00_0, Version.LUCENE_9_11_1); public static final IndexVersion ENABLE_IGNORE_MALFORMED_LOGSDB = def(8_514_00_0, Version.LUCENE_9_11_1); public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_00_0, Version.LUCENE_9_11_1); + public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_00_0, Version.LUCENE_9_12_0); + public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index f1a9d4ed2d211..144b99abe5644 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -12,7 +12,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -46,7 +46,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -58,7 +58,7 @@ public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) ); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git 
a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index 2ba169583b712..00614140e237a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -49,11 +49,12 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm deduplicated[i++] = new FieldInfo( FieldMapper.internFieldName(fi.getName()), fi.number, - fi.hasVectors(), + fi.hasTermVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), + fi.docValuesSkipIndexType(), fi.getDocValuesGen(), internStringStringMap(fi.attributes()), fi.getPointDimensionCount(), diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java index 44108109ad329..f3d758f4fc8b7 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java @@ -9,14 +9,14 @@ package org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene99.Lucene99Codec; +import org.apache.lucene.backward_codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import 
org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java new file mode 100644 index 0000000000000..00711c7ecc306 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 8.16. This extends the Lucene 9.12 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. 
+ */ +public class Elasticsearch816Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch816Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch816Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch816Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch816Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. 
+ */ + public Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch816", new Lucene912Codec()); + this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.defaultPostingsFormat = new Lucene912PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field + * . + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *

The default implementation always returns "Lucene912". + * + *

WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index 5d97f78e2747b..64c2ca788f63c 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -22,11 +22,11 @@ * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene99Codec { +public final class LegacyPerFieldMapperCodec extends Lucene912Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene99Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene912Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. 
You must create a copy of the current Elasticsearch diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index 46b05fdd282db..83c5cb396d88b 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -26,7 +26,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public final class PerFieldMapperCodec extends Elasticsearch814Codec { +public final class PerFieldMapperCodec extends Elasticsearch816Codec { private final PerFieldFormatSupplier formatSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java index d26fb52a82bcd..81129835518da 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -142,12 +141,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = 
state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { CodecUtil.checkIndexHeader( metaIn, BLOOM_CODEC_NAME, diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java index 01d874adec14d..abf68abe51887 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java @@ -38,7 +38,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -291,12 +290,7 @@ static final class FieldsReader extends FieldsProducer { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { Map bloomFilters = null; Throwable priorE = null; long indexFileLength = 0; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 71d9768ac5ff7..414fb4af20540 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -15,6 +15,7 @@ import org.apache.lucene.codecs.lucene90.IndexedDISI; import org.apache.lucene.index.BinaryDocValues; import 
org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; @@ -41,9 +42,13 @@ import org.elasticsearch.core.IOUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_LEVEL_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SORTED_SET; final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { @@ -51,9 +56,16 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { IndexOutput data, meta; final int maxDoc; private byte[] termsDictBuffer; - - ES87TSDBDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) - throws IOException { + private final int skipIndexIntervalSize; + + ES87TSDBDocValuesConsumer( + SegmentWriteState state, + int skipIndexIntervalSize, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { this.termsDictBuffer = new byte[1 << 14]; boolean success = false; try { @@ -76,6 +88,7 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { state.segmentSuffix ); maxDoc = state.segmentInfo.maxDoc(); + this.skipIndexIntervalSize = skipIndexIntervalSize; success = true; } finally { if (success == false) { @@ -88,12 +101,17 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.NUMERIC); - 
writeField(field, new EmptyDocValuesProducer() { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { return DocValues.singleton(valuesProducer.getNumeric(field)); } - }, -1); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + + writeField(field, producer, -1); } private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { @@ -263,13 +281,11 @@ public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) th public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.SORTED); - doAddSortedField(field, valuesProducer); + doAddSortedField(field, valuesProducer, false); } - private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - SortedDocValues sorted = valuesProducer.getSorted(field); - int maxOrd = sorted.getValueCount(); - writeField(field, new EmptyDocValuesProducer() { + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer, boolean addTypeByte) throws IOException { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { SortedDocValues sorted = valuesProducer.getSorted(field); @@ -306,7 +322,16 @@ public long cost() { }; return DocValues.singleton(sortedOrds); } - }, maxOrd); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + if (addTypeByte) { + meta.writeByte((byte) 0); // multiValued (0 = singleValued) + } + SortedDocValues sorted = valuesProducer.getSorted(field); + int maxOrd = sorted.getValueCount(); + writeField(field, producer, maxOrd); 
addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); } @@ -459,6 +484,12 @@ public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProdu } private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, valuesProducer); + } + if (maxOrd > -1) { + meta.writeByte((byte) 1); // multiValued (1 = multiValued) + } long[] stats = writeField(field, valuesProducer, maxOrd); int numDocsWithField = Math.toIntExact(stats[0]); long numValues = stats[1]; @@ -510,16 +541,14 @@ public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) meta.writeByte(SORTED_SET); if (isSingleValued(valuesProducer.getSortedSet(field))) { - meta.writeByte((byte) 0); // multiValued (0 = singleValued) doAddSortedField(field, new EmptyDocValuesProducer() { @Override public SortedDocValues getSorted(FieldInfo field) throws IOException { return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); } - }); + }, true); return; } - meta.writeByte((byte) 1); // multiValued (1 = multiValued) SortedSetDocValues values = valuesProducer.getSortedSet(field); long maxOrd = values.getValueCount(); @@ -603,4 +632,157 @@ public void close() throws IOException { meta = data = null; } } + + private static class SkipAccumulator { + int minDocID; + int maxDocID; + int docCount; + long minValue; + long maxValue; + + SkipAccumulator(int docID) { + minDocID = docID; + minValue = Long.MAX_VALUE; + maxValue = Long.MIN_VALUE; + docCount = 0; + } + + boolean isDone(int skipIndexIntervalSize, int valueCount, long nextValue, int nextDoc) { + if (docCount < skipIndexIntervalSize) { + return false; + } + // Once we reach the interval size, we will keep accepting documents if + // - next doc value is not a multi-value + // - current accumulator only contains a single value and next value is 
the same value + // - the accumulator is dense and the next doc keeps the density (no gaps) + return valueCount > 1 || minValue != maxValue || minValue != nextValue || docCount != nextDoc - minDocID; + } + + void accumulate(long value) { + minValue = Math.min(minValue, value); + maxValue = Math.max(maxValue, value); + } + + void accumulate(SkipAccumulator other) { + assert minDocID <= other.minDocID && maxDocID < other.maxDocID; + maxDocID = other.maxDocID; + minValue = Math.min(minValue, other.minValue); + maxValue = Math.max(maxValue, other.maxValue); + docCount += other.docCount; + } + + void nextDoc(int docID) { + maxDocID = docID; + ++docCount; + } + + public static SkipAccumulator merge(List list, int index, int length) { + SkipAccumulator acc = new SkipAccumulator(list.get(index).minDocID); + for (int i = 0; i < length; i++) { + acc.accumulate(list.get(index + i)); + } + return acc; + } + } + + private void writeSkipIndex(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + assert field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE; + final long start = data.getFilePointer(); + final SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + long globalMaxValue = Long.MIN_VALUE; + long globalMinValue = Long.MAX_VALUE; + int globalDocCount = 0; + int maxDocId = -1; + final List accumulators = new ArrayList<>(); + SkipAccumulator accumulator = null; + final int maxAccumulators = 1 << (SKIP_INDEX_LEVEL_SHIFT * (SKIP_INDEX_MAX_LEVEL - 1)); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + final long firstValue = values.nextValue(); + if (accumulator != null && accumulator.isDone(skipIndexIntervalSize, values.docValueCount(), firstValue, doc)) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + accumulator = 
null; + if (accumulators.size() == maxAccumulators) { + writeLevels(accumulators); + accumulators.clear(); + } + } + if (accumulator == null) { + accumulator = new SkipAccumulator(doc); + accumulators.add(accumulator); + } + accumulator.nextDoc(doc); + accumulator.accumulate(firstValue); + for (int i = 1, end = values.docValueCount(); i < end; ++i) { + accumulator.accumulate(values.nextValue()); + } + } + + if (accumulators.isEmpty() == false) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + writeLevels(accumulators); + } + meta.writeLong(start); // record the start in meta + meta.writeLong(data.getFilePointer() - start); // record the length + assert globalDocCount == 0 || globalMaxValue >= globalMinValue; + meta.writeLong(globalMaxValue); + meta.writeLong(globalMinValue); + assert globalDocCount <= maxDocId + 1; + meta.writeInt(globalDocCount); + meta.writeInt(maxDocId); + } + + private void writeLevels(List accumulators) throws IOException { + final List> accumulatorsLevels = new ArrayList<>(SKIP_INDEX_MAX_LEVEL); + accumulatorsLevels.add(accumulators); + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL - 1; i++) { + accumulatorsLevels.add(buildLevel(accumulatorsLevels.get(i))); + } + int totalAccumulators = accumulators.size(); + for (int index = 0; index < totalAccumulators; index++) { + // compute how many levels we need to write for the current accumulator + final int levels = getLevels(index, totalAccumulators); + // write the number of levels + data.writeByte((byte) levels); + // write intervals in reverse order. 
This is done so we don't + // need to read all of them in case of slipping + for (int level = levels - 1; level >= 0; level--) { + final SkipAccumulator accumulator = accumulatorsLevels.get(level).get(index >> (SKIP_INDEX_LEVEL_SHIFT * level)); + data.writeInt(accumulator.maxDocID); + data.writeInt(accumulator.minDocID); + data.writeLong(accumulator.maxValue); + data.writeLong(accumulator.minValue); + data.writeInt(accumulator.docCount); + } + } + } + + private static List buildLevel(List accumulators) { + final int levelSize = 1 << SKIP_INDEX_LEVEL_SHIFT; + final List collector = new ArrayList<>(); + for (int i = 0; i < accumulators.size() - levelSize + 1; i += levelSize) { + collector.add(SkipAccumulator.merge(accumulators, i, levelSize)); + } + return collector; + } + + private static int getLevels(int index, int size) { + if (Integer.numberOfTrailingZeros(index) >= SKIP_INDEX_LEVEL_SHIFT) { + // TODO: can we do it in constant time rather than linearly with SKIP_INDEX_MAX_LEVEL? + final int left = size - index; + for (int level = SKIP_INDEX_MAX_LEVEL - 1; level > 0; level--) { + final int numberIntervals = 1 << (SKIP_INDEX_LEVEL_SHIFT * level); + if (left >= numberIntervals && index % numberIntervals == 0) { + return level + 1; + } + } + } + return 1; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java index 742249892f61f..496c41b42869a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java @@ -43,13 +43,57 @@ public class ES87TSDBDocValuesFormat extends org.apache.lucene.codecs.DocValuesF static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; + // number of documents in an 
interval + private static final int DEFAULT_SKIP_INDEX_INTERVAL_SIZE = 4096; + // bytes on an interval: + // * 1 byte : number of levels + // * 16 bytes: min / max value, + // * 8 bytes: min / max docID + // * 4 bytes: number of documents + private static final long SKIP_INDEX_INTERVAL_BYTES = 29L; + // number of intervals represented as a shift to create a new level, this is 1 << 3 == 8 + // intervals. + static final int SKIP_INDEX_LEVEL_SHIFT = 3; + // max number of levels + // Increasing this number, it increases how much heap we need at index time. + // we currently need (1 * 8 * 8 * 8) = 512 accumulators on heap + static final int SKIP_INDEX_MAX_LEVEL = 4; + // number of bytes to skip when skipping a level. It does not take into account the + // current interval that is being read. + static final long[] SKIP_INDEX_JUMP_LENGTH_PER_LEVEL = new long[SKIP_INDEX_MAX_LEVEL]; + + static { + // Size of the interval minus read bytes (1 byte for level and 4 bytes for maxDocID) + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[0] = SKIP_INDEX_INTERVAL_BYTES - 5L; + for (int level = 1; level < SKIP_INDEX_MAX_LEVEL; level++) { + // jump from previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] = SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level - 1]; + // nodes added by new level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] += (1 << (level * SKIP_INDEX_LEVEL_SHIFT)) * SKIP_INDEX_INTERVAL_BYTES; + // remove the byte levels added in the previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] -= (1 << ((level - 1) * SKIP_INDEX_LEVEL_SHIFT)); + } + } + + private final int skipIndexIntervalSize; + + /** Default constructor. */ public ES87TSDBDocValuesFormat() { + this(DEFAULT_SKIP_INDEX_INTERVAL_SIZE); + } + + /** Doc values fields format with specified skipIndexIntervalSize. 
*/ + public ES87TSDBDocValuesFormat(int skipIndexIntervalSize) { super(CODEC_NAME); + if (skipIndexIntervalSize < 2) { + throw new IllegalArgumentException("skipIndexIntervalSize must be > 1, got [" + skipIndexIntervalSize + "]"); + } + this.skipIndexIntervalSize = skipIndexIntervalSize; } @Override public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - return new ES87TSDBDocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + return new ES87TSDBDocValuesConsumer(state, skipIndexIntervalSize, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index a887516e5e7cc..edf94cc91ebe7 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -16,6 +16,8 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -27,6 +29,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.DataInput; @@ -43,6 +46,8 @@ import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_JUMP_LENGTH_PER_LEVEL; +import static 
org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT; public class ES87TSDBDocValuesProducer extends DocValuesProducer { @@ -51,6 +56,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { private final Map sorted = new HashMap<>(); private final Map sortedSets = new HashMap<>(); private final Map sortedNumerics = new HashMap<>(); + private final Map skippers = new HashMap<>(); private final IndexInput data; private final int maxDoc; @@ -61,7 +67,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { // read in the entries from the metadata file. int version = -1; - try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) { + try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName)) { Throwable priorE = null; try { @@ -659,9 +665,8 @@ public long nextOrd() throws IOException { i = 0; count = ords.docValueCount(); } - if (i++ == count) { - return NO_MORE_ORDS; - } + assert i < count; + i++; return ords.nextValue(); } @@ -700,6 +705,116 @@ public long cost() { }; } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + final DocValuesSkipperEntry entry = skippers.get(field.name); + + final IndexInput input = data.slice("doc value skipper", entry.offset, entry.length); + // Prefetch the first page of data. Following pages are expected to get prefetched through + // read-ahead. + if (input.length() > 0) { + input.prefetch(0, 1); + } + // TODO: should we write to disk the actual max level for this segment? 
+ return new DocValuesSkipper() { + final int[] minDocID = new int[SKIP_INDEX_MAX_LEVEL]; + final int[] maxDocID = new int[SKIP_INDEX_MAX_LEVEL]; + + { + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = -1; + } + } + + final long[] minValue = new long[SKIP_INDEX_MAX_LEVEL]; + final long[] maxValue = new long[SKIP_INDEX_MAX_LEVEL]; + final int[] docCount = new int[SKIP_INDEX_MAX_LEVEL]; + int levels = 1; + + @Override + public void advance(int target) throws IOException { + if (target > entry.maxDocId) { + // skipper is exhausted + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = DocIdSetIterator.NO_MORE_DOCS; + } + } else { + // find next interval + assert target > maxDocID[0] : "target must be bigger that current interval"; + while (true) { + levels = input.readByte(); + assert levels <= SKIP_INDEX_MAX_LEVEL && levels > 0 : "level out of range [" + levels + "]"; + boolean valid = true; + // check if current interval is competitive or we can jump to the next position + for (int level = levels - 1; level >= 0; level--) { + if ((maxDocID[level] = input.readInt()) < target) { + input.skipBytes(SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level]); // the jump for the level + valid = false; + break; + } + minDocID[level] = input.readInt(); + maxValue[level] = input.readLong(); + minValue[level] = input.readLong(); + docCount[level] = input.readInt(); + } + if (valid) { + // adjust levels + while (levels < SKIP_INDEX_MAX_LEVEL && maxDocID[levels] >= target) { + levels++; + } + break; + } + } + } + } + + @Override + public int numLevels() { + return levels; + } + + @Override + public int minDocID(int level) { + return minDocID[level]; + } + + @Override + public int maxDocID(int level) { + return maxDocID[level]; + } + + @Override + public long minValue(int level) { + return minValue[level]; + } + + @Override + public long maxValue(int level) { + return maxValue[level]; + } + + @Override + public int docCount(int level) { + 
return docCount[level]; + } + + @Override + public long minValue() { + return entry.minValue; + } + + @Override + public long maxValue() { + return entry.maxValue; + } + + @Override + public int docCount() { + return entry.docCount; + } + }; + } + @Override public void checkIntegrity() throws IOException { CodecUtil.checksumEntireFile(data); @@ -717,6 +832,9 @@ private void readFields(IndexInput meta, FieldInfos infos) throws IOException { throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); } byte type = meta.readByte(); + if (info.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + skippers.put(info.name, readDocValueSkipperMeta(meta)); + } if (type == ES87TSDBDocValuesFormat.NUMERIC) { numerics.put(info.name, readNumeric(meta)); } else if (type == ES87TSDBDocValuesFormat.BINARY) { @@ -739,6 +857,17 @@ private static NumericEntry readNumeric(IndexInput meta) throws IOException { return entry; } + private static DocValuesSkipperEntry readDocValueSkipperMeta(IndexInput meta) throws IOException { + long offset = meta.readLong(); + long length = meta.readLong(); + long maxValue = meta.readLong(); + long minValue = meta.readLong(); + int docCount = meta.readInt(); + int maxDocID = meta.readInt(); + + return new DocValuesSkipperEntry(offset, length, minValue, maxValue, docCount, maxDocID); + } + private static void readNumeric(IndexInput meta, NumericEntry entry) throws IOException { entry.docsWithFieldOffset = meta.readLong(); entry.docsWithFieldLength = meta.readLong(); @@ -1249,6 +1378,8 @@ private void set() { } } + private record DocValuesSkipperEntry(long offset, long length, long minValue, long maxValue, int docCount, int maxDocId) {} + private static class NumericEntry { long docsWithFieldOffset; long docsWithFieldLength; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index 
5818af87feac7..f432350069483 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -29,6 +29,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -55,6 +56,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException return new ES813FlatVectorReader(format.fieldsReader(state)); } + @Override + public int getMaxDimensions(String fieldName) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + static class ES813FlatVectorWriter extends KnnVectorsWriter { private final FlatVectorsWriter writer; @@ -66,7 +72,7 @@ static class ES813FlatVectorWriter extends KnnVectorsWriter { @Override public KnnFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { - return writer.addField(fieldInfo, null); + return writer.addField(fieldInfo); } @Override @@ -145,10 +151,5 @@ public void search(String field, byte[] target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index d2c40a890e246..34ad69c62aa95 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; import org.apache.lucene.util.hnsw.RandomVectorScorer; +import 
org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -58,6 +59,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException return new ES813FlatVectorReader(format.fieldsReader(state)); } + @Override + public int getMaxDimensions(String fieldName) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + @Override public String toString() { return NAME + "(name=" + NAME + ", innerFormat=" + format + ")"; @@ -74,7 +80,7 @@ public ES813FlatVectorWriter(FlatVectorsWriter writer) { @Override public KnnFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { - return writer.addField(fieldInfo, null); + return writer.addField(fieldInfo); } @Override @@ -153,11 +159,5 @@ public void search(String field, byte[] target, KnnCollector knnCollector, Bits public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 9d993bd948f0f..e78fc22f3215f 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.codec.vectors; -import org.apache.lucene.codecs.KnnFieldVectorsWriter; import org.apache.lucene.codecs.hnsw.DefaultFlatVectorScorer; import org.apache.lucene.codecs.hnsw.FlatFieldVectorsWriter; import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; @@ -23,18 +22,17 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import 
org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedVectorsReader; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.simdvec.VectorScorerFactory; import org.elasticsearch.simdvec.VectorSimilarityType; @@ -67,6 +65,7 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { private final boolean compress; public ES814ScalarQuantizedVectorsFormat(Float confidenceInterval, int bits, boolean compress) { + super(NAME); if (confidenceInterval != null && confidenceInterval != DYNAMIC_CONFIDENCE_INTERVAL && (confidenceInterval < MINIMUM_CONFIDENCE_INTERVAL || confidenceInterval > MAXIMUM_CONFIDENCE_INTERVAL)) { @@ -137,8 +136,8 @@ static final class ES814ScalarQuantizedVectorsWriter extends FlatVectorsWriter { } @Override - public FlatFieldVectorsWriter addField(FieldInfo fieldInfo, KnnFieldVectorsWriter knnFieldVectorsWriter) throws IOException { - return delegate.addField(fieldInfo, knnFieldVectorsWriter); + public FlatFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { + return delegate.addField(fieldInfo); } @Override @@ -243,9 +242,9 @@ public String toString() { } @Override - public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, RandomAccessVectorValues values) + public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, KnnVectorValues 
values) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorerSupplier(sim, values); @@ -253,7 +252,7 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity if (factory != null) { var scorer = factory.getInt7SQVectorScorerSupplier( VectorSimilarityType.of(sim), - values.getSlice(), + qValues.getSlice(), qValues, qValues.getScalarQuantizer().getConstantMultiplier() ); @@ -266,9 +265,9 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarity } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, float[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, float[] query) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorer(sim, values, query); @@ -284,7 +283,7 @@ public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, Ra } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, byte[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, byte[] query) throws IOException { return delegate.getRandomVectorScorer(sim, values, query); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java 
b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java index 2df0757a8b8ee..4fb8f8126bf57 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormat.java @@ -15,6 +15,7 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -41,6 +42,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException return new ES813FlatVectorFormat.ES813FlatVectorReader(format.fieldsReader(state)); } + @Override + public int getMaxDimensions(String s) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + @Override public String toString() { return NAME; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index cc6479cf1e2bf..29e179dfc7c5d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -14,13 +14,14 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; +import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import 
org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.elasticsearch.script.field.vectors.ESVectorUtil; import java.io.IOException; @@ -29,6 +30,10 @@ class ES815BitFlatVectorsFormat extends FlatVectorsFormat { private final FlatVectorsFormat delegate = new Lucene99FlatVectorsFormat(FlatBitVectorScorer.INSTANCE); + protected ES815BitFlatVectorsFormat() { + super("ES815BitFlatVectorsFormat"); + } + @Override public FlatVectorsWriter fieldsWriter(SegmentWriteState segmentWriteState) throws IOException { return delegate.fieldsWriter(segmentWriteState); @@ -57,14 +62,14 @@ public String toString() { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues + KnnVectorValues vectorValues ) throws IOException { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - assert randomAccessVectorValues instanceof RandomAccessQuantizedByteVectorValues == false; + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + assert byteVectorValues instanceof QuantizedByteVectorValues == false; return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(randomAccessVectorValuesBytes); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(byteVectorValues); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -73,18 +78,15 @@ public RandomVectorScorerSupplier getRandomVectorScorerSupplier( @Override 
public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - byte[] bytes - ) { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + KnnVectorValues vectorValues, + byte[] target + ) throws IOException { + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - checkDimensions(bytes.length, randomAccessVectorValuesBytes.dimension()); + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + checkDimensions(target.length, byteVectorValues.dimension()); return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer( - randomAccessVectorValuesBytes, - bytes - ); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer(byteVectorValues, target); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -92,10 +94,10 @@ public RandomVectorScorer getRandomVectorScorer( @Override public RandomVectorScorer getRandomVectorScorer( - VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - float[] floats - ) { + VectorSimilarityFunction similarityFunction, + KnnVectorValues vectorValues, + float[] target + ) throws IOException { throw new IllegalArgumentException("Unsupported vector type"); } } @@ -106,9 +108,9 @@ static float hammingScore(byte[] a, byte[] b) { static class HammingVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final byte[] query; - private final RandomAccessVectorValues.Bytes byteValues; + private final ByteVectorValues byteValues; - HammingVectorScorer(RandomAccessVectorValues.Bytes byteValues, byte[] query) { + HammingVectorScorer(ByteVectorValues 
byteValues, byte[] query) { super(byteValues); this.query = query; this.byteValues = byteValues; @@ -121,9 +123,9 @@ public float score(int i) throws IOException { } static class HammingScorerSupplier implements RandomVectorScorerSupplier { - private final RandomAccessVectorValues.Bytes byteValues, byteValues1, byteValues2; + private final ByteVectorValues byteValues, byteValues1, byteValues2; - HammingScorerSupplier(RandomAccessVectorValues.Bytes byteValues) throws IOException { + HammingScorerSupplier(ByteVectorValues byteValues) throws IOException { this.byteValues = byteValues; this.byteValues1 = byteValues.copy(); this.byteValues2 = byteValues.copy(); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java index 55271719a4574..c3e3b8763fe47 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormat.java @@ -17,6 +17,7 @@ import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsWriter; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import java.io.IOException; @@ -62,6 +63,11 @@ public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException return new Lucene99HnswVectorsReader(state, flatVectorsFormat.fieldsReader(state)); } + @Override + public int getMaxDimensions(String s) { + return DenseVectorFieldMapper.MAX_DIMS_COUNT; + } + @Override public String toString() { return "ES815HnswBitVectorsFormat(name=ES815HnswBitVectorsFormat, maxConn=" diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index 05cc6d148be5e..e44b344d3b283 100644 
--- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -119,7 +119,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.parallelArray = new ParallelArray(this.searchBatchSize); this.indexVersionCreated = indexVersionCreated; final TopDocs topDocs = searchOperations(null, accessStats); - this.totalHits = Math.toIntExact(topDocs.totalHits.value); + this.totalHits = Math.toIntExact(topDocs.totalHits.value()); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); } @@ -341,7 +341,7 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { assert storedFieldsReaderOrd == leaf.ord : storedFieldsReaderOrd + " != " + leaf.ord; storedFieldsReader.document(segmentDocID, fields); } else { - leaf.reader().document(segmentDocID, fields); + leaf.reader().storedFields().document(segmentDocID, fields); } final Translog.Operation op; diff --git a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java index 18b5ba69ca320..3e99818d1827b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -13,6 +13,7 @@ import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CodecReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterNumericDocValues; @@ -188,6 +189,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { return in.getSortedSet(field); } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws 
IOException { + return in.getSkipper(field); + } + @Override public void checkIntegrity() throws IOException { in.checkIntegrity(); diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index c7acd730fadb5..0f772b49bf92b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -13,10 +13,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.ImpactsEnum; import org.apache.lucene.index.IndexCommit; @@ -152,6 +153,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -171,6 +173,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -190,6 +193,7 @@ private static class TranslogLeafReader extends LeafReader { false, IndexOptions.DOCS, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -346,6 +350,11 @@ public NumericDocValues getNormValues(String field) throws IOException { return getDelegate().getNormValues(field); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return getDelegate().getDocValuesSkipper(field); + } + @Override public FloatVectorValues 
getFloatVectorValues(String field) throws IOException { return getDelegate().getFloatVectorValues(field); @@ -389,11 +398,6 @@ public LeafMetaData getMetaData() { return getDelegate().getMetaData(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - return getDelegate().getTermVectors(docID); - } - @Override public TermVectors termVectors() throws IOException { return getDelegate().termVectors(); @@ -429,11 +433,6 @@ public int maxDoc() { return 1; } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - private void readStoredFieldsDirectly(StoredFieldVisitor visitor) throws IOException { if (visitor.needsField(FAKE_SOURCE_FIELD) == StoredFieldVisitor.Status.YES) { BytesReference sourceBytes = operation.source(); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java index 84e85f3ddf2b4..d4e34181b876f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java @@ -52,12 +52,7 @@ public boolean advanceExact(int target) throws IOException { @Override public long nextOrd() throws IOException { - long segmentOrd = values.nextOrd(); - if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return getGlobalOrd(segmentOrd); - } + return getGlobalOrd(values.nextOrd()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index 0439383ccbd05..0f72e491d8110 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ 
b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -40,13 +40,13 @@ public static boolean significantlySmallerThanSinglePackedOrdinals( float acceptableOverheadRatio ) { int bitsPerOrd = PackedInts.bitsRequired(numOrds); - bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue; + bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue(); // Compute the worst-case number of bits per value for offsets in the worst case, eg. if no docs have a value at the // beginning of the block and all docs have one at the end of the block final float avgValuesPerDoc = (float) numDocsWithValue / maxDoc; final int maxDelta = (int) Math.ceil(OFFSETS_PAGE_SIZE * (1 - avgValuesPerDoc) * avgValuesPerDoc); int bitsPerOffset = PackedInts.bitsRequired(maxDelta) + 1; // +1 because of the sign - bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue; + bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue(); final long expectedMultiSizeInBytes = (long) numDocsWithValue * bitsPerOrd + (long) maxDoc * bitsPerOffset; final long expectedSingleSizeInBytes = (long) maxDoc * bitsPerOrd; @@ -153,6 +153,7 @@ private static class MultiDocs extends AbstractSortedSetDocValues { private long currentOffset; private long currentEndOffset; + private int count; MultiDocs(MultiOrdinals ordinals, ValuesHolder values) { this.valueCount = ordinals.valueCount; @@ -170,21 +171,19 @@ public long getValueCount() { public boolean advanceExact(int docId) { currentOffset = docId != 0 ? 
endOffsets.get(docId - 1) : 0; currentEndOffset = endOffsets.get(docId); + count = Math.toIntExact(currentEndOffset - currentOffset); return currentOffset != currentEndOffset; } @Override public long nextOrd() { - if (currentOffset == currentEndOffset) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return ords.get(currentOffset++); - } + assert currentOffset != currentEndOffset; + return ords.get(currentOffset++); } @Override public int docValueCount() { - return Math.toIntExact(currentEndOffset - currentOffset); + return count; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 9f60d99e0ded4..53ccccdbd4bab 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -345,7 +345,7 @@ public CompletionFieldType fieldType() { } static PostingsFormat postingsFormat() { - return PostingsFormat.forName("Completion99"); + return PostingsFormat.forName("Completion912"); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 2e602033442c7..10e1b78d2a63a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.Field; import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -682,7 +683,7 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo long pivotLong = resolution.convert(pivotTime); // As we already apply boost in 
AbstractQueryBuilder::toQuery, we always passing a boost of 1.0 to distanceFeatureQuery if (isIndexed()) { - return LongPoint.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); + return LongField.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); } else { return new LongScriptFieldDistanceFeatureQuery( new Script(""), @@ -953,7 +954,7 @@ private void indexValue(DocumentParserContext context, long timestamp) { } if (indexed && hasDocValues) { - context.doc().add(new LongField(fieldType().name(), timestamp)); + context.doc().add(new LongField(fieldType().name(), timestamp, Field.Store.NO)); } else if (hasDocValues) { context.doc().add(new SortedNumericDocValuesField(fieldType().name(), timestamp)); } else if (indexed) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index 494005ce12cb1..d37f6c51d288d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -11,10 +11,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -147,11 +148,6 @@ public FieldInfos getFieldInfos() { return new FieldInfos(new FieldInfo[0]); } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { return new StoredFields() { @@ 
-203,6 +199,11 @@ public NumericDocValues getNormValues(String field) throws IOException { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String s) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -233,11 +234,6 @@ public LeafMetaData getMetaData() { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public int numDocs() { throw new UnsupportedOperationException(); @@ -284,6 +280,7 @@ private static FieldInfo fieldInfo(String name) { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -484,9 +481,7 @@ private static SortedSetDocValues sortedSetDocValues(List values) { @Override public long nextOrd() { i++; - if (i >= values.size()) { - return NO_MORE_ORDS; - } + assert i < values.size(); return i; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index b9d89462c3467..8e418f45ddb3a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -22,6 +22,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map; /** @@ -97,13 +98,13 @@ public boolean isSearchable() { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(v -> { + List bytesRefs = values.stream().map(v -> { Object idObject = v; if (idObject instanceof BytesRef) { idObject = ((BytesRef) idObject).utf8ToString(); } return 
Uid.encodeId(idObject.toString()); - }).toArray(BytesRef[]::new); + }).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java index 6900dcd773917..8114167c02486 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -76,8 +75,8 @@ static CompiledAutomaton buildIpPrefixAutomaton(String ipPrefix) { } else { result = Automata.makeAnyBinary(); } - result = MinimizationOperations.minimize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CompiledAutomaton(result, null, false, 0, true); + result = Operations.determinize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return new CompiledAutomaton(result, false, false, true); } private static Automaton getIpv6Automaton(String ipPrefix) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 529ff19bfffd7..1c0b40e4190dc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -32,7 +32,6 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; -import org.apache.lucene.util.automaton.MinimizationOperations; import 
org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; @@ -491,7 +490,7 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (isIndexed()) { return super.termsQuery(values, context); } else { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + Collection bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } } @@ -597,7 +596,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi ? AutomatonQueries.caseInsensitivePrefix(prefix) : Operations.concatenate(Automata.makeString(prefix), Automata.makeAnyString()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a, true, true); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java index f1924fd04f3fe..c6f1b490a2be2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; @@ -70,7 +69,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + var bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); 
return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 2c851b70d2606..35722be20b9be 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -440,6 +440,30 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex ); } + /** + * Create a regexp {@link IntervalsSource} for the given pattern. + */ + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + throw new IllegalArgumentException( + "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" + ); + } + + /** + * Create a range {@link IntervalsSource} for the given ranges + */ + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + throw new IllegalArgumentException( + "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" + ); + } + /** * An enum used to describe the relation between the range of terms in a * shard when compared with a query range diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 8cc67cc481b9b..55ed1e10428aa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatField; import 
org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntField; @@ -585,7 +586,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final float f = value.floatValue(); if (indexed && docValued) { - document.add(new FloatField(name, f)); + document.add(new FloatField(name, f, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(f))); } else if (indexed) { @@ -735,7 +736,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final double d = value.doubleValue(); if (indexed && docValued) { - document.add(new DoubleField(name, d)); + document.add(new DoubleField(name, d, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(d))); } else if (indexed) { @@ -1159,7 +1160,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final int i = value.intValue(); if (indexed && docValued) { - document.add(new IntField(name, i)); + document.add(new IntField(name, i, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, i)); } else if (indexed) { @@ -1306,7 +1307,7 @@ public Query rangeQuery( public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final long l = value.longValue(); if (indexed && docValued) { - document.add(new LongField(name, l)); + document.add(new LongField(name, l, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, l)); } else if (indexed) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java 
b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java index 9a6dd1d127651..670ddc4d5ccda 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java @@ -248,6 +248,22 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex throw new QueryShardException(context, fail("wildcard intervals query")); } + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + throw new QueryShardException(context, fail("regexp intervals query")); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + throw new QueryShardException(context, fail("range intervals query")); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { throw new IllegalArgumentException(fail("aggregation or sorts")); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 9ea16933f7ab5..ceb96b87a0983 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -101,9 +101,7 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool failIfNotIndexed(); Term prefix = new Term(name(), indexedValueForSearch(value)); if (caseInsensitive) { - return method == null - ? new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? 
new CaseInsensitivePrefixQuery(prefix, false) : new CaseInsensitivePrefixQuery(prefix, false, method); } return method == null ? new PrefixQuery(prefix) : new PrefixQuery(prefix, method); } @@ -170,9 +168,7 @@ protected Query wildcardQuery( term = new Term(name(), indexedValueForSearch(value)); } if (caseInsensitive) { - return method == null - ? new CaseInsensitiveWildcardQuery(term) - : new CaseInsensitiveWildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? new CaseInsensitiveWildcardQuery(term) : new CaseInsensitiveWildcardQuery(term, false, method); } return method == null ? new WildcardQuery(term) : new WildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, method); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java index 674a016264c3a..e2ff9cc7ea632 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Collection; +import java.util.List; import java.util.Map; /** Base {@link MappedFieldType} implementation for a field that is indexed @@ -69,7 +70,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + List bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index a30793cdc5d97..c78a933838d7c 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PhraseQuery; @@ -602,8 +603,8 @@ public Query prefixQuery( } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) .build(); @@ -620,7 +621,10 @@ public IntervalsSource intervals(BytesRef term) { return Intervals.fixField(name(), Intervals.term(term)); } String wildcardTerm = term.utf8ToString() + "?".repeat(Math.max(0, minChars - term.length)); - return Intervals.or(Intervals.fixField(name(), Intervals.wildcard(new BytesRef(wildcardTerm))), Intervals.term(term)); + return Intervals.or( + Intervals.fixField(name(), Intervals.wildcard(new BytesRef(wildcardTerm), IndexSearcher.getMaxClauseCount())), + Intervals.term(term) + ); } @Override @@ -822,7 +826,7 @@ public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext con if (prefixFieldType != null) { return prefixFieldType.intervals(term); } - return Intervals.prefix(term); + return Intervals.prefix(term, 
IndexSearcher.getMaxClauseCount()); } @Override @@ -836,8 +840,14 @@ public IntervalsSource fuzzyIntervals( if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - FuzzyQuery fq = new FuzzyQuery(new Term(name(), term), maxDistance, prefixLength, 128, transpositions); - return Intervals.multiterm(fq.getAutomata(), term); + FuzzyQuery fq = new FuzzyQuery( + new Term(name(), term), + maxDistance, + prefixLength, + IndexSearcher.getMaxClauseCount(), + transpositions + ); + return Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), term); } @Override @@ -845,7 +855,29 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex if (getTextSearchInfo().hasPositions() == false) { throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); } - return Intervals.wildcard(pattern); + return Intervals.wildcard(pattern, IndexSearcher.getMaxClauseCount()); + } + + @Override + public IntervalsSource regexpIntervals(BytesRef pattern, SearchExecutionContext context) { + if (getTextSearchInfo().hasPositions() == false) { + throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + } + return Intervals.regexp(pattern, IndexSearcher.getMaxClauseCount()); + } + + @Override + public IntervalsSource rangeIntervals( + BytesRef lowerTerm, + BytesRef upperTerm, + boolean includeLower, + boolean includeUpper, + SearchExecutionContext context + ) { + if (getTextSearchInfo().hasPositions() == false) { + throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + } + return Intervals.range(lowerTerm, upperTerm, includeLower, includeUpper, IndexSearcher.getMaxClauseCount()); } private void checkForPositions() { diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index ac1de94ea7a73..93a2157b2338a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -28,10 +28,10 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -394,7 +394,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi a = Operations.concatenate(a, Automata.makeAnyString()); } assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a); if (searchAfter != null) { @@ -483,6 +482,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new UnsupportedOperationException(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index b94ea67c8de8d..b29f093e3a217 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -205,12 +205,8 @@ public long nextOrd() throws IOException { } long ord = delegate.nextOrd(); - if (ord != NO_MORE_ORDS && ord <= maxOrd) { - assert ord >= minOrd; - return mapOrd(ord); - } else { - return NO_MORE_ORDS; - } + assert ord <= maxOrd; + return mapOrd(ord); } @Override @@ -223,9 +219,9 @@ public boolean advanceExact(int target) throws IOException { if (delegate.advanceExact(target)) { int count = 0; - while (true) { + for (int i = 0; i < delegate.docValueCount(); i++) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } if (ord >= minOrd) { @@ -246,7 +242,7 @@ public boolean advanceExact(int target) throws IOException { while (true) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java index e8da3b72ae7c7..04069333deb13 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java @@ -45,24 +45,13 @@ public int size() { } @Override - public float[] vectorValue() throws IOException { - // Lazy load vectors as we may iterate but not actually require the vector - return vectorValue(in.docID()); + public DocIndexIterator iterator() { + return in.iterator(); } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - return in.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - return 
in.advance(target); + public FloatVectorValues copy() throws IOException { + return in.copy(); } @Override @@ -74,22 +63,24 @@ public float magnitude() { return magnitude; } - private float[] vectorValue(int docId) throws IOException { + @Override + public float[] vectorValue(int ord) throws IOException { + int docId = ordToDoc(ord); if (docId != this.docId) { this.docId = docId; hasMagnitude = decodedMagnitude(docId); // We should only copy and transform if we have a stored a non-unit length magnitude if (hasMagnitude) { - System.arraycopy(in.vectorValue(), 0, vector, 0, dimension()); + System.arraycopy(in.vectorValue(ord), 0, vector, 0, dimension()); for (int i = 0; i < vector.length; i++) { vector[i] *= magnitude; } return vector; } else { - return in.vectorValue(); + return in.vectorValue(ord); } } else { - return hasMagnitude ? vector : in.vectorValue(); + return hasMagnitude ? vector : in.vectorValue(ord); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 4adfe619ca4e1..a48af90d539e6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; @@ -2167,6 +2168,7 @@ private class IndexedSyntheticFieldLoader extends SourceLoader.DocValuesBasedSyn private ByteVectorValues byteVectorValues; private boolean hasValue; private boolean hasMagnitude; + private int ord; private final IndexVersion indexCreatedVersion; private final 
VectorSimilarity vectorSimilarity; @@ -2184,16 +2186,20 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) { magnitudeReader = leafReader.getNumericDocValues(fullPath() + COSINE_MAGNITUDE_FIELD_SUFFIX); } + KnnVectorValues.DocIndexIterator iterator = values.iterator(); return docId -> { - hasValue = docId == values.advance(docId); + hasValue = docId == iterator.advance(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); + ord = iterator.index(); return hasValue; }; } byteVectorValues = leafReader.getByteVectorValues(fullPath()); if (byteVectorValues != null) { + KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); return docId -> { - hasValue = docId == byteVectorValues.advance(docId); + hasValue = docId == iterator.advance(docId); + ord = iterator.index(); return hasValue; }; } @@ -2216,7 +2222,7 @@ public void write(XContentBuilder b) throws IOException { } b.startArray(leafName()); if (values != null) { - for (float v : values.vectorValue()) { + for (float v : values.vectorValue(ord)) { if (hasMagnitude) { b.value(v * magnitude); } else { @@ -2224,7 +2230,7 @@ public void write(XContentBuilder b) throws IOException { } } } else if (byteVectorValues != null) { - byte[] vectorValue = byteVectorValues.vectorValue(); + byte[] vectorValue = byteVectorValues.vectorValue(ord); for (byte value : vectorValue) { b.value(value); } diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 16aada4066f71..1560004b13785 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -412,8 +412,8 @@ public Query 
createPhraseQuery(String field, String queryText, int phraseSlop) { protected Query newSynonymQuery(String field, TermAndBoost[] terms) { CombinedFieldQuery.Builder query = new CombinedFieldQuery.Builder(); for (TermAndBoost termAndBoost : terms) { - assert termAndBoost.boost == BoostAttribute.DEFAULT_BOOST; - BytesRef bytes = termAndBoost.term; + assert termAndBoost.boost() == BoostAttribute.DEFAULT_BOOST; + BytesRef bytes = termAndBoost.term(); query.addTerm(bytes); } for (FieldAndBoost fieldAndBoost : fields) { diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java index 46d7fec641943..96c39ed356f90 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalBuilder.java @@ -20,7 +20,7 @@ import org.apache.lucene.queries.intervals.IntervalMatchesIterator; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; @@ -189,7 +189,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -204,7 +204,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept TokenStream ts = it.next(); IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, true); if (paths.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new 
IndexSearcher.TooManyClauses(); } paths.add(phrase); } diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java index 9b579c97f197a..647e45d1beda1 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -14,11 +14,13 @@ import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -77,10 +79,16 @@ public static IntervalsSourceProvider fromXContent(XContentParser parser) throws return Wildcard.fromXContent(parser); case "fuzzy": return Fuzzy.fromXContent(parser); + case "regexp": + return Regexp.fromXContent(parser); + case "range": + return Range.fromXContent(parser); } throw new ParsingException( parser.getTokenLocation(), - "Unknown interval type [" + parser.currentName() + "], expecting one of [match, any_of, all_of, prefix, wildcard]" + "Unknown interval type [" + + parser.currentName() + + "], expecting one of [match, any_of, all_of, prefix, wildcard, regexp, range]" ); } @@ -747,6 +755,129 @@ String getUseField() { } } + public static class Regexp extends IntervalsSourceProvider implements VersionedNamedWriteable { + + public static final String NAME = "regexp"; + + private final String pattern; + private 
final String analyzer; + private final String useField; + + public Regexp(String pattern, String analyzer, String useField) { + this.pattern = pattern; + this.analyzer = analyzer; + this.useField = useField; + } + + public Regexp(StreamInput in) throws IOException { + this.pattern = in.readString(); + this.analyzer = in.readOptionalString(); + this.useField = in.readOptionalString(); + } + + @Override + public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getIndexAnalyzers().get(this.analyzer); + } + if (useField != null) { + fieldType = context.getFieldType(useField); + assert fieldType != null; + } + if (analyzer == null) { + analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + } + BytesRef normalizedPattern = analyzer.normalize(fieldType.name(), pattern); + IntervalsSource source = fieldType.regexpIntervals(normalizedPattern, context); + if (useField != null) { + source = Intervals.fixField(useField, source); + } + return source; + } + + @Override + public void extractFields(Set fields) { + if (useField != null) { + fields.add(useField); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Regexp regexp = (Regexp) o; + return Objects.equals(pattern, regexp.pattern) + && Objects.equals(analyzer, regexp.analyzer) + && Objects.equals(useField, regexp.useField); + } + + @Override + public int hashCode() { + return Objects.hash(pattern, analyzer, useField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.REGEX_AND_RANGE_INTERVAL_QUERIES; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(pattern); + out.writeOptionalString(analyzer); + out.writeOptionalString(useField); + 
} + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field("pattern", pattern); + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (useField != null) { + builder.field("use_field", useField); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String term = (String) args[0]; + String analyzer = (String) args[1]; + String useField = (String) args[2]; + return new Regexp(term, analyzer, useField); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("pattern")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); + } + + public static Regexp fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + String getPattern() { + return pattern; + } + + String getAnalyzer() { + return analyzer; + } + + String getUseField() { + return useField; + } + } + public static class Fuzzy extends IntervalsSourceProvider { public static final String NAME = "fuzzy"; @@ -908,6 +1039,190 @@ String getUseField() { } } + public static class Range extends IntervalsSourceProvider implements VersionedNamedWriteable { + + public static final String NAME = "range"; + + private final String lowerTerm; + private final String upperTerm; + private final boolean includeLower; + private final boolean includeUpper; + private final String analyzer; + private final String useField; + + public Range(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, String analyzer, String useField) { + this.lowerTerm = lowerTerm; + this.upperTerm = upperTerm; + this.includeLower = includeLower; + this.includeUpper = includeUpper; + this.analyzer = analyzer; + this.useField = useField; + } + + public 
Range(StreamInput in) throws IOException { + this.lowerTerm = in.readString(); + this.upperTerm = in.readString(); + this.includeLower = in.readBoolean(); + this.includeUpper = in.readBoolean(); + this.analyzer = in.readOptionalString(); + this.useField = in.readOptionalString(); + } + + @Override + public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + NamedAnalyzer analyzer = null; + if (this.analyzer != null) { + analyzer = context.getIndexAnalyzers().get(this.analyzer); + } + if (useField != null) { + fieldType = context.getFieldType(useField); + assert fieldType != null; + } + if (analyzer == null) { + analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + } + BytesRef normalizedLowerTerm = analyzer.normalize(fieldType.name(), lowerTerm); + BytesRef normalizedUpperTerm = analyzer.normalize(fieldType.name(), upperTerm); + + IntervalsSource source = fieldType.rangeIntervals( + normalizedLowerTerm, + normalizedUpperTerm, + includeLower, + includeUpper, + context + ); + if (useField != null) { + source = Intervals.fixField(useField, source); + } + return source; + } + + @Override + public void extractFields(Set fields) { + if (useField != null) { + fields.add(useField); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Range range = (Range) o; + return includeLower == range.includeLower + && includeUpper == range.includeUpper + && Objects.equals(lowerTerm, range.lowerTerm) + && Objects.equals(upperTerm, range.upperTerm) + && Objects.equals(analyzer, range.analyzer) + && Objects.equals(useField, range.useField); + } + + @Override + public int hashCode() { + return Objects.hash(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return 
TransportVersions.REGEX_AND_RANGE_INTERVAL_QUERIES; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(lowerTerm); + out.writeString(upperTerm); + out.writeBoolean(includeLower); + out.writeBoolean(includeUpper); + out.writeOptionalString(analyzer); + out.writeOptionalString(useField); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + if (includeLower) { + builder.field("gte", lowerTerm); + } else { + builder.field("gt", lowerTerm); + } + if (includeUpper) { + builder.field("lte", upperTerm); + } else { + builder.field("lt", upperTerm); + } + if (analyzer != null) { + builder.field("analyzer", analyzer); + } + if (useField != null) { + builder.field("use_field", useField); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String gte = (String) args[0]; + String gt = (String) args[1]; + String lte = (String) args[2]; + String lt = (String) args[3]; + if ((gte == null && gt == null) || (gte != null && gt != null)) { + throw new IllegalArgumentException("Either [gte] or [gt], one of them must be provided"); + } + if ((lte == null && lt == null) || (lte != null && lt != null)) { + throw new IllegalArgumentException("Either [lte] or [lt], one of them must be provided"); + } + boolean includeLower = gte != null ? true : false; + String lowerTerm = gte != null ? gte : gt; + boolean includeUpper = lte != null ? true : false; + String upperTerm = lte != null ? 
lte : lt; + String analyzer = (String) args[4]; + String useField = (String) args[5]; + return new Range(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + }); + + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("gte")); + PARSER.declareString(optionalConstructorArg(), new ParseField("gt")); + PARSER.declareString(optionalConstructorArg(), new ParseField("lte")); + PARSER.declareString(optionalConstructorArg(), new ParseField("lt")); + PARSER.declareString(optionalConstructorArg(), new ParseField("analyzer")); + PARSER.declareString(optionalConstructorArg(), new ParseField("use_field")); + } + + public static Range fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + String getLowerTerm() { + return lowerTerm; + } + + String getUpperTerm() { + return upperTerm; + } + + boolean getIncludeLower() { + return includeLower; + } + + boolean getIncludeUpper() { + return includeUpper; + } + + String getAnalyzer() { + return analyzer; + } + + String getUseField() { + return useField; + } + } + static class ScriptFilterSource extends FilteredIntervalsSource { final IntervalFilterScript script; diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 55642ccf0275a..626875c75a5fe 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; 
import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -443,12 +443,12 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 6072a81691ffa..19be37e3d21be 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -10,9 +10,12 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import java.util.Locale; +import static org.apache.lucene.util.automaton.RegExp.DEPRECATED_COMPLEMENT; + /** * Regular expression syntax flags. Each flag represents optional syntax support in the regular expression: *

    @@ -37,8 +40,11 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} + * We use the deprecated support in Lucene 10. Will be removed in Lucene 11 + * https://github.com/elastic/elasticsearch/issues/113465 */ - COMPLEMENT(RegExp.COMPLEMENT), + @UpdateForV10 + COMPLEMENT(DEPRECATED_COMPLEMENT), /** * Enables empty language expression: {@code #} @@ -63,7 +69,7 @@ public enum RegexpFlag { /** * Enables all available option flags */ - ALL(RegExp.ALL); + ALL(RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); final int value; diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index dc439fab58ffc..461dc66322434 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -280,7 +280,9 @@ protected Query doToQuery(SearchExecutionContext context) throws QueryShardExcep int matchFlagsValue = caseInsensitive ? 
RegExp.ASCII_CASE_INSENSITIVE : 0; Query query = null; // For BWC we mask irrelevant bits (RegExp changed ALL from 0xffff to 0xff) - int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp.ALL; + // We need to preserve the DEPRECATED_COMPLEMENT for now though + int deprecatedComplementFlag = syntaxFlagsValue & RegExp.DEPRECATED_COMPLEMENT; + int sanitisedSyntaxFlag = syntaxFlagsValue & (RegExp.ALL | deprecatedComplementFlag); MappedFieldType fieldType = context.getFieldType(fieldName); if (fieldType != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index c96771978bd42..8d3fd1d92e1e7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchException; @@ -184,7 +185,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final FilterScript leafScript = filterScript.newInstance(new DocValuesDocReader(lookup, context)); TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -201,7 +202,8 @@ public float matchCost() { return 1000f; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index 81afdf0ebe5e0..a6116ccf2c495 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.sandbox.search.CoveringQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; @@ -273,8 +272,8 @@ protected Query doToQuery(SearchExecutionContext context) { return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); } // Fail before we attempt to create the term queries: - if (values.size() > BooleanQuery.getMaxClauseCount()) { - throw new BooleanQuery.TooManyClauses(); + if (values.size() > IndexSearcher.getMaxClauseCount()) { + throw new IndexSearcher.TooManyClauses(); } List queries = createTermQueries(context); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 528f0bd6dae08..1327721a88427 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -149,7 +149,7 @@ private static Response wrapSearchResponse(SearchResponse response) { } hits = unmodifiableList(hits); } - long total = response.getHits().getTotalHits().value; + long total = response.getHits().getTotalHits().value(); return new Response(response.isTimedOut(), failures, total, hits, response.getScrollId()); } diff --git 
a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java index 505c20f642093..5f135c674ba1a 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostAttribute; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.QueryBuilder; @@ -690,7 +691,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -708,7 +709,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in SpanQuery q = createSpanQuery(ts, field, usePrefix); if (q != null) { if (queries.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queries.add(q); } @@ -722,14 +723,14 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in Term[] terms = graph.getTerms(field, start); assert terms.length > 0; if (terms.length >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queryPos = newSpanQuery(terms, usePrefix); } if (queryPos != null) { if (clauses.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } clauses.add(queryPos); } diff --git 
a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java index 52122ed86ef69..446d78078e642 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java @@ -200,7 +200,7 @@ protected Query createBooleanPrefixQuery(String field, String queryText, Boolean protected Query newSynonymQuery(String field, TermAndBoost[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { - values[i] = terms[i].term; + values[i] = terms[i].term(); } return blendTerms(context, values, tieBreaker, lenient, blendedFields); } diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index db0077284bbd3..96e8ac35c8e32 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -24,8 +24,6 @@ import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; -import java.io.IOException; -import java.io.UncheckedIOException; import java.util.function.Predicate; /** Utility class to filter parent and children clauses when building nested @@ -55,15 +53,10 @@ public boolean mightMatchNestedDocs(Query query) { // cover a high majority of use-cases return mightMatchNestedDocs(((TermQuery) query).getTerm().field()); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNestedDocs(tis.getField()); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + 
return mightMatchNestedDocs(tis.getField()); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNestedDocs(((PointRangeQuery) query).getField()); @@ -75,13 +68,13 @@ public boolean mightMatchNestedDocs(Query query) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(this::mightMatchNestedDocs); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(this::mightMatchNestedDocs); } } else if (query instanceof ESToParentBlockJoinQuery) { @@ -122,15 +115,10 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { } else if (query instanceof TermQuery) { return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNonNestedDocs(tis.getField(), nestedPath); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + return mightMatchNonNestedDocs(tis.getField(), nestedPath); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); @@ -142,13 +130,13 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + 
.map(BooleanClause::query) .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } } else { @@ -183,5 +171,4 @@ boolean mightMatchNonNestedDocs(String field, String nestedPath) { } return true; } - } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 76dba60689422..d237a03335337 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -760,7 +760,14 @@ private Query getRegexpQuerySingle(String field, String termStr) throws ParseExc setAnalyzer(forceAnalyzer); return super.getRegexpQuery(field, termStr); } - return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(), getMultiTermRewriteMethod(), context); + return currentFieldType.regexpQuery( + termStr, + RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT, + 0, + getDeterminizeWorkLimit(), + getMultiTermRewriteMethod(), + context + ); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java index 562bf1e75dc1f..97d1b3342ca2b 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java @@ -33,7 +33,7 @@ public static Tuple getClea final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); @@ -64,7 +64,7 @@ public static void execute(Terminal terminal, Directory 
indexDirectory, Lock wri final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index f1291ac6faa51..94a29258f3202 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -73,7 +74,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { LeafReader leafReader = context.reader(); FixedBitSet bitSet = new FixedBitSet(leafReader.maxDoc()); Terms terms = leafReader.terms(RoutingFieldMapper.NAME); @@ -82,87 +83,102 @@ public Scorer scorer(LeafReaderContext context) throws IOException { int targetShardId = indexRouting.getShard(Uid.decodeId(ref.bytes, ref.offset, ref.length), null); return shardId == targetShardId; }; - if (terms == null) { - // this is the common case - no partitioning and no _routing values - // in this case we also don't do anything special with regards to nested docs since we basically delete - // by ID and parent and nested all have the same id. 
- assert indexMetadata.isRoutingPartitionedIndex() == false; - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); - } else { - final BitSet parentBitSet; - if (nestedParentBitSetProducer == null) { - parentBitSet = null; - } else { - parentBitSet = nestedParentBitSetProducer.getBitSet(context); - if (parentBitSet == null) { - return null; // no matches - } - } - if (indexMetadata.isRoutingPartitionedIndex()) { - // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing - // this index is routing partitioned. - Visitor visitor = new Visitor(leafReader); - TwoPhaseIterator twoPhaseIterator = parentBitSet == null - ? new RoutingPartitionedDocIdSetIterator(visitor) - : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); - return new ConstantScoreScorer(this, score(), scoreMode, twoPhaseIterator); - } else { - // here we potentially guard the docID consumers with our parent bitset if we have one. - // this ensures that we are only marking root documents in the nested case and if necessary - // we do a second pass to mark the corresponding children in markChildDocs - Function maybeWrapConsumer = consumer -> { - if (parentBitSet != null) { - return docId -> { - if (parentBitSet.get(docId)) { - consumer.accept(docId); + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + if (terms == null) { + // this is the common case - no partitioning and no _routing values + // in this case we also don't do anything special with regards to nested docs since we basically delete + // by ID and parent and nested all have the same id. 
+ assert indexMetadata.isRoutingPartitionedIndex() == false; + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); + } else { + final BitSet parentBitSet; + if (nestedParentBitSetProducer == null) { + parentBitSet = null; + } else { + parentBitSet = nestedParentBitSetProducer.getBitSet(context); + if (parentBitSet == null) { + return null; // no matches + } + } + if (indexMetadata.isRoutingPartitionedIndex()) { + // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing + // this index is routing partitioned. + Visitor visitor = new Visitor(leafReader); + TwoPhaseIterator twoPhaseIterator = parentBitSet == null + ? new RoutingPartitionedDocIdSetIterator(visitor) + : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); + return new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator); + } else { + // here we potentially guard the docID consumers with our parent bitset if we have one. + // this ensures that we are only marking root documents in the nested case and if necessary + // we do a second pass to mark the corresponding children in markChildDocs + Function maybeWrapConsumer = consumer -> { + if (parentBitSet != null) { + return docId -> { + if (parentBitSet.get(docId)) { + consumer.accept(docId); + } + }; } + return consumer; }; - } - return consumer; - }; - // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to delete - findSplitDocs(RoutingFieldMapper.NAME, ref -> { - int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); - return shardId == targetShardId; - }, leafReader, maybeWrapConsumer.apply(bitSet::set)); - - // TODO have the IndexRouting build the query and pass routingRequired in - boolean routingRequired = indexMetadata.mapping() == null ? 
false : indexMetadata.mapping().routingRequired(); - // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the ones - // with a routing value from the next iteration and delete / select based on the ID. - if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { - /* - * This is a special case where some docs don't have routing values. - * It's annoying, but it's allowed to build an index where some documents - * hve routing and others don't. - * - * Luckily, if the routing field is required in the mapping then we can - * safely assume that all documents which are don't have a routing are - * nested documents. And we pick those up later based on the assignment - * of the document that contains them. - */ - FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); - findSplitDocs( - RoutingFieldMapper.NAME, - Predicates.never(), - leafReader, - maybeWrapConsumer.apply(hasRoutingValue::set) - ); - IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { - if (hasRoutingValue.get(docId) == false) { - bitSetConsumer.accept(docId); + // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to + // delete + findSplitDocs(RoutingFieldMapper.NAME, ref -> { + int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); + return shardId == targetShardId; + }, leafReader, maybeWrapConsumer.apply(bitSet::set)); + + // TODO have the IndexRouting build the query and pass routingRequired in + boolean routingRequired = indexMetadata.mapping() == null + ? false + : indexMetadata.mapping().routingRequired(); + // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the + // ones + // with a routing value from the next iteration and delete / select based on the ID. 
+ if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { + /* + * This is a special case where some docs don't have routing values. + * It's annoying, but it's allowed to build an index where some documents + * hve routing and others don't. + * + * Luckily, if the routing field is required in the mapping then we can + * safely assume that all documents which are don't have a routing are + * nested documents. And we pick those up later based on the assignment + * of the document that contains them. + */ + FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); + findSplitDocs( + RoutingFieldMapper.NAME, + Predicates.never(), + leafReader, + maybeWrapConsumer.apply(hasRoutingValue::set) + ); + IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { + if (hasRoutingValue.get(docId) == false) { + bitSetConsumer.accept(docId); + } + }); } - }); + } + if (parentBitSet != null) { + // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. + markChildDocs(parentBitSet, bitSet); + } } + + return new ConstantScoreScorer(score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); } - if (parentBitSet != null) { - // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. 
- markChildDocs(parentBitSet, bitSet); - } - } - return new ConstantScoreScorer(this, score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + @Override + public long cost() { + return leafReader.maxDoc(); + } + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index 3dc5953e3d3d8..bc94db13074db 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -33,6 +33,7 @@ import java.nio.file.Path; import java.util.HashSet; import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { @@ -67,12 +68,12 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index // Use Lucene defaults final FSDirectory primaryDirectory = FSDirectory.open(location, lockFactory); if (primaryDirectory instanceof MMapDirectory mMapDirectory) { - return new HybridDirectory(lockFactory, setPreload(mMapDirectory, lockFactory, preLoadExtensions)); + return new HybridDirectory(lockFactory, setPreload(mMapDirectory, preLoadExtensions)); } else { return primaryDirectory; } case MMAPFS: - return setPreload(new MMapDirectory(location, lockFactory), lockFactory, preLoadExtensions); + return setPreload(new MMapDirectory(location, lockFactory), preLoadExtensions); case SIMPLEFS: case NIOFS: return new NIOFSDirectory(location, lockFactory); @@ -81,17 +82,23 @@ protected Directory newFSDirectory(Path location, LockFactory lockFactory, Index } } - public static MMapDirectory setPreload(MMapDirectory mMapDirectory, LockFactory lockFactory, Set preLoadExtensions) - throws IOException { - assert mMapDirectory.getPreload() == false; + /** Sets the preload, if any, on the given directory based on the extensions. Returns the same directory instance. 
*/ + // visibility and extensibility for testing + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + mMapDirectory.setPreload(getPreloadFunc(preLoadExtensions)); + return mMapDirectory; + } + + /** Gets a preload function based on the given preLoadExtensions. */ + static BiPredicate getPreloadFunc(Set preLoadExtensions) { if (preLoadExtensions.isEmpty() == false) { if (preLoadExtensions.contains("*")) { - mMapDirectory.setPreload(true); + return MMapDirectory.ALL_FILES; } else { - return new PreLoadMMapDirectory(mMapDirectory, lockFactory, preLoadExtensions); + return (name, context) -> preLoadExtensions.contains(FileSwitchDirectory.getExtension(name)); } } - return mMapDirectory; + return MMapDirectory.NO_FILES; } /** @@ -116,6 +123,8 @@ public IndexInput openInput(String name, IOContext context) throws IOException { // we need to do these checks on the outer directory since the inner doesn't know about pending deletes ensureOpen(); ensureCanRead(name); + // we switch the context here since mmap checks for the READONCE context by identity + context = context == Store.READONCE_CHECKSUM ? IOContext.READONCE : context; // we only use the mmap to open inputs. Everything else is managed by the NIOFSDirectory otherwise // we might run into trouble with files that are pendingDelete in one directory but still // listed in listAll() from the other. We on the other hand don't want to list files from both dirs @@ -162,50 +171,4 @@ MMapDirectory getDelegate() { return delegate; } } - - // TODO it would be nice to share code between PreLoadMMapDirectory and HybridDirectory but due to the nesting aspect of - // directories here makes it tricky. It would be nice to allow MMAPDirectory to pre-load on a per IndexInput basis. 
- static final class PreLoadMMapDirectory extends MMapDirectory { - private final MMapDirectory delegate; - private final Set preloadExtensions; - - PreLoadMMapDirectory(MMapDirectory delegate, LockFactory lockFactory, Set preload) throws IOException { - super(delegate.getDirectory(), lockFactory); - super.setPreload(false); - this.delegate = delegate; - this.delegate.setPreload(true); - this.preloadExtensions = preload; - assert getPreload() == false; - } - - @Override - public void setPreload(boolean preload) { - throw new IllegalArgumentException("can't set preload on a preload-wrapper"); - } - - @Override - public IndexInput openInput(String name, IOContext context) throws IOException { - if (useDelegate(name)) { - // we need to do these checks on the outer directory since the inner doesn't know about pending deletes - ensureOpen(); - ensureCanRead(name); - return delegate.openInput(name, context); - } - return super.openInput(name, context); - } - - @Override - public synchronized void close() throws IOException { - IOUtils.close(super::close, delegate); - } - - boolean useDelegate(String name) { - final String extension = FileSwitchDirectory.getExtension(name); - return preloadExtensions.contains(extension); - } - - MMapDirectory getDelegate() { - return delegate; - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java index fdc508098d82e..186aff230b8d0 100644 --- a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java +++ b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java @@ -57,6 +57,7 @@ public enum LuceneFilesExtensions { NVM("nvm", "Norms Metadata", true, false), PAY("pay", "Payloads", false, false), POS("pos", "Positions", false, false), + PSM("psm", "Postings Metadata", true, false), SI("si", "Segment Info", true, false), // Term dictionaries are typically performance-sensitive 
and hot in the page // cache, so we use mmap, which provides better performance. diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index a1038356735f0..c3d21b23d6a49 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -33,6 +33,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; @@ -147,7 +148,15 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. */ - public static final IOContext READONCE_CHECKSUM = new IOContext(IOContext.READONCE, true); + public static final IOContext READONCE_CHECKSUM = createReadOnceContext(); + + // while equivalent, these different read once contexts are checked by identity in directory implementations + private static IOContext createReadOnceContext() { + var context = IOContext.READONCE.withReadAdvice(ReadAdvice.SEQUENTIAL); + assert context != IOContext.READONCE; + assert context.equals(IOContext.READONCE); + return context; + } private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; @@ -632,7 +641,7 @@ private static void failIfCorrupted(Directory directory) throws IOException { List ex = new ArrayList<>(); for (String file : files) { if (file.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { - try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(file)) { 
CodecUtil.checkHeader(input, CODEC, CORRUPTED_MARKER_CODEC_VERSION, CORRUPTED_MARKER_CODEC_VERSION); final int size = input.readVInt(); final byte[] buffer = new byte[size]; @@ -919,7 +928,10 @@ private static void checksumFromLuceneFile( boolean readFileAsHash, BytesRef writerUuid ) throws IOException { - try (IndexInput in = directory.openInput(file, READONCE_CHECKSUM)) { + // We select the read once context carefully here since these constants, while equivalent are + // checked by identity in the different directory implementations. + var context = file.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : READONCE_CHECKSUM; + try (IndexInput in = directory.openInput(file, context)) { final long length = in.length(); if (length < CodecUtil.footerLength()) { // If the file isn't long enough to contain the footer then verifying it triggers an IAE, but really it's corrupted diff --git a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java index 2be9d0f224e24..501c2496aacb6 100644 --- a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java +++ b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java @@ -195,7 +195,7 @@ public BytesRef hash() { * * This ID may be {@link StoreFileMetadata#UNAVAILABLE_WRITER_UUID} (i.e. zero-length) if unavailable, e.g.: * - * - The file was written by a version of Lucene prior to {@link org.apache.lucene.util.Version#LUCENE_8_6_0}. + * - The file was written by a version of Lucene prior to 8.6.0. * - The metadata came from a version of Elasticsearch prior to {@link StoreFileMetadata#WRITER_UUID_MIN_VERSION}). * - The file is not one of the files listed above. 
* diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 763abb41797b5..db84be817bbd7 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -96,7 +96,7 @@ static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequ /* or from an existing document */ else if (docIdAndVersion != null) { // fields with stored term vectors - termVectorsByField = docIdAndVersion.reader.getTermVectors(docIdAndVersion.docId); + termVectorsByField = docIdAndVersion.reader.termVectors().get(docIdAndVersion.docId); Set selectedFields = request.selectedFields(); // generate tvs for fields where analyzer is overridden if (selectedFields == null && request.perFieldAnalyzer() != null) { @@ -301,7 +301,7 @@ private static Fields generateTermVectors( } } /* and read vectors from it */ - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java index 29161814e7724..525da1670f900 100644 --- a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java @@ -94,7 +94,7 @@ static Automaton buildAutomaton(String pattern) { String output = pattern; output = output.replace(".", "\\."); output = output.replace("*", ".*"); - return new RegExp(output).toAutomaton(); + return new RegExp(output, RegExp.ALL | RegExp.ALL).toAutomaton(); } /** diff --git 
a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index abba6ec6ae684..9bca59e9e4d62 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -12,13 +12,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.ShardCoreKeyMap; @@ -173,24 +171,12 @@ public int count(LeafReaderContext context) throws IOException { return in.count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); return in.scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.bulkScorer(context); - } - @Override public boolean isCacheable(LeafReaderContext ctx) { return in.isCacheable(ctx); diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 08148de0591cb..f3456870114f5 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ 
b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -41,6 +41,8 @@ import java.util.Objects; import java.util.Set; +import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; + /** * Uses a pattern string to define a protected space for indices belonging to a system feature, and, if needed, provides metadata for * managing indices that match the pattern. @@ -360,7 +362,7 @@ protected SystemIndexDescriptor( this.primaryIndex = primaryIndex; this.aliasName = aliasName; - final Automaton automaton = buildAutomaton(indexPattern, aliasName); + final Automaton automaton = Operations.determinize(buildAutomaton(indexPattern, aliasName), DEFAULT_DETERMINIZE_WORK_LIMIT); this.indexPatternAutomaton = new CharacterRunAutomaton(automaton); if (primaryIndex != null && indexPatternAutomaton.run(primaryIndex) == false) { throw new IllegalArgumentException("primary index does not match the index pattern!"); @@ -883,15 +885,15 @@ static Automaton buildAutomaton(String pattern, String alias) { final String patternAsRegex = patternToRegex(pattern); final String aliasAsRegex = alias == null ? 
null : patternToRegex(alias); - final Automaton patternAutomaton = new RegExp(patternAsRegex).toAutomaton(); + final Automaton patternAutomaton = new RegExp(patternAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); if (aliasAsRegex == null) { return patternAutomaton; } - final Automaton aliasAutomaton = new RegExp(aliasAsRegex).toAutomaton(); + final Automaton aliasAutomaton = new RegExp(aliasAsRegex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); - return Operations.union(patternAutomaton, aliasAutomaton); + return Operations.determinize(Operations.union(patternAutomaton, aliasAutomaton), DEFAULT_DETERMINIZE_WORK_LIMIT); } /** diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index a0a4388a4d54a..856b30d1c19e8 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -14,7 +14,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateResponse.ResetFeatureStateStatus; @@ -178,7 +177,7 @@ public SystemIndices(List pluginAndModuleFeatures) { this.netNewSystemIndexAutomaton = buildNetNewIndexCharacterRunAutomaton(featureDescriptors); this.productToSystemIndicesMatcher = getProductToSystemIndicesMap(featureDescriptors); this.executorSelector = new ExecutorSelector(this); - this.systemNameAutomaton = MinimizationOperations.minimize( + this.systemNameAutomaton = Operations.determinize( Operations.union(List.of(systemIndexAutomata, systemDataStreamIndicesAutomata, 
buildDataStreamAutomaton(featureDescriptors))), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); @@ -264,9 +263,7 @@ private static Map getProductToSystemIndicesMap(M .collect( Collectors.toUnmodifiableMap( Entry::getKey, - entry -> new CharacterRunAutomaton( - MinimizationOperations.minimize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ) + entry -> new CharacterRunAutomaton(Operations.determinize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)) ) ); } @@ -426,7 +423,7 @@ private static Automaton buildIndexAutomaton(Map featureDescrip .stream() .map(SystemIndices::featureToIndexAutomaton) .reduce(Operations::union); - return MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map featureDescriptors) { @@ -437,9 +434,7 @@ private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map SystemIndexDescriptor.buildAutomaton(descriptor.getIndexPattern(), descriptor.getAliasName())) .reduce(Operations::union); - return new CharacterRunAutomaton( - MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ); + return new CharacterRunAutomaton(Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton featureToIndexAutomaton(Feature feature) { @@ -459,7 +454,7 @@ private static Automaton buildDataStreamAutomaton(Map featureDe .map(dsName -> SystemIndexDescriptor.buildAutomaton(dsName, null)) .reduce(Operations::union); - return automaton.isPresent() ? MinimizationOperations.minimize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; + return automaton.isPresent() ? 
Operations.determinize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; } private static Predicate buildDataStreamNamePredicate(Map featureDescriptors) { @@ -472,7 +467,7 @@ private static Automaton buildDataStreamBackingIndicesAutomaton(Map 1) { throw new IllegalStateException( "failed to extract doc:" + target + ", the grouping field must be single valued" ); } + ord = (int) sorted.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java index ed07525c1dd7b..443963dd59dcd 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java @@ -170,10 +170,10 @@ public static TopFieldGroups merge(Sort sort, int start, int size, TopFieldGroup final TopFieldGroups shard = shardHits[shardIDX]; // totalHits can be non-zero even if no hits were // collected, when searchAfter was used: - totalHitCount += shard.totalHits.value; + totalHitCount += shard.totalHits.value(); // If any hit count is a lower bound then the merged // total hit count is a lower bound as well - if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + if (shard.totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } if (CollectionUtils.isEmpty(shard.scoreDocs) == false) { diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java index dca4ff503c788..67ece200c06ee 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java @@ -18,7 +18,7 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -61,7 +61,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final BinaryDocValues values = context.reader().getBinaryDocValues(fieldName); if (values == null) { return null; @@ -106,7 +106,8 @@ public float matchCost() { return 4; // at most 4 comparisons } }; - return new ConstantScoreScorer(this, score(), scoreMode, iterator); + + return new DefaultScorerSupplier(new ConstantScoreScorer(score(), scoreMode, iterator)); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java index c75c6e2373f25..788bf76087d1f 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.IOSupplier; import org.apache.lucene.util.InPlaceMergeSorter; import java.io.IOException; @@ -188,7 +189,11 @@ private static TermStates adjustTTF(IndexReaderContext readerContext, TermStates int df = termContext.docFreq(); long ttf = sumTTF; for (int i = 0; i < len; i++) { - TermState termState = termContext.get(leaves.get(i)); + IOSupplier termStateSupplier = termContext.get(leaves.get(i)); + if 
(termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } @@ -212,7 +217,11 @@ private static TermStates adjustDF(IndexReaderContext readerContext, TermStates } TermStates newCtx = new TermStates(readerContext); for (int i = 0; i < len; ++i) { - TermState termState = ctx.get(leaves.get(i)); + IOSupplier termStateSupplier = ctx.get(leaves.get(i)); + if (termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java index 8e85c1d974382..13b0bf650a39e 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,17 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo throw new IllegalStateException("Executing against a different reader than the query has been rewritten against"); } return new ConstantScoreWeight(this, boost) { + @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final int maxDoc = context.reader().maxDoc(); if (context.docBase + maxDoc <= minDoc) { return null; } final int segmentMinDoc = Math.max(0, minDoc - context.docBase); final DocIdSetIterator disi = new MinDocIterator(segmentMinDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index 1a8ac203f0cb5..6575f7f416bd9 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -67,8 +68,8 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Sort segmentSort = context.reader().getMetaData().getSort(); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Sort segmentSort = context.reader().getMetaData().sort(); if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) { throw new IOException("search sort :[" + sort + "] does not match the index sort:[" + segmentSort + "]"); } @@ -80,7 +81,8 @@ public Scorer scorer(LeafReaderContext context) throws IOException { return null; } final DocIdSetIterator disi = new MinDocQuery.MinDocIterator(firstDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java 
b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java index 7421579d643e4..d420e519a30e7 100644 --- a/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java +++ b/server/src/main/java/org/elasticsearch/lucene/similarity/LegacyBM25Similarity.java @@ -43,7 +43,7 @@ public final class LegacyBM25Similarity extends Similarity { *
*/ public LegacyBM25Similarity() { - this.bm25Similarity = new BM25Similarity(); + this(new BM25Similarity()); } /** @@ -54,7 +54,12 @@ public LegacyBM25Similarity() { * not within the range {@code [0..1]} */ public LegacyBM25Similarity(float k1, float b, boolean discountOverlaps) { - this.bm25Similarity = new BM25Similarity(k1, b, discountOverlaps); + this(new BM25Similarity(k1, b, discountOverlaps)); + } + + private LegacyBM25Similarity(BM25Similarity bm25Similarity) { + super(bm25Similarity.getDiscountOverlaps()); + this.bm25Similarity = bm25Similarity; } @Override @@ -81,13 +86,6 @@ public float getB() { return bm25Similarity.getB(); } - /** - * Returns true if overlap tokens are discounted from the document's length. - */ - public boolean getDiscountOverlaps() { - return bm25Similarity.getDiscountOverlaps(); - } - @Override public String toString() { return bm25Similarity.toString(); diff --git a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java index bd64ee88fc300..064f8ef3eacd8 100644 --- a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java @@ -109,14 +109,8 @@ private ConstantScoreWeight getStandardWeight(ScoreMode scoreMode, float boost) final Component2D component2D = create(geometries); return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -125,7 +119,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are 
going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final GeometryDocValueReader reader = new GeometryDocValueReader(); final Component2DVisitor visitor = Component2DVisitor.getVisitor(component2D, relation, encoder); @@ -143,7 +137,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? } }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override @@ -167,14 +161,8 @@ private ConstantScoreWeight getContainsWeight(ScoreMode scoreMode, float boost) } return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -183,7 +171,7 @@ public Scorer get(long leadCost) throws IOException { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final Component2DVisitor[] visitors = new Component2DVisitor[components2D.size()]; for (int i = 0; i < components2D.size(); i++) { @@ -210,7 +198,7 @@ public float matchCost() { return 1000f; // TODO: what should it be? 
} }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java b/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java index 2a2c816a9ce54..be41959417f14 100644 --- a/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java +++ b/server/src/main/java/org/elasticsearch/lucene/util/CombinedBitSet.java @@ -77,6 +77,19 @@ public int nextSetBit(int index) { return next; } + @Override + public int nextSetBit(int index, int upperBound) { + assert index >= 0 && index < length : "index=" + index + " numBits=" + length(); + int next = first.nextSetBit(index, upperBound); + while (next != DocIdSetIterator.NO_MORE_DOCS && second.get(next) == false) { + if (next == length() - 1) { + return DocIdSetIterator.NO_MORE_DOCS; + } + next = first.nextSetBit(next + 1, upperBound); + } + return next; + } + @Override public long ramBytesUsed() { return first.ramBytesUsed(); diff --git a/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java b/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java index e315dc046fe92..e46bb78bd7954 100644 --- a/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java +++ b/server/src/main/java/org/elasticsearch/lucene/util/MatchAllBitSet.java @@ -69,6 +69,12 @@ public int nextSetBit(int index) { return index; } + @Override + public int nextSetBit(int index, int upperBound) { + assert index < upperBound; + return index; + } + @Override public long ramBytesUsed() { return RAM_BYTES_USED; diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index b3c95186b6037..80ee1174db7b1 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ 
b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -85,6 +85,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -408,8 +409,8 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr Constants.OS_ARCH, Constants.JVM_VENDOR, Constants.JVM_NAME, - Constants.JAVA_VERSION, - Constants.JVM_VERSION + System.getProperty("java.version"), + Runtime.version().toString() ); logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments())); @@ -509,6 +510,7 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, for (final ExecutorBuilder builder : threadPool.builders()) { additionalSettings.addAll(builder.getRegisteredSettings()); } + addBwcSearchWorkerSettings(additionalSettings); SettingsExtension.load().forEach(e -> additionalSettings.addAll(e.getSettings())); // this is as early as we can validate settings at this point. 
we already pass them to ThreadPool @@ -539,6 +541,17 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, return settingsModule; } + @UpdateForV9 + private static void addBwcSearchWorkerSettings(List> additionalSettings) { + // TODO remove the below settings, they are unused and only here to enable BwC for deployments that still use them + additionalSettings.add( + Setting.intSetting("thread_pool.search_worker.queue_size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning) + ); + additionalSettings.add( + Setting.intSetting("thread_pool.search_worker.size", 0, Setting.Property.NodeScope, Setting.Property.DeprecatedWarning) + ); + } + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool, TelemetryProvider telemetryProvider) { IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList(), telemetryProvider); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index a53db3c5cc2de..5e45aec6a5240 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -4030,7 +4030,7 @@ protected void snapshotFile(SnapshotShardContext context, FileInfo fileInfo) thr final String file = fileInfo.physicalName(); try ( Releasable ignored = context.withCommitRef(); - IndexInput indexInput = store.openVerifyingInput(file, IOContext.READONCE, fileInfo.metadata()) + IndexInput indexInput = store.openVerifyingInput(file, IOContext.DEFAULT, fileInfo.metadata()) ) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index a457ebb67fd47..09f31abb58eb3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -72,7 +72,7 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli return channel -> client.search(countRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(SearchResponse countResponse) throws Exception { - assert countResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert countResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; return RestTable.buildResponse(buildTable(request, countResponse), channel); } }); @@ -90,7 +90,7 @@ protected Table getTableWithHeader(final RestRequest request) { private Table buildTable(RestRequest request, SearchResponse response) { Table table = getTableWithHeader(request); table.startRow(); - table.addCell(response.getHits().getTotalHits().value); + table.addCell(response.getHits().getTotalHits().value()); table.endRow(); return table; diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index 5556052e79ab8..49e192060a9c7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -94,7 +94,7 @@ public RestResponse buildResponse(SearchResponse response, XContentBuilder build if (terminateAfter != DEFAULT_TERMINATE_AFTER) { builder.field("terminated_early", response.isTerminatedEarly()); } - builder.field("count", response.getHits().getTotalHits().value); + builder.field("count", response.getHits().getTotalHits().value()); 
buildBroadcastShardsHeader( builder, request, diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 28330c7c45479..38157efd8a370 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -95,6 +95,11 @@ public List routes() { ); } + @Override + public Set supportedCapabilities() { + return SearchCapabilities.CAPABILITIES; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java new file mode 100644 index 0000000000000..45fd6afe4fca6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.rest.action.search; + +import java.util.Set; + +/** + * A {@link Set} of "capabilities" supported by the {@link RestSearchAction}. + */ +public final class SearchCapabilities { + + private SearchCapabilities() {} + + /** Support regex and range match rules in interval queries. 
*/ + private static final String RANGE_REGEX_INTERVAL_QUERY_CAPABILITY = "range_regexp_interval_queries"; + + public static final Set CAPABILITIES = Set.of(RANGE_REGEX_INTERVAL_QUERY_CAPABILITY); +} diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index c8129717b5ccd..6c7d36ee9a436 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -116,6 +116,11 @@ public void setDocument(int docid) { this.docId = docid; } + /** Get the current document. */ + public int docId() { + return docId; + } + public void setScorer(Scorable scorer) { this.scoreSupplier = () -> { try { diff --git a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java index 9dde32cc75e6a..5e6cdb4f2c0ba 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.TermStates; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.util.IOSupplier; import org.elasticsearch.common.util.CachedSupplier; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.search.internal.ContextIndexSearcher; @@ -215,7 +216,12 @@ private PostingsEnum[] loadPostings() { continue; } - TermState state = termStates.get(leafReaderContext); + IOSupplier stateSupplier = termStates.get(leafReaderContext); + if (stateSupplier == null) { + postings[i] = null; + continue; + } + TermState state = stateSupplier.get(); if (state == null) { postings[i] = null; continue; diff --git a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java index 
c80a2e7200ecc..d83530e82b16d 100644 --- a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java +++ b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java @@ -46,9 +46,8 @@ public void setDocument(int docID) { public void execute() { try { if (hasValue) { - long ord; - while ((ord = sortedSetDocValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - BytesRef bytesRef = sortedSetDocValues.lookupOrd(ord); + for (int i = 0; i < sortedSetDocValues.docValueCount(); i++) { + BytesRef bytesRef = sortedSetDocValues.lookupOrd(sortedSetDocValues.nextOrd()); emit(bytesRef.utf8ToString()); } } diff --git a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java index 6297fbaa23187..d9550dd17a058 100644 --- a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java @@ -157,7 +157,8 @@ public SortedSetIpSupplier(SortedSetDocValues in) { public void setNextDocId(int docId) throws IOException { count = 0; if (in.advanceExact(docId)) { - for (long ord = in.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = in.nextOrd()) { + for (int i = 0; i < in.docValueCount(); i++) { + long ord = in.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java index fd7c5227e22ac..be1b972dcd41a 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import 
org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues; @@ -19,7 +20,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class ByteKnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected ByteVectorValues input; // null if no vectors + protected final ByteVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; // null if no vectors protected byte[] vector; protected final int dims; @@ -31,6 +33,7 @@ protected ByteKnnDenseVectorDocValuesField(@Nullable ByteVectorValues input, Str super(name, elementType); this.dims = dims; this.input = input; + this.iterator = input == null ? null : input.iterator(); } @Override @@ -38,15 +41,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java index c7678b03dd8c5..3e38092200511 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ 
package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenormalizedCosineFloatVectorValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; @@ -20,7 +21,8 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected FloatVectorValues input; // null if no vectors + protected final FloatVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; protected float[] vector; protected final int dims; @@ -28,6 +30,7 @@ public KnnDenseVectorDocValuesField(@Nullable FloatVectorValues input, String na super(name, ElementType.FLOAT); this.dims = dims; this.input = input; + this.iterator = input == null ? null : input.iterator(); } @Override @@ -35,15 +38,15 @@ public void setNextDocId(int docId) throws IOException { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index de4cde5393c69..1521b17a81766 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -180,7 +180,14 @@ final class DefaultSearchContext extends 
SearchContext { this.indexShard = readerContext.indexShard(); Engine.Searcher engineSearcher = readerContext.acquireSearcher("search"); - if (executor == null) { + int maximumNumberOfSlices = determineMaximumNumberOfSlices( + executor, + request, + resultsType, + enableQueryPhaseParallelCollection, + field -> getFieldCardinality(field, readerContext.indexService(), engineSearcher.getDirectoryReader()) + ); + if (executor == null || maximumNumberOfSlices <= 1) { this.searcher = new ContextIndexSearcher( engineSearcher.getIndexReader(), engineSearcher.getSimilarity(), @@ -196,13 +203,7 @@ final class DefaultSearchContext extends SearchContext { engineSearcher.getQueryCachingPolicy(), lowLevelCancellation, executor, - determineMaximumNumberOfSlices( - executor, - request, - resultsType, - enableQueryPhaseParallelCollection, - field -> getFieldCardinality(field, readerContext.indexService(), engineSearcher.getDirectoryReader()) - ), + maximumNumberOfSlices, minimumDocsPerSlice ); } @@ -290,6 +291,7 @@ static int determineMaximumNumberOfSlices( ToLongFunction fieldCardinality ) { return executor instanceof ThreadPoolExecutor tpe + && tpe.getQueue().isEmpty() && isParallelCollectionSupportedForResults(resultsType, request.source(), fieldCardinality, enableQueryPhaseParallelCollection) ? 
tpe.getMaximumPoolSize() : 1; diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index 49480816bbbb1..5ac25fe0ff695 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -477,11 +477,11 @@ protected BytesRef pick( @Override protected int pick(SortedSetDocValues values) throws IOException { - long maxOrd = -1; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { - maxOrd = ord; + int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextOrd(); } - return Math.toIntExact(maxOrd); + return Math.toIntExact(values.nextOrd()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 6a89d66bb3411..beac39c2de304 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -16,8 +16,11 @@ import java.util.Set; public final class SearchFeatures implements FeatureSpecification { + + public static final NodeFeature LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade"); + @Override public Set getFeatures() { - return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED); + return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 8ff5de3c9b8ac..896dd7f999949 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -288,12 +288,12 @@ public Iterator toXContentChunked(ToXContent.Params params return Iterators.concat(Iterators.single((b, p) -> 
b.startObject(Fields.HITS)), Iterators.single((b, p) -> { boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value; + long total = totalHits == null ? -1 : totalHits.value(); b.field(Fields.TOTAL, total); } else if (totalHits != null) { b.startObject(Fields.TOTAL); - b.field("value", totalHits.value); - b.field("relation", totalHits.relation == Relation.EQUAL_TO ? "eq" : "gte"); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); b.endObject(); } return b; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 4afcc57b7b15a..6308b19358410 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -1263,6 +1263,16 @@ public static List getIntervalsSourceProviderNamed IntervalsSourceProvider.class, IntervalsSourceProvider.Fuzzy.NAME, IntervalsSourceProvider.Fuzzy::new + ), + new NamedWriteableRegistry.Entry( + IntervalsSourceProvider.class, + IntervalsSourceProvider.Regexp.NAME, + IntervalsSourceProvider.Regexp::new + ), + new NamedWriteableRegistry.Entry( + IntervalsSourceProvider.class, + IntervalsSourceProvider.Range.NAME, + IntervalsSourceProvider.Range::new ) ); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index a0b91261236b0..70101bbc7fc54 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -142,7 +142,6 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; import 
java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -228,7 +227,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv "search.worker_threads_enabled", true, Property.NodeScope, - Property.Dynamic + Property.Dynamic, + Property.DeprecatedWarning ); public static final Setting QUERY_PHASE_PARALLEL_COLLECTION_ENABLED = Setting.boolSetting( @@ -279,7 +279,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final FetchPhase fetchPhase; private final RankFeatureShardPhase rankFeatureShardPhase; - private volatile boolean enableSearchWorkerThreads; + private volatile Executor searchExecutor; private volatile boolean enableQueryPhaseParallelCollection; private volatile long defaultKeepAlive; @@ -373,7 +373,10 @@ public SearchService( clusterService.getClusterSettings() .addSettingsUpdateConsumer(ENABLE_REWRITE_AGGS_TO_FILTER_BY_FILTER, this::setEnableRewriteAggsToFilterByFilter); - enableSearchWorkerThreads = SEARCH_WORKER_THREADS_ENABLED.get(settings); + if (SEARCH_WORKER_THREADS_ENABLED.get(settings)) { + searchExecutor = threadPool.executor(Names.SEARCH); + } + clusterService.getClusterSettings().addSettingsUpdateConsumer(SEARCH_WORKER_THREADS_ENABLED, this::setEnableSearchWorkerThreads); enableQueryPhaseParallelCollection = QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.get(settings); @@ -382,7 +385,11 @@ public SearchService( } private void setEnableSearchWorkerThreads(boolean enableSearchWorkerThreads) { - this.enableSearchWorkerThreads = enableSearchWorkerThreads; + if (enableSearchWorkerThreads) { + searchExecutor = threadPool.executor(Names.SEARCH); + } else { + searchExecutor = null; + } } private void setEnableQueryPhaseParallelCollection(boolean enableQueryPhaseParallelCollection) { @@ -1111,7 +1118,6 @@ private DefaultSearchContext createSearchContext( reader.indexShard().shardId(), 
request.getClusterAlias() ); - ExecutorService executor = this.enableSearchWorkerThreads ? threadPool.executor(Names.SEARCH_WORKER) : null; searchContext = new DefaultSearchContext( reader, request, @@ -1120,7 +1126,7 @@ private DefaultSearchContext createSearchContext( timeout, fetchPhase, lowLevelCancellation, - executor, + searchExecutor, resultsType, enableQueryPhaseParallelCollection, minimumDocsPerSlice diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index f66f6b4a3805d..624db3f1cfe8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.ScoreMode; import java.io.IOException; @@ -201,6 +200,7 @@ private static class MultiLeafBucketCollector extends LeafBucketCollector { private final boolean cacheScores; private final LeafBucketCollector[] collectors; private int numCollectors; + private ScoreCachingScorable scorable; private MultiLeafBucketCollector(List collectors, boolean cacheScores) { this.collectors = collectors.toArray(new LeafBucketCollector[collectors.size()]); @@ -211,11 +211,11 @@ private MultiLeafBucketCollector(List collectors, boolean c @Override public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); + scorable = new ScoreCachingScorable(scorer); } for (int i = 0; i < numCollectors; ++i) { final LeafCollector c = collectors[i]; - c.setScorer(scorer); + c.setScorer(cacheScores ? 
scorable : scorer); } } @@ -227,6 +227,9 @@ private void removeCollector(int i) { @Override public void collect(int doc, long bucket) throws IOException { + if (scorable != null) { + scorable.curDoc = doc; + } final LeafBucketCollector[] collectors = this.collectors; int numCollectors = this.numCollectors; for (int i = 0; i < numCollectors;) { @@ -244,4 +247,25 @@ public void collect(int doc, long bucket) throws IOException { } } } + + private static class ScoreCachingScorable extends Scorable { + + private final Scorable in; + private int curDoc = -1; // current document + private int scoreDoc = -1; // document that score was computed on + private float score; + + ScoreCachingScorable(Scorable in) { + this.in = in; + } + + @Override + public float score() throws IOException { + if (curDoc != scoreDoc) { + score = in.score(); + scoreDoc = curDoc; + } + return score; + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 2e9e04eca4afc..9ee15306ce636 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -278,7 +278,7 @@ private static boolean isMaybeMultivalued(LeafReaderContext context, SortField s * optimization and null if index sort is not applicable. 
*/ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException { - Sort indexSort = context.reader().getMetaData().getSort(); + Sort indexSort = context.reader().getMetaData().sort(); if (indexSort == null) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index 0d0d2c6f922e8..dcc2ad52cbc50 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -41,8 +41,6 @@ import java.util.List; import java.util.function.BiConsumer; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - /** * A {@link SingleDimensionValuesSource} for global ordinals. */ @@ -247,9 +245,8 @@ public DocIdSetIterator competitiveIterator() { @Override public void collect(int doc, long bucket) throws IOException { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { - currentValue = ord; + for (int i = 0; i < dvs.docValueCount(); i++) { + currentValue = dvs.nextOrd(); next.collect(doc, bucket); } } else if (missingBucket) { @@ -306,8 +303,8 @@ public void collect(int doc, long bucket) throws IOException { public void collect(int doc, long bucket) throws IOException { if (currentValueIsSet == false) { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + for (int i = 0; i < dvs.docValueCount(); i++) { + long ord = dvs.nextOrd(); if (term.equals(lookup.lookupOrd(ord))) { currentValueIsSet = true; currentValue = ord; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index f774f67b3df8f..af4d60bf424a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -39,7 +39,6 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; class CountedTermsAggregator extends TermsAggregator { @@ -77,7 +76,8 @@ private LeafBucketCollector getLeafCollector(SortedSetDocValues ords, LeafBucket @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (ords.advanceExact(doc)) { - for (long ord = ords.nextOrd(); ord != NO_MORE_ORDS; ord = ords.nextOrd()) { + for (int i = 0; i < ords.docValueCount(); i++) { + long ord = ords.nextOrd(); collectOrdinal(bucketOrds.add(owningBucketOrd, ords.lookupOrd(ord)), doc, sub); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java index 2969b7bf82c80..7dd192b317a57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -21,7 +20,6 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -117,15 +115,6 @@ public int count(LeafReaderContext context) throws IOException { return multiValuedSegmentWeight().count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /* @@ -144,19 +133,6 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return multiValuedSegmentWeight().scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - PointValues points = context.reader().getPointValues(field); - if (points == null) { - return null; - } - if (points.size() == points.getDocCount()) { - // Each doc that has points has exactly one point. 
- return singleValuedSegmentWeight().bulkScorer(context); - } - return multiValuedSegmentWeight().bulkScorer(context); - } - private Weight singleValuedSegmentWeight() throws IOException { if (singleValuedSegmentWeight == null) { singleValuedSegmentWeight = delegateForSingleValuedSegments.createWeight(searcher, scoreMode, boost); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java index 282c09c84414c..e8e33655d47c1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; @@ -215,7 +216,7 @@ long count(LeafReaderContext ctx, FiltersAggregator.Counter counter, Bits live) // No hits in this segment. return 0; } - scorer.score(counter, live); + scorer.score(counter, live, 0, DocIdSetIterator.NO_MORE_DOCS); return counter.readAndReset(ctx); } @@ -228,7 +229,7 @@ void collect(LeafReaderContext ctx, LeafCollector collector, Bits live) throws I // No hits in this segment. 
return; } - scorer.score(collector, live); + scorer.score(collector, live, 0, DocIdSetIterator.NO_MORE_DOCS); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 4c87b5961ac1a..b5d3485e72f82 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.bucket.global; import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; @@ -45,6 +46,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, return LeafBucketCollector.NO_OP_COLLECTOR; } grow(1); + scorer.score(new LeafCollector() { @Override public void collect(int doc) throws IOException { @@ -55,7 +57,7 @@ public void collect(int doc) throws IOException { public void setScorer(Scorable scorer) throws IOException { sub.setScorer(scorer); } - }, aggCtx.getLeafReaderContext().reader().getLiveDocs()); + }, aggCtx.getLeafReaderContext().reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 12182a5931a4f..0fbb9745aa400 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -197,7 +197,6 @@ void processBufferedChildBuckets() throws 
IOException { } for (; childDocId < currentParentDoc; childDocId = childDocs.nextDoc()) { - cachedScorer.doc = childDocId; for (var bucket : bucketBuffer) { collectBucket(sub, childDocId, bucket); } @@ -207,19 +206,12 @@ void processBufferedChildBuckets() throws IOException { } private static class CachedScorable extends Scorable { - int doc; float score; @Override public final float score() { return score; } - - @Override - public int docID() { - return doc; - } - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 2f18d2dc1e42e..6119af3cb6a57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -157,7 +157,8 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas this.collector = (doc, bucket) -> { if (values.advanceExact(doc)) { int lo = 0; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); lo = collect(doc, ord, bucket, lo); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 0cfad5ba9e0c7..37cee75c11b48 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import 
org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.util.BigArrays; @@ -99,7 +99,7 @@ public void collect(int doc, long bucket) throws IOException { // Designed to be overridden by subclasses that may score docs by criteria // other than Lucene score protected TopDocsCollector createTopDocsCollector(int size) throws IOException { - return TopScoreDocCollector.create(size, Integer.MAX_VALUE); + return new TopScoreDocCollectorManager(size, null, Integer.MAX_VALUE, false).newCollector(); } // Can be overridden by subclasses that have a different priority queue implementation @@ -214,7 +214,6 @@ class PerSegmentCollects extends Scorable { private final AggregationExecutionContext aggCtx; int maxDocId = Integer.MIN_VALUE; private float currentScore; - private int currentDocId = -1; private Scorable currentScorer; PerSegmentCollects(AggregationExecutionContext aggCtx) throws IOException { @@ -249,7 +248,6 @@ public void replayRelatedMatches(List sd) throws IOException { leafCollector.setScorer(this); currentScore = 0; - currentDocId = -1; if (maxDocId < 0) { return; } @@ -259,7 +257,6 @@ public void replayRelatedMatches(List sd) throws IOException { int rebased = scoreDoc.doc - aggCtx.getLeafReaderContext().docBase; if ((rebased >= 0) && (rebased <= maxDocId)) { currentScore = scoreDoc.score; - currentDocId = rebased; // We stored the bucket ID in Lucene's shardIndex property // for convenience. 
leafCollector.collect(rebased, scoreDoc.shardIndex); @@ -276,11 +273,6 @@ public float score() throws IOException { return currentScore; } - @Override - public int docID() { - return currentDocId; - } - public void collect(int docId, long parentBucket) throws IOException { perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1); PerParentBucketSamples sampler = perBucketSamples.get((int) parentBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index ef4101892a461..539b9440cea25 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -118,12 +118,12 @@ public long longValue() throws IOException { @Override public boolean advanceExact(int target) throws IOException { if (globalOrds.advanceExact(target)) { - value = globalOrds.nextOrd(); // Check there isn't a second value for this // document - if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (globalOrds.docValueCount() > 1) { throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } + value = globalOrds.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java index ed39f41d9daed..89fe1a53a01cc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java @@ -20,6 +20,7 @@ 
import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,15 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SplittableRandom random = new SplittableRandom(BitMixer.mix(hash ^ seed)); int maxDoc = context.reader().maxDoc(); - return new ConstantScoreScorer( - this, + Scorer scorer = new ConstantScoreScorer( boost, ScoreMode.COMPLETE_NO_SCORES, new RandomSamplingIterator(maxDoc, p, random::nextInt) ); + return new DefaultScorerSupplier(scorer); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 1b0ec8e356082..0f7c61dc9f25b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -53,7 +53,6 @@ import java.util.function.LongPredicate; import java.util.function.LongUnaryOperator; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; /** @@ -167,7 +166,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); 
collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } @@ -179,7 +179,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (false == acceptedGlobalOrdinals.test(globalOrd)) { continue; } @@ -350,7 +351,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == segmentOrds.advanceExact(doc)) { return; } - for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_ORDS; segmentOrd = segmentOrds.nextOrd()) { + for (int i = 0; i < segmentOrds.docValueCount(); i++) { + long segmentOrd = segmentOrds.nextOrd(); int docCount = docCountProvider.getDocCount(doc); segmentDocCounts.increment(segmentOrd + 1, docCount); } @@ -524,7 +526,8 @@ private void forEachExcludeDeletedDocs(BucketInfoConsumer consumer) throws IOExc if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } @@ -634,7 +637,8 @@ void forEachExcludeDeletedDocs(long owningBucketOrd, BucketInfoConsumer consumer if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? 
valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 4d78df2704740..4bcbe08ed227c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -358,8 +358,8 @@ public IncludeExclude( if (exclude != null && excludeValues != null) { throw new IllegalArgumentException(); } - this.include = include == null ? null : new RegExp(include); - this.exclude = exclude == null ? null : new RegExp(exclude); + this.include = include == null ? null : new RegExp(include, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); + this.exclude = exclude == null ? 
null : new RegExp(exclude, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); this.includeValues = includeValues; this.excludeValues = excludeValues; this.incZeroBasedPartition = 0; @@ -529,7 +529,7 @@ private Automaton toAutomaton() { if (exclude != null) { a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - return a; + return Operations.determinize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public StringFilter convertToStringFilter(DocValueFormat format) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java index 84cd869517702..05aa80f06448d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java @@ -308,7 +308,8 @@ public void collect(int doc, long bucketOrd) throws IOException { bits = new BitArray(maxOrd, bigArrays); visitedOrds.set(bucketOrd, bits); } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java index 9214c4710db84..b7f98d9a8af6e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java @@ -254,8 +254,8 @@ public CompetitiveIterator competitiveIterator() { @Override public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { - for (long 
ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues - .nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); if (bits.getAndSet(ord) == false) { competitiveIterator.onVisitedOrdinal(ord); } @@ -304,7 +304,8 @@ public void collect(int doc, long bucketOrd) throws IOException { public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { final BitArray bits = getNewOrExistingBitArray(bucketOrd); - for (long ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index f7f319618fa36..8ff381cbbc84d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -135,7 +135,7 @@ public InternalAggregation get() { maxScore = reduceAndFindMaxScore(aggregations, shardDocs); reducedTopDocs = TopDocs.merge(from, size, shardDocs); } - assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; + assert reducedTopDocs.totalHits.relation() == Relation.EQUAL_TO; return new InternalTopHits( getName(), @@ -262,8 +262,8 @@ public boolean equals(Object obj) { InternalTopHits other = (InternalTopHits) obj; if (from != other.from) return false; if (size != other.size) return false; - if (topDocs.topDocs.totalHits.value != other.topDocs.topDocs.totalHits.value) return false; - if (topDocs.topDocs.totalHits.relation != other.topDocs.topDocs.totalHits.relation) return false; + if (topDocs.topDocs.totalHits.value() != other.topDocs.topDocs.totalHits.value()) return false; + if 
(topDocs.topDocs.totalHits.relation() != other.topDocs.topDocs.totalHits.relation()) return false; if (topDocs.topDocs.scoreDocs.length != other.topDocs.topDocs.scoreDocs.length) return false; for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc thisDoc = topDocs.topDocs.scoreDocs[d]; @@ -287,8 +287,8 @@ public int hashCode() { int hashCode = super.hashCode(); hashCode = 31 * hashCode + Integer.hashCode(from); hashCode = 31 * hashCode + Integer.hashCode(size); - hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value); - hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation.hashCode(); + hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value()); + hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation().hashCode(); for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc doc = topDocs.topDocs.scoreDocs[d]; hashCode = 31 * hashCode + doc.doc; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java index d59d824bde435..90d6c298fbd23 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java @@ -51,7 +51,7 @@ public static boolean hasValue(InternalTDigestPercentiles agg) { } public static boolean hasValue(InternalTopHits agg) { - return (agg.getHits().getTotalHits().value == 0 + return (agg.getHits().getTotalHits().value() == 0 && Double.isNaN(agg.getHits().getMaxScore()) && Double.isNaN(agg.getTopDocs().maxScore)) == false; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 9d8d98bc7c7cc..87d8f839dfca1 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -19,8 +19,10 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.lucene.Lucene; @@ -136,12 +138,14 @@ public void collect(int docId, long bucket) throws IOException { // but here we create collectors ourselves and we need prevent OOM because of crazy an offset and size. topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); if (sort == null) { - collectors = new Collectors(TopScoreDocCollector.create(topN, Integer.MAX_VALUE), null); + TopScoreDocCollector topScoreDocCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false) + .newCollector(); + collectors = new Collectors(topScoreDocCollector, null); } else { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits(){ // Note that this would require to catch CollectionTerminatedException collectors = new Collectors( - TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), + new TopFieldCollectorManager(sort.sort, topN, null, Integer.MAX_VALUE, false).newCollector(), subSearchContext.trackScores() ? 
new MaxScoreCollector() : null ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java index 4724bd0db05df..9b47507628dd1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java @@ -271,18 +271,17 @@ public long nextOrd() throws IOException { if (hasOrds) { return values.nextOrd(); } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -321,7 +320,11 @@ public BytesRef lookupOrd(long ord) throws IOException { @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -339,12 +342,7 @@ public long nextOrd() throws IOException { return ord + 1; } } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index ac8cc5c8232eb..472619da78622 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -22,7 +22,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; @@ -38,9 +37,6 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.FutureTask; -import java.util.concurrent.RunnableFuture; import java.util.function.IntSupplier; import static org.elasticsearch.index.IndexSortConfig.TIME_SERIES_SORT; @@ -68,10 +64,7 @@ public TimeSeriesIndexSearcher(IndexSearcher searcher, List cancellati searcher.getSimilarity(), searcher.getQueryCache(), searcher.getQueryCachingPolicy(), - false, - searcher.getExecutor(), - 1, - -1 + false ); } catch (IOException e) { // IOException from wrapping the index searcher which should never happen. @@ -94,28 +87,8 @@ public void setMinimumScore(Float minimumScore) { public void search(Query query, BucketCollector bucketCollector) throws IOException { query = searcher.rewrite(query); Weight weight = searcher.createWeight(query, bucketCollector.scoreMode(), 1); - if (searcher.getExecutor() == null) { - search(bucketCollector, weight); - bucketCollector.postCollection(); - return; - } - // offload to the search worker thread pool whenever possible. 
It will be null only when search.worker_threads_enabled is false - RunnableFuture task = new FutureTask<>(() -> { - search(bucketCollector, weight); - bucketCollector.postCollection(); - return null; - }); - searcher.getExecutor().execute(task); - try { - task.get(); - } catch (InterruptedException e) { - throw new ThreadInterruptedException(e); - } catch (ExecutionException e) { - if (e.getCause() instanceof RuntimeException runtimeException) { - throw runtimeException; - } - throw new RuntimeException(e.getCause()); - } + search(bucketCollector, weight); + bucketCollector.postCollection(); } private void search(BucketCollector bucketCollector, Weight weight) throws IOException { @@ -131,7 +104,7 @@ private void search(BucketCollector bucketCollector, Weight weight) throws IOExc Scorer scorer = weight.scorer(leaf); if (scorer != null) { if (minimumScore != null) { - scorer = new MinScoreScorer(weight, scorer, minimumScore); + scorer = new MinScoreScorer(scorer, minimumScore); } LeafWalker leafWalker = new LeafWalker(leaf, scorer, bucketCollector, () -> tsidOrd[0]); if (leafWalker.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java index 42b29fda3c472..769effdd60240 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java @@ -76,7 +76,7 @@ private static CharacterRunAutomaton nestedChildrenAutomaton(List nested for (String child : nestedChildren) { automata.add(Operations.concatenate(Automata.makeString(child + "."), Automata.makeAnyString())); } - return new CharacterRunAutomaton(Operations.union(automata)); + return new CharacterRunAutomaton(Operations.determinize(Operations.union(automata), AUTOMATON_MAX_DETERMINIZED_STATES)); } // Builds an 
automaton that will match any field that conforms to one of the input patterns @@ -84,7 +84,11 @@ private static CharacterRunAutomaton buildUnmappedFieldPatternAutomaton(List subInfos = fragInfo.getSubInfos(); CollectionUtil.introSort(subInfos, (o1, o2) -> { - int startOffset = o1.getTermsOffsets().get(0).getStartOffset(); - int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset(); + int startOffset = o1.termsOffsets().get(0).getStartOffset(); + int startOffset2 = o2.termsOffsets().get(0).getStartOffset(); return Integer.compare(startOffset, startOffset2); }); return new WeightedFragInfo( - Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), + Math.min(fragInfo.getSubInfos().get(0).termsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), fragInfo.getEndOffset(), subInfos, fragInfo.getTotalBoost() diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 208ca613a350b..b9577ee85d043 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -56,6 +56,7 @@ import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.Executor; +import java.util.stream.Collectors; /** * Context-aware extension of {@link IndexSearcher}. @@ -76,6 +77,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private QueryProfiler profiler; private final MutableQueryTimeout cancellable; + private final boolean hasExecutor; private final int maximumNumberOfSlices; // don't create slices with less than this number of docs private final int minimumDocsPerSlice; @@ -133,6 +135,7 @@ public ContextIndexSearcher( int minimumDocsPerSlice ) throws IOException { super(wrapWithExitableDirectoryReader ? 
new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader, executor); + this.hasExecutor = executor != null; setSimilarity(similarity); setQueryCache(queryCache); setQueryCachingPolicy(queryCachingPolicy); @@ -141,6 +144,15 @@ public ContextIndexSearcher( this.maximumNumberOfSlices = maximumNumberOfSlices; } + /** + * Whether an executor was provided at construction time or not. This indicates whether operations that support concurrency + * may be executed concurrently. It is not straightforward to deduct this from {@link #getTaskExecutor()} because {@link IndexSearcher} + * creates a {@link org.apache.lucene.search.TaskExecutor} anyways. + */ + public boolean hasExecutor() { + return hasExecutor; + } + @Override protected LeafSlice[] slices(List leaves) { // we offload to the executor unconditionally, including requests that don't support concurrency @@ -149,11 +161,6 @@ protected LeafSlice[] slices(List leaves) { return leafSlices; } - // package private for testing - int getMinimumDocsPerSlice() { - return minimumDocsPerSlice; - } - public void setProfiler(QueryProfiler profiler) { this.profiler = profiler; } @@ -243,7 +250,14 @@ public static LeafSlice[] computeSlices(List leaves, int maxS throw new IllegalArgumentException("maxSliceNum must be >= 1 (got " + maxSliceNum + ")"); } if (maxSliceNum == 1) { - return new LeafSlice[] { new LeafSlice(new ArrayList<>(leaves)) }; + return new LeafSlice[] { + new LeafSlice( + new ArrayList<>( + leaves.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ) + ) }; } // total number of documents to be searched final int numDocs = leaves.stream().mapToInt(l -> l.reader().maxDoc()).sum(); @@ -291,7 +305,11 @@ private static LeafSlice[] computeSlices(List leaves, int min for (List currentLeaf : queue) { // LeafSlice ctor reorders leaves so that leaves within a slice preserve the order they had within the IndexReader. 
// This is important given how Elasticsearch sorts leaves by descending @timestamp to get better query performance. - slices[upto++] = new LeafSlice(currentLeaf); + slices[upto++] = new LeafSlice( + currentLeaf.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ); } return slices; @@ -322,12 +340,9 @@ public T search(Query query, CollectorManager col } /** - * Similar to the lucene implementation, with the following changes made: - * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads - * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene - * does not allow consuming them from a different thread. - * 2) handles the ES TimeExceededException - * */ + * Same implementation as the default one in Lucene, with an additional call to postCollection in cased there are no segments. + * The rest is a plain copy from Lucene. + */ private T search(Weight weight, CollectorManager collectorManager, C firstCollector) throws IOException { LeafSlice[] leafSlices = getSlices(); if (leafSlices.length == 0) { @@ -347,10 +362,10 @@ private T search(Weight weight, CollectorManager } final List> listTasks = new ArrayList<>(leafSlices.length); for (int i = 0; i < leafSlices.length; ++i) { - final LeafReaderContext[] leaves = leafSlices[i].leaves; + final LeafReaderContextPartition[] leaves = leafSlices[i].partitions; final C collector = collectors.get(i); listTasks.add(() -> { - search(Arrays.asList(leaves), weight, collector); + search(leaves, weight, collector); return collector; }); } @@ -359,14 +374,18 @@ private T search(Weight weight, CollectorManager } } + /** + * Similar to the lucene implementation, with the following changes made: + * 1) postCollection is performed after each segment is collected. 
This is needed for aggregations, performed by search threads + * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene + * does not allow consuming them from a different thread. + * 2) handles the ES TimeExceededException + */ @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { - collector.setWeight(weight); + public void search(LeafReaderContextPartition[] leaves, Weight weight, Collector collector) throws IOException { boolean success = false; try { - for (LeafReaderContext ctx : leaves) { // search each subreader - searchLeaf(ctx, weight, collector); - } + super.search(leaves, weight, collector); success = true; } catch (@SuppressWarnings("unused") TimeExceededException e) { timeExceeded = true; @@ -410,13 +429,8 @@ private static class TimeExceededException extends RuntimeException { // This exception should never be re-thrown, but we fill in the stacktrace to be able to trace where it does not get properly caught } - /** - * Lower-level search API. - * - * {@link LeafCollector#collect(int)} is called for every matching document in - * the provided ctx. 
- */ - private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException { + @Override + protected void searchLeaf(LeafReaderContext ctx, int minDocId, int maxDocId, Weight weight, Collector collector) throws IOException { cancellable.checkCancelled(); final LeafCollector leafCollector; try { @@ -436,7 +450,7 @@ private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collecto bulkScorer = new CancellableBulkScorer(bulkScorer, cancellable::checkCancelled); } try { - bulkScorer.score(leafCollector, liveDocs); + bulkScorer.score(leafCollector, liveDocs, minDocId, maxDocId); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java index 3bdd7ff3630cf..64b54d3623f04 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java @@ -14,9 +14,9 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; -import org.apache.lucene.index.FilterVectorValues; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.QueryTimeout; @@ -32,6 +32,7 @@ import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import java.io.IOException; +import java.util.Objects; /** * Wraps an {@link IndexReader} with a {@link QueryCancellation} @@ -459,7 +460,6 @@ public void grow(int count) { } private static class ExitableByteVectorValues extends ByteVectorValues { - 
private int calls; private final QueryCancellation queryCancellation; private final ByteVectorValues in; @@ -479,8 +479,13 @@ public int size() { } @Override - public byte[] vectorValue() throws IOException { - return in.vectorValue(); + public byte[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -505,33 +510,17 @@ public DocIdSetIterator iterator() { } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - final int nextDoc = in.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), queryCancellation); } @Override - public int advance(int target) throws IOException { - final int advance = in.advance(target); - checkAndThrowWithSampling(); - return advance; - } - - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + public ByteVectorValues copy() throws IOException { + return in.copy(); } } - private static class ExitableFloatVectorValues extends FilterVectorValues { - private int calls; + private static class ExitableFloatVectorValues extends FilterFloatVectorValues { private final QueryCancellation queryCancellation; ExitableFloatVectorValues(FloatVectorValues vectorValues, QueryCancellation queryCancellation) { @@ -541,17 +530,13 @@ private static class ExitableFloatVectorValues extends FilterVectorValues { } @Override - public int advance(int target) throws IOException { - final int advance = super.advance(target); - checkAndThrowWithSampling(); - return advance; + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); } @Override - public int nextDoc() throws IOException { - final int nextDoc = super.nextDoc(); - 
checkAndThrowWithSampling(); - return nextDoc; + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -575,13 +560,61 @@ public DocIdSetIterator iterator() { }; } - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + @Override + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), queryCancellation); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); } } + private static KnnVectorValues.DocIndexIterator createExitableIterator( + KnnVectorValues.DocIndexIterator delegate, + QueryCancellation queryCancellation + ) { + return new KnnVectorValues.DocIndexIterator() { + private int calls; + + @Override + public int index() { + return delegate.index(); + } + + @Override + public int docID() { + return delegate.docID(); + } + + @Override + public long cost() { + return delegate.cost(); + } + + @Override + public int nextDoc() throws IOException { + int nextDoc = delegate.nextDoc(); + checkAndThrowWithSampling(); + return nextDoc; + } + + @Override + public int advance(int target) throws IOException { + final int advance = delegate.advance(target); + checkAndThrowWithSampling(); + return advance; + } + + private void checkAndThrowWithSampling() { + if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { + queryCancellation.checkCancelled(); + } + } + }; + } + private static class ExitableDocSetIterator extends DocIdSetIterator { private int calls; private final DocIdSetIterator in; @@ -622,4 +655,43 @@ private void checkAndThrowWithSampling() { } } } + + /** Delegates all methods to a wrapped {@link FloatVectorValues}. 
*/ + private abstract static class FilterFloatVectorValues extends FloatVectorValues { + + /** Wrapped values */ + protected final FloatVectorValues in; + + /** Sole constructor */ + protected FilterFloatVectorValues(FloatVectorValues in) { + Objects.requireNonNull(in); + this.in = in; + } + + @Override + public DocIndexIterator iterator() { + return in.iterator(); + } + + @Override + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); + } + + @Override + public int dimension() { + return in.dimension(); + } + + @Override + public int size() { + return in.size(); + } + + } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java index 9b594e2935504..f03be3f09b7d2 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java @@ -103,15 +103,6 @@ static final class FieldUsageTrackingLeafReader extends SequentialStoredFieldsLe this.notifier = notifier; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f != null) { - f = new FieldUsageTrackingTermVectorFields(f); - } - return f; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -136,11 +127,6 @@ public PointValues getPointValues(String field) throws IOException { return pointValues; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { StoredFields storedFields = super.storedFields(); diff --git 
a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index cd8f381e85f83..f559325063bef 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.elasticsearch.search.profile.Timer; import java.io.IOException; @@ -25,15 +24,12 @@ final class ProfileScorer extends Scorer { private final Scorer scorer; - private final ProfileWeight profileWeight; private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer, setMinCompetitiveScoreTimer; - ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) { - super(w); + ProfileScorer(Scorer scorer, QueryProfileBreakdown profile) { this.scorer = scorer; - this.profileWeight = w; scoreTimer = profile.getNewTimer(QueryTimingType.SCORE); nextDocTimer = profile.getNewTimer(QueryTimingType.NEXT_DOC); advanceTimer = profile.getNewTimer(QueryTimingType.ADVANCE); @@ -58,11 +54,6 @@ public float score() throws IOException { } } - @Override - public Weight getWeight() { - return profileWeight; - } - @Override public Collection getChildren() throws IOException { return scorer.getChildren(); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index 27bf8ea8aae47..5d35699adec95 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -37,15 +37,6 @@ public ProfileWeight(Query query, Weight 
subQueryWeight, QueryProfileBreakdown p this.profile = profile; } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier supplier = scorerSupplier(context); - if (supplier == null) { - return null; - } - return supplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final Timer timer = profile.getNewTimer(QueryTimingType.BUILD_SCORER); @@ -67,12 +58,24 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti public Scorer get(long loadCost) throws IOException { timer.start(); try { - return new ProfileScorer(weight, subQueryScorerSupplier.get(loadCost), profile); + return new ProfileScorer(subQueryScorerSupplier.get(loadCost), profile); } finally { timer.stop(); } } + @Override + public BulkScorer bulkScorer() throws IOException { + // We use the default bulk scorer instead of the specialized one. The reason + // is that Lucene's BulkScorers do everything at once: finding matches, + // scoring them and calling the collector, so they make it impossible to + // see where time is spent, which is the purpose of query profiling. + // The default bulk scorer will pull a scorer and iterate over matches, + // this might be a significantly different execution path for some queries + // like disjunctions, but in general this is what is done anyway + return super.bulkScorer(); + } + @Override public long cost() { timer.start(); @@ -90,18 +93,6 @@ public void setTopLevelScoringClause() throws IOException { }; } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - // We use the default bulk scorer instead of the specialized one. The reason - // is that Lucene's BulkScorers do everything at once: finding matches, - // scoring them and calling the collector, so they make it impossible to - // see where time is spent, which is the purpose of query profiling. 
- // The default bulk scorer will pull a scorer and iterate over matches, - // this might be a significantly different execution path for some queries - // like disjunctions, but in general this is what is done anyway - return super.bulkScorer(context); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return subQueryWeight.explain(context, doc); diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 423fad92483ed..22d30df503637 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -241,7 +241,7 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort } final Sort sort = sortAndFormats.sort; for (LeafReaderContext ctx : reader.leaves()) { - Sort indexSort = ctx.reader().getMetaData().getSort(); + Sort indexSort = ctx.reader().getMetaData().sort(); if (indexSort == null || Lucene.canEarlyTerminate(sort, indexSort) == false) { return false; } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java index d1cbdd6adb761..00cf90fe12301 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java @@ -202,7 +202,11 @@ public DocIdSetIterator competitiveIterator() throws IOException { } }; } - return new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + LeafCollector leafCollector = new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { + leafCollector = ScoreCachingWrappingScorer.wrap(leafCollector); + } + return leafCollector; } 
private static FilterScorable wrapToIgnoreMinCompetitiveScore(Scorable scorer) { @@ -263,9 +267,6 @@ private class CompositeLeafCollector implements LeafCollector { @Override public void setScorer(Scorable scorer) throws IOException { - if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); - } // Ignore calls to setMinCompetitiveScore so that if the top docs collector // wants to skip low-scoring hits, the aggs collector still sees all hits. // this is important also for terminate_after in case used when total hits tracking is early terminated. diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index 079e725dd375b..6b6434fbf08ed 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.search.rank.RankDoc; @@ -89,13 +90,13 @@ public Explanation explain(LeafReaderContext context, int doc) { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { // Segment starts indicate how many docs are in the segment, // upper equalling lower indicates no documents for this segment if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -164,6 +165,7 @@ private int currentDocId() { } }; + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java index 9fd2aceaf7949..b2f6a13b77775 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsSortField.java @@ -17,6 +17,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.NumericComparator; +import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.hnsw.IntToIntFunction; import org.elasticsearch.search.rank.RankDoc; @@ -70,6 +71,16 @@ public void setTopValue(Integer value) { topValue = value; } + @Override + protected long missingValueAsComparableLong() { + return missingValue; + } + + @Override + protected long sortableBytesToLong(byte[] bytes) { + return NumericUtils.sortableBytesToInt(bytes, 0); + } + @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { IntToIntFunction docToRank = doc -> rankDocMap.getOrDefault(context.docBase + doc, Integer.MAX_VALUE); diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java index 5077d68c12baa..c65c2bb6650c1 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import 
org.elasticsearch.script.AbstractFieldScript; @@ -69,10 +70,11 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext ctx) { + public ScorerSupplier scorerSupplier(LeafReaderContext ctx) throws IOException { S scriptContext = scriptContextFunction.apply(ctx); DocIdSetIterator approximation = DocIdSetIterator.all(ctx.reader().maxDoc()); - return new ConstantScoreScorer(this, score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java index 751ecb18cc68f..430d22ebc9084 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.SloppyMath; @@ -79,8 +80,9 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + return new DefaultScorerSupplier(scorer); } @Override @@ -116,8 +118,7 @@ private 
class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java index 7c8ac4a8cae63..d18098ee7de33 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractLongFieldScript; @@ -56,8 +57,10 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + return new DefaultScorerSupplier( + new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost) + ); } @Override @@ -84,8 +87,7 @@ private class DistanceScorer extends Scorer { private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float 
boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java index ab32427ed4ac1..3c5931367370e 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -34,7 +35,12 @@ public StringScriptFieldRegexpQuery( script, leafFactory, fieldName, - new ByteRunAutomaton(new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates)) + new ByteRunAutomaton( + Operations.determinize( + new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ) + ) ); this.pattern = pattern; this.syntaxFlags = syntaxFlags; diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java index 5bacaf0d36b55..6c1aa6f72c4a1 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.search.AutomatonQueries; 
import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -44,7 +45,7 @@ private static Automaton buildAutomaton(Term term, boolean caseInsensitive) { if (caseInsensitive) { return AutomatonQueries.toCaseInsensitiveWildcardAutomaton(term); } - return WildcardQuery.toAutomaton(term); + return WildcardQuery.toAutomaton(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java index f4ab7e29e1684..6de888ac8aff4 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -58,9 +59,10 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator iterator = createIterator(context, sliceStart, sliceStart + sliceSize); - return new ConstantScoreScorer(this, boost, scoreMode, iterator); + Scorer scorer = new ConstantScoreScorer(boost, scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } private static DocIdSetIterator createIterator(LeafReaderContext context, int sliceStart, int sliceEnd) { diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java index b66ae219ace97..05cf173468fdc 100644 --- 
a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -41,7 +42,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), getField()); final DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -66,7 +67,8 @@ public float matchCost() { return 10; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index 3927f54461bb8..9aecbfdd84ee6 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -49,10 +50,11 @@ public 
TermsSliceQuery(String field, int id, int max) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final DocIdSet disi = build(context.reader()); final DocIdSetIterator leafIt = disi.iterator(); - return new ConstantScoreScorer(this, score(), scoreMode, leafIt); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, leafIt); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index a6fd4ef90693d..e60e534d6acaa 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -126,7 +126,6 @@ public void setScorer(Scorable scorer) { @Override protected boolean advanceExact(int doc) throws IOException { - assert doc == scorer.docID() : "expected scorer to be on [" + doc + "] but was on [" + scorer.docID() + "]"; /* We will never be called by documents that don't match the * query and they'll all have a score, thus `true`. 
*/ score = scorer.score(); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 8fdc33f38934f..fd6cfeaea639b 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Weight; @@ -84,7 +85,7 @@ private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSu LeafCollector leafCollector = null; try { leafCollector = collector.getLeafCollector(context); - scorer.score(leafCollector, context.reader().getLiveDocs()); + scorer.score(leafCollector, context.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 1366da366b068..ed8197786ba7e 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -133,7 +133,7 @@ public CandidateSet drawCandidates(CandidateSet set) throws IOException { * because that's what {@link DirectSpellChecker#suggestSimilar} expects * when filtering terms. 
*/ - int threshold = thresholdTermFrequency(original.termStats.docFreq); + int threshold = thresholdTermFrequency(original.termStats.docFreq()); if (threshold == Integer.MAX_VALUE) { // the threshold is the max possible frequency so we can skip the search return set; @@ -226,7 +226,7 @@ public void nextToken() throws IOException { } private static double score(TermStats termStats, double errorScore, long dictionarySize) { - return errorScore * (((double) termStats.totalTermFreq + 1) / ((double) dictionarySize + 1)); + return errorScore * (((double) termStats.totalTermFreq() + 1) / ((double) dictionarySize + 1)); } // package protected for test diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java index a47cd5fe5a84b..0fd3ebcd00865 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java @@ -36,7 +36,7 @@ protected double scoreUnigram(Candidate word) throws IOException { @Override protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { join(separator, spare, w_1.term, word.term); - return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq + alpha * numTerms); + return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq() + alpha * numTerms); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java index fe64a65498776..0d66311303080 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java @@ -58,7 +58,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { 
if (count < 1) { return unigramLambda * scoreUnigram(word); } - return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq)) + unigramLambda * scoreUnigram(word); + return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq())) + unigramLambda * scoreUnigram(word); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index 7257a0d972459..21d1f34b68eee 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -70,7 +70,7 @@ public void nextToken() throws IOException { if (posIncAttr.getPositionIncrement() == 0 && typeAttribute.type() == SynonymFilter.TYPE_SYNONYM) { assert currentSet != null; TermStats termStats = generator.termStats(term); - if (termStats.docFreq > 0) { + if (termStats.docFreq() > 0) { currentSet.addOneCandidate(generator.createCandidate(BytesRef.deepCopyOf(term), termStats, realWordLikelihood)); } } else { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java index d893e0986e0d3..270866c14b20a 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java @@ -35,7 +35,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { if (count < 1) { return discount * scoreUnigram(word); } - return count / (w_1.termStats.totalTermFreq + 0.00000000001d); + return count / (w_1.termStats.totalTermFreq() + 0.00000000001d); } @Override @@ -50,7 +50,7 @@ protected double scoreTrigram(Candidate w, Candidate w_1, Candidate w_2) throws join(separator, spare, w_2.term, 
w_1.term, w.term); long trigramCount = frequency(spare.get()); if (trigramCount < 1) { - return discount * (bigramCount / (w_1.termStats.totalTermFreq + 0.00000000001d)); + return discount * (bigramCount / (w_1.termStats.totalTermFreq() + 0.00000000001d)); } return trigramCount / (bigramCount + 0.00000000001d); } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java index 44bbd0f50951c..31e19b6784757 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.search.Weight; @@ -70,12 +71,12 @@ public Explanation explain(LeafReaderContext leafReaderContext, int i) throws IO } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - VectorScorer vectorScorer = vectorScorer(leafReaderContext); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + VectorScorer vectorScorer = vectorScorer(context); if (vectorScorer == null) { return null; } - return new DenseVectorScorer(this, vectorScorer); + return new DefaultScorerSupplier(new DenseVectorScorer(vectorScorer, boost)); } @Override @@ -178,11 +179,10 @@ static class DenseVectorScorer extends Scorer { private final DocIdSetIterator iterator; private final float boost; - DenseVectorScorer(DenseVectorWeight weight, VectorScorer vectorScorer) { - super(weight); + DenseVectorScorer(VectorScorer vectorScorer, float boost) { this.vectorScorer = vectorScorer; this.iterator = vectorScorer.iterator(); - this.boost = weight.boost; + this.boost = boost; } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java index 413840f2b451b..9f3d83b4da082 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java @@ -34,7 +34,7 @@ public ESDiversifyingChildrenByteKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java index 80704a3b552fe..3907bdf89bc6f 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java @@ -34,7 +34,7 @@ public ESDiversifyingChildrenFloatKnnVectorQuery( @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 14bb94a366e50..9363f67a7350b 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -27,7 +27,7 @@ public ESKnnByteVectorQuery(String field, byte[] target, Integer k, int numCands protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index 590d8cfbbaba1..be0437af9131d 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -27,7 +27,7 @@ public ESKnnFloatVectorQuery(String field, float[] target, Integer k, int numCan protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java index 06fb109d6580e..bb83b8528c6c8 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -88,13 +89,13 @@ public Explanation explain(LeafReaderContext context, int doc) { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { // Segment starts indicate how many docs are in the segment, // upper equalling lower indicates no documents for this segment if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -177,6 +178,7 @@ private int currentDocId() { } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java index 77f60adc4fcd8..5219778047bcd 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; 
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; @@ -142,12 +143,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer innerScorer = in.scorer(context); - if (innerScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier inScorerSupplier = in.scorerSupplier(context); + if (inScorerSupplier == null) { return null; } - return new MinScoreScorer(this, innerScorer, docScore, boost); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(inScorerSupplier.get(leadCost), docScore, boost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index b14116b3d55ba..c760e8043e262 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -230,7 +230,7 @@ public void getSynonymSetRules(String synonymSetId, ActionListener { - long totalSynonymRules = countResponse.getHits().getTotalHits().value; + long totalSynonymRules = countResponse.getHits().getTotalHits().value(); if (totalSynonymRules > maxSynonymsSets) { logger.warn( "The number of synonym rules in the synonym set [{}] exceeds the maximum allowed." 
@@ -265,7 +265,7 @@ public void getSynonymSetRules(String synonymSetId, int from, int size, ActionLi .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(new DelegatingIndexNotFoundActionListener<>(synonymSetId, listener, (searchListener, searchResponse) -> { - final long totalSynonymRules = searchResponse.getHits().getTotalHits().value; + final long totalSynonymRules = searchResponse.getHits().getTotalHits().value(); // If there are no rules, check that the synonym set actually exists to return the proper error if (totalSynonymRules == 0) { checkSynonymSetExists(synonymSetId, searchListener.delegateFailure((existsListener, response) -> { @@ -383,7 +383,7 @@ public void putSynonymRule(String synonymsSetId, SynonymRule synonymRule, Action .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(l1.delegateFailureAndWrap((searchListener, searchResponse) -> { - long synonymsSetSize = searchResponse.getHits().getTotalHits().value; + long synonymsSetSize = searchResponse.getHits().getTotalHits().value(); if (synonymsSetSize >= maxSynonymsSets) { listener.onFailure( new IllegalArgumentException("The number of synonym rules in a synonyms set cannot exceed " + maxSynonymsSets) diff --git a/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java b/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java index 134766fbeae57..c3a24d012c013 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java +++ b/server/src/main/java/org/elasticsearch/threadpool/DefaultBuiltInExecutorBuilders.java @@ -72,16 +72,6 @@ public Map getBuilders(Settings settings, int allocated new EsExecutors.TaskTrackingConfig(true, searchAutoscalingEWMA) ) ); - result.put( - ThreadPool.Names.SEARCH_WORKER, - new FixedExecutorBuilder( - settings, - ThreadPool.Names.SEARCH_WORKER, - searchOrGetThreadPoolSize, - -1, - EsExecutors.TaskTrackingConfig.DEFAULT - ) - ); 
result.put( ThreadPool.Names.SEARCH_COORDINATION, new FixedExecutorBuilder( diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index b3f8f2e02fc06..9eb994896cbff 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -88,7 +88,6 @@ public static class Names { public static final String ANALYZE = "analyze"; public static final String WRITE = "write"; public static final String SEARCH = "search"; - public static final String SEARCH_WORKER = "search_worker"; public static final String SEARCH_COORDINATION = "search_coordination"; public static final String AUTO_COMPLETE = "auto_complete"; public static final String SEARCH_THROTTLED = "search_throttled"; @@ -158,7 +157,6 @@ public static ThreadPoolType fromType(String type) { entry(Names.ANALYZE, ThreadPoolType.FIXED), entry(Names.WRITE, ThreadPoolType.FIXED), entry(Names.SEARCH, ThreadPoolType.FIXED), - entry(Names.SEARCH_WORKER, ThreadPoolType.FIXED), entry(Names.SEARCH_COORDINATION, ThreadPoolType.FIXED), entry(Names.AUTO_COMPLETE, ThreadPoolType.FIXED), entry(Names.MANAGEMENT, ThreadPoolType.SCALING), diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index b99a15507f742..4e85ba2cf479f 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1 +1,2 @@ org.elasticsearch.index.codec.Elasticsearch814Codec +org.elasticsearch.index.codec.Elasticsearch816Codec diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 
2515c3e680789..1489fa6de90c5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -53,8 +53,9 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; @@ -327,11 +328,11 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene99Codec(Lucene99Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene912Codec(Lucene912Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { - return new Completion99PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); + return new Completion912PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); } else { 
return super.postingsFormat(); } @@ -414,25 +415,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene99Codec.Mode mode() { - return Lucene99Codec.Mode.BEST_SPEED; + Lucene912Codec.Mode mode() { + return Lucene912Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene99Codec.Mode mode() { - return Lucene99Codec.Mode.BEST_COMPRESSION; + Lucene912Codec.Mode mode() { + return Lucene912Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene99Codec.Mode mode(); + abstract Lucene912Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene99Codec(codecMode.mode())); + .setCodec(new Lucene912Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -640,7 +641,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene99Codec(mode.mode()) { + .setCodec(new Lucene912Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); @@ -687,7 +688,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats final String[] files; final Directory directory; if (sis.getUseCompoundFile()) { - directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.READ); + directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.DEFAULT); files = directory.listAll(); } else { directory = 
reader.directory(); @@ -785,14 +786,15 @@ private static class RandomMatchQuery extends Query { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final FixedBitSet bits = new FixedBitSet(context.reader().maxDoc()); for (int i = 0; i < bits.length(); i++) { if (randomBoolean()) { bits.set(i); } } - return new ConstantScoreScorer(this, 1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + Scorer scorer = new ConstantScoreScorer(1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java index b7919878f9081..681d9d000beef 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java @@ -79,8 +79,8 @@ public void testNullShardResultHandling() throws Exception { queryPhaseResultConsumer.consumeResult(querySearchResult, nextCounter::incrementAndGet); } var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertFalse(reducePhase.isEmptyResult()); assertEquals(10, nextCounter.get()); } @@ -94,8 +94,8 @@ public void testEmptyResults() throws Exception { ) ) { var reducePhase = queryPhaseResultConsumer.reduce(); - 
assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertTrue(reducePhase.isEmptyResult()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 90174a89209b8..99401e8a8d40a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -151,11 +151,11 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNotNull(responseRef.get().get(1)); assertNull(responseRef.get().get(1).fetchResult()); - assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); @@ -236,7 +236,7 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, 
responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNull(responseRef.get().get(1)); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 31ef57482cab1..09dd7821cd123 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -116,7 +116,7 @@ public void testShortcutQueryAndFetchOptimization() throws Exception { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); if (numHits != 0) { assertEquals(42, searchResponse.getHits().getAt(0).docId()); } @@ -244,7 +244,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(42, searchResponse.getHits().getAt(1).docId()); assertEquals(0, searchResponse.getFailedShards()); @@ -353,7 +353,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(1, 
searchResponse.getFailedShards()); assertEquals(1, searchResponse.getSuccessfulShards()); @@ -468,7 +468,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); assertEquals(Math.min(numHits, resultSetSize), searchResponse.getHits().getHits().length); SearchHit[] hits = searchResponse.getHits().getHits(); for (int i = 0; i < hits.length; i++) { @@ -703,7 +703,7 @@ public void sendExecuteFetch( mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(1, searchResponse.getHits().getHits().length); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(0, searchResponse.getFailedShards()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 857402d1baaac..9a507977c0123 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -297,8 +297,8 @@ public void testMerge() { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } 
for (SearchHit hit : mergedResponse.hits().getHits()) { SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); @@ -415,8 +415,8 @@ protected boolean lessThan(RankDoc a, RankDoc b) { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } int rank = 1; for (SearchHit hit : mergedResponse.hits().getHits()) { @@ -522,8 +522,8 @@ private static int getTotalQueryHits(AtomicArray results) { int resultCount = 0; for (SearchPhaseResult shardResult : results.asList()) { TopDocs topDocs = shardResult.queryResult().topDocs().topDocs; - assert topDocs.totalHits.relation == Relation.EQUAL_TO; - resultCount += (int) topDocs.totalHits.value; + assert topDocs.totalHits.relation() == Relation.EQUAL_TO; + resultCount += (int) topDocs.totalHits.value(); } return resultCount; } @@ -784,7 +784,7 @@ public void testConsumerConcurrently() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -844,7 +844,7 @@ public void testConsumerOnlyAggs() throws Exception { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(0, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, 
reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -902,7 +902,7 @@ public void testConsumerOnlyHits() throws Exception { assertAggReduction(request); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -969,7 +969,7 @@ public void testReduceTopNWithFromOffset() throws Exception { ScoreDoc[] scoreDocs = reduce.sortedTopDocs().scoreDocs(); assertEquals(5, scoreDocs.length); assertEquals(100.f, reduce.maxScore(), 0.0f); - assertEquals(12, reduce.totalHits().value); + assertEquals(12, reduce.totalHits().value()); assertEquals(95.0f, scoreDocs[0].score, 0.0f); assertEquals(94.0f, scoreDocs[1].score, 0.0f); assertEquals(93.0f, scoreDocs[2].score, 0.0f); @@ -1022,7 +1022,7 @@ public void testConsumerSortByField() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertTrue(reduce.sortedTopDocs().isSortedByField()); assertEquals(1, reduce.sortedTopDocs().sortFields().length); @@ -1079,7 +1079,7 @@ public void testConsumerFieldCollapsing() throws Exception { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); 
assertEquals(3, reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(a, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertEquals(b, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[1]).fields[0]); assertEquals(c, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[2]).fields[0]); @@ -1199,7 +1199,7 @@ public void testConsumerSuggestions() throws Exception { assertEquals(maxScoreCompletion, reduce.sortedTopDocs().scoreDocs()[0].score, 0f); assertEquals(0, reduce.sortedTopDocs().scoreDocs()[0].doc); assertNotEquals(-1, reduce.sortedTopDocs().scoreDocs()[0].shardIndex); - assertEquals(0, reduce.totalHits().value); + assertEquals(0, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -1290,7 +1290,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 3c4976d9bfa86..52a4d7bbb0180 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -245,11 +245,11 @@ public 
void run() { SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); if (withScroll) { - assertThat(phase.totalHits().value, equalTo((long) numShards)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo((long) numShards)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } else { - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); } assertThat(phase.sortedTopDocs().scoreDocs().length, equalTo(1)); assertThat(phase.sortedTopDocs().scoreDocs()[0], instanceOf(FieldDoc.class)); @@ -535,8 +535,8 @@ public void run() { assertThat(successfulOps.get(), equalTo(2)); SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932") @@ -700,8 +700,8 @@ public void run() { assertThat(successfulOps.get(), equalTo(2)); SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), 
equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null); SearchActionListener listener = new SearchActionListener(searchShardTarget, 0) { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 2b0ed0552e594..51796f404c283 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -762,11 +762,11 @@ public void testMergeSearchHits() throws InterruptedException { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } - final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value; + final int numDocs = totalHits == null || totalHits.value() >= requestedSize ? requestedSize : (int) totalHits.value(); int scoreFactor = randomIntBetween(1, numResponses); float maxScore = scoreSort ? 
numDocs * scoreFactor : Float.NaN; SearchHit[] hits = randomSearchHitArray( @@ -862,8 +862,8 @@ public void testMergeSearchHits() throws InterruptedException { assertNull(searchHits.getTotalHits()); } else { assertNotNull(searchHits.getTotalHits()); - assertEquals(expectedTotalHits.value, searchHits.getTotalHits().value); - assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation); + assertEquals(expectedTotalHits.value(), searchHits.getTotalHits().value()); + assertSame(expectedTotalHits.relation(), searchHits.getTotalHits().relation()); } if (expectedMaxScore == Float.NEGATIVE_INFINITY) { assertTrue(Float.isNaN(searchHits.getMaxScore())); @@ -910,9 +910,9 @@ public void testMergeNoResponsesAdded() { assertEquals(0, response.getNumReducePhases()); assertFalse(response.isTimedOut()); assertNotNull(response.getHits().getTotalHits()); - assertEquals(0, response.getHits().getTotalHits().value); + assertEquals(0, response.getHits().getTotalHits().value()); assertEquals(0, response.getHits().getHits().length); - assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation); + assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation()); assertNull(response.getScrollId()); assertSame(InternalAggregations.EMPTY, response.getAggregations()); assertNull(response.getSuggest()); @@ -1004,7 +1004,7 @@ public void testMergeEmptySearchHitsWithNonEmpty() { assertEquals(2, merger.numResponses()); SearchResponse mergedResponse = merger.getMergedResponse(clusters); try { - assertEquals(10, mergedResponse.getHits().getTotalHits().value); + assertEquals(10, mergedResponse.getHits().getTotalHits().value()); assertEquals(10, mergedResponse.getHits().getHits().length); assertEquals(2, mergedResponse.getTotalShards()); assertEquals(2, mergedResponse.getSuccessfulShards()); @@ -1032,8 +1032,8 @@ public void testMergeOnlyEmptyHits() { TotalHits totalHits = null; if (trackTotalHitsUpTo != 
SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } SearchResponse searchResponse = new SearchResponse( SearchHits.empty(totalHits, Float.NaN), @@ -1232,7 +1232,7 @@ public void testPartialAggsMixedWithFullResponses() { SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1 + assertThat(hits.getTotalHits().value(), equalTo(2L)); // should be 2 hits from remote1 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ { @@ -1273,7 +1273,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1414,7 +1414,7 @@ public void testPartialAggsMixedWithFullResponses() { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1483,7 
+1483,7 @@ public void testPartialAggsMixedWithFullResponses() { private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Index[] indices) { TotalHits totalHits = new TotalHits(2, TotalHits.Relation.EQUAL_TO); - final int numDocs = (int) totalHits.value; + final int numDocs = (int) totalHits.value(); int scoreFactor = 1; float maxScore = numDocs; int numFields = 1; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index afc4c6e9eccbf..bbeae6b19b8ac 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -621,8 +621,8 @@ public void testSerialization() throws IOException { if (searchResponse.getHits().getTotalHits() == null) { assertNull(deserialized.getHits().getTotalHits()); } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value()); + assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation()); } assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index c00fece686524..1b86e5b00000c 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ 
b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -400,6 +400,6 @@ protected Fields getTermVectorsFromLucene(DirectoryReader directoryReader, TestD ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(1, scoreDocs.length); - return directoryReader.getTermVectors(scoreDocs[0].doc); + return directoryReader.termVectors().get(scoreDocs[0].doc); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 91b0c6502993c..f542c4c9c873c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -9,6 +9,10 @@ package org.elasticsearch.cluster.metadata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.TransportVersion; @@ -602,6 +606,14 @@ public void testCalculateNumRoutingShards() { public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); + Automaton patternAutomaton = new RegExp("\\.test-~(one.*)", RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); + assertTrue( + new CharacterRunAutomaton(Operations.determinize(patternAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)).run( + ".test-~(one.*)" + ) + ); + // TODO remove this smoke test ^^^ once the issue is fixed + systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-~(one*)", "test")); 
systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".pattern-test*", "test-1")); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 8158917f08187..9300aa992b687 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexOrDocValuesQuery; @@ -44,7 +45,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.MockDirectoryWrapper; @@ -172,10 +172,10 @@ public void testPruneUnreferencedFiles() throws IOException { assertEquals(3, open.maxDoc()); IndexSearcher s = newSearcher(open); - assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value, 0); + assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value(), 0); for 
(String file : dir.listAll()) { assertFalse("unexpected file: " + file, file.equals("segments_3") || file.startsWith("_2")); @@ -403,11 +403,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { @@ -464,18 +459,6 @@ public void testAsSequentialBitsUsesRandomAccess() throws IOException { } } - /** - * Test that the "unmap hack" is detected as supported by lucene. - * This works around the following bug: https://bugs.openjdk.java.net/browse/JDK-4724038 - *

- * While not guaranteed, current status is "Critical Internal API": http://openjdk.java.net/jeps/260 - * Additionally this checks we did not screw up the security logic around the hack. - */ - public void testMMapHackSupported() throws Exception { - // add assume's here if needed for certain platforms, but we should know if it does not work. - assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED); - } - public void testWrapAllDocsLive() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) @@ -508,8 +491,9 @@ public void testWrapAllDocsLive() throws Exception { IndexSearcher searcher = newSearcher(reader); Set actualDocs = new HashSet<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } @@ -554,8 +538,9 @@ public void testWrapLiveDocsNotExposeAbortedDocuments() throws Exception { IndexSearcher searcher = newSearcher(reader); List actualDocs = new ArrayList<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 8332ff87a9d57..918dcc1bcbd42 100644 --- 
a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -127,8 +128,9 @@ public void setUp() throws Exception { // now go over each doc, build the relevant references and filter reader = DirectoryReader.open(iw); List filterTerms = new ArrayList<>(); + StoredFields storedFields = reader.storedFields(); for (int docId = 0; docId < reader.maxDoc(); docId++) { - Document doc = reader.document(docId); + Document doc = storedFields.document(docId); addFreqs(doc, referenceAll); if (deletedIds.contains(doc.getField("id").stringValue()) == false) { addFreqs(doc, referenceNotDeleted); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java index 7a8d43ebbfd18..55ca666d8588b 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -11,15 +11,11 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import 
org.apache.lucene.tests.search.AssertingScorer; import org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.ESTestCase; @@ -66,27 +62,8 @@ public int advance(int target) throws IOException { }; } - private static Weight fakeWeight() { - return new Weight(new MatchAllDocsQuery()) { - @Override - public Explanation explain(LeafReaderContext context, int doc) throws IOException { - return null; - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return null; - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - private static Scorer hideTwoPhaseIterator(Scorer in) { - return new Scorer(in.getWeight()) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return TwoPhaseIterator.asDocIdSetIterator(in.twoPhaseIterator()); @@ -111,7 +88,7 @@ public int docID() { private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs); - final Scorer scorer = new Scorer(fakeWeight()) { + final Scorer scorer = new Scorer() { int lastScoredDoc = -1; final float matchCost = (random().nextBoolean() ? 
1000 : 0) + random().nextInt(2000); @@ -192,7 +169,7 @@ public void doTestRandom(boolean twoPhase) throws IOException { } Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); final float minScore = random().nextFloat(); - Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore); + Scorer minScoreScorer = new MinScoreScorer(scorer, minScore); int doc = -1; while (doc != DocIdSetIterator.NO_MORE_DOCS) { final int target; @@ -250,7 +227,7 @@ public void testConjunction() throws Exception { final float minScore; if (randomBoolean()) { minScore = randomFloat(); - MinScoreScorer minScoreScorer = new MinScoreScorer(scorer.getWeight(), scorer, minScore); + MinScoreScorer minScoreScorer = new MinScoreScorer(scorer, minScore); scorers.add(minScoreScorer); } else { scorers.add(scorer); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java index b1df24f4db2ad..3894efd0b7d4c 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java @@ -117,7 +117,7 @@ public boolean incrementToken() throws IOException { final double boost10 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|10 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -125,7 +125,7 @@ public boolean incrementToken() throws IOException { final double boost1 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|1 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) 
x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -178,7 +178,7 @@ public void testTopN() throws Exception { expectedTerms[idx++] = new Term("text", text); } for (BooleanClause clause : clauses) { - Term term = ((TermQuery) clause.getQuery()).getTerm(); + Term term = ((TermQuery) clause.query()).getTerm(); assertTrue(Arrays.asList(expectedTerms).contains(term)); } diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 6e8eb47035d43..e0e05c84b5649 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -84,12 +84,12 @@ public void testSimpleNumericOps() throws Exception { try (IndexReader reader = DirectoryReader.open(indexWriter)) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - Document doc = searcher.doc(topDocs.scoreDocs[0].doc); + Document doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); IndexableField f = doc.getField("test"); assertThat(f.numericValue(), equalTo(2)); topDocs = searcher.search(IntPoint.newExactQuery("test", 2), 1); - doc = searcher.doc(topDocs.scoreDocs[0].doc); + doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); f = doc.getField("test"); assertThat(f.stringValue(), equalTo("2")); } @@ -115,7 +115,7 @@ public void testOrdering() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); final ArrayList fieldsOrder = new ArrayList<>(); - searcher.doc(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { + searcher.storedFields().document(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { @Override public Status needsField(FieldInfo fieldInfo) throws IOException { fieldsOrder.add(fieldInfo.name); 
diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 97a6faaa5c6f6..01c4ac3c6fd6a 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -54,7 +54,7 @@ public void testVectorHighlighter() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -87,7 +87,7 @@ public void testVectorHighlighterPrefixQuery() throws Exception { IndexReader reader = searcher.getIndexReader(); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); @@ -143,7 +143,7 @@ public void testVectorHighlighterNoStore() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -170,7 +170,7 @@ public void testVectorHighlighterNoTermVector() throws Exception { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = 
highlighter.getBestFragment( diff --git a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java index 022a6994496ae..9419c63f9c48a 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java @@ -227,7 +227,7 @@ public static void corruptFile(Path fileToCorrupt, Logger logger) throws IOExcep } long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - 8); // one long is the checksum... 8 bytes checksumAfterCorruption = input.getChecksum(); diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index db656b1fc5a94..450d123f551c8 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -1776,9 +1777,10 @@ private static void forEachDocument(DirectoryReader reader, Set types, C final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); document.add(new StringField(TYPE_FIELD_NAME, typeName, Field.Store.NO)); consumer.accept(document); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 7b264ac93511b..532a2ff024e8f 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -290,7 +290,7 @@ public void testRefreshActuallyWorks() throws Exception { // we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, search.totalHits.value); + assertEquals(1, search.totalHits.value()); } }); assertFalse(refreshTask.isClosed()); @@ -304,7 +304,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the force refresh we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(2, search.totalHits.value); + assertEquals(2, search.totalHits.value()); } }); prepareIndex("test").setId("2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); @@ -312,7 +312,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the scheduled refresh try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs 
search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(3, search.totalHits.value); + assertEquals(3, search.totalHits.value()); } }); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 64160c83646fa..10b0b54d2d7e2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -52,7 +52,7 @@ public void testResolveDefaultCodecs() throws Exception { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch814Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch816Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java index d43a1e09d71a3..12a17f5c263a8 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java @@ -115,7 +115,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { assertEquals(0, field.nextOrd()); BytesRef scratch = field.lookupOrd(0); assertEquals("value", scratch.utf8ToString()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, field.nextOrd()); } assertEquals(DocIdSetIterator.NO_MORE_DOCS, field.nextDoc()); for (int i = 0; i < NUM_DOCS; i++) { @@ -126,7 +125,6 @@ public void testSortedSetDocValuesSingleUniqueValue() throws IOException { BytesRef scratch = fieldN.lookupOrd(0); 
assertEquals("value" + i, scratch.utf8ToString()); assertEquals(DocIdSetIterator.NO_MORE_DOCS, fieldN.nextDoc()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, fieldN.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java new file mode 100644 index 0000000000000..099b59808ef4a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.index.codec.tsdb; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; + +/** Tests ES87TSDBDocValuesFormat with custom skipper interval size. 
*/ +public class ES87TSDBDocValuesFormatVariableSkipIntervalTests extends BaseDocValuesFormatTestCase { + + @Override + protected Codec getCodec() { + // small interval size to test with many intervals + return TestUtil.alwaysDocValuesFormat(new ES87TSDBDocValuesFormat(random().nextInt(4, 16))); + } + + public void testSkipIndexIntervalSize() { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new ES87TSDBDocValuesFormat(random().nextInt(Integer.MIN_VALUE, 2)) + ); + assertTrue(ex.getMessage().contains("skipIndexIntervalSize must be > 1")); + } + + public void testSkipperAllEqualValue() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int numDocs = atLeast(100); + for (int i = 0; i < numDocs; i++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + skipper.advance(0); + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs, skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on different value + public void testSkipperFewValuesSorted() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + boolean reverse = random().nextBoolean(); + config.setIndexSort(new Sort(new SortField("dv", SortField.Type.LONG, reverse))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, 
config)) { + final int intervals = random().nextInt(2, 10); + final int[] numDocs = new int[intervals]; + for (int i = 0; i < intervals; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", i)); + writer.addDocument(doc); + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + if (reverse) { + for (int i = intervals - 1; i >= 0; i--) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } else { + for (int i = 0; i < intervals; i++) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on empty doc values + public void testSkipperAllEqualValueWithGaps() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + 
doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + // add doc with empty "dv" + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on multi-values + public void testSkipperAllEqualValueWithMultiValues() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + int docs = random().nextInt(10) + 16; + numDocs[i] += docs; + for (int j = 0; j < docs; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + if (i != gaps - 1) { + // add doc with mutivalues + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + 
numDocs[i + 1] = 1; + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java index 8f0a306e1eb3b..86b60d9984de5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; @@ -110,8 +111,9 @@ public void testRandom() throws Exception { totalSize += vectorValues.size(); StoredFields storedFields = ctx.reader().storedFields(); int docId; - while ((docId = vectorValues.nextDoc()) != NO_MORE_DOCS) { - byte[] v = vectorValues.vectorValue(); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + while ((docId = iterator.nextDoc()) != NO_MORE_DOCS) { + byte[] v = vectorValues.vectorValue(iterator.index()); assertEquals(dimension, v.length); String idString = 
storedFields.document(docId).getField("id").stringValue(); int id = Integer.parseInt(idString); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index 5a00e90e6ffa8..aa50bc26c4443 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 2b70ad657ea3c..8cb927036588a 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 
@@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index 8d7c5b5e4343f..f89b481a13fd8 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -19,6 +19,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.VectorSimilarityFunction; @@ -41,7 +42,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); @@ -68,9 +69,10 @@ public void testAddIndexesDirectory0FS() throws Exception { try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = 
getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); - assertEquals(0, vectorValues.vectorValue()[0], 0); - assertEquals(NO_MORE_DOCS, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertEquals(0, vectorValues.vectorValue(iterator.index())[0], 0); + assertEquals(NO_MORE_DOCS, iterator.nextDoc()); } } } @@ -110,12 +112,13 @@ private void testAddIndexesDirectory01FS(VectorSimilarityFunction similarityFunc try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); // The merge order is randomized, we might get 1 first, or 2 - float value = vectorValues.vectorValue()[0]; + float value = vectorValues.vectorValue(iterator.index())[0]; assertTrue(value == 1 || value == 2); - assertEquals(1, vectorValues.nextDoc()); - value += vectorValues.vectorValue()[0]; + assertEquals(1, iterator.nextDoc()); + value += vectorValues.vectorValue(iterator.index())[0]; assertEquals(3f, value, 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index bae73cc40f5d4..90d2584feb3f2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import 
org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index 2561d17965bc4..add90ea271fa1 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -11,7 +11,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene99Codec() { + return new Lucene912Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index a56d5f1c8084b..c3fea6c7a189b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import 
org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -35,13 +35,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 211c564650317..71c7464657e72 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch814Codec; 
+import org.elasticsearch.index.codec.Elasticsearch816Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 077569d150daa..02a1b10697907 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch814Codec; +import org.elasticsearch.index.codec.Elasticsearch816Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 9837eba25f5b7..6565a11a860ec 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import 
org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.elasticsearch.ElasticsearchException; @@ -44,8 +44,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion99PostingsFormat(); - indexWriterConfig.setCodec(new Lucene99Codec() { + final PostingsFormat postingsFormat = new Completion912PostingsFormat(); + indexWriterConfig.setCodec(new Lucene912Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 883723de31d46..61a388b53bb7e 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -266,7 +266,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + 
org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("test", luceneDoc.get("value")); } @@ -279,7 +279,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("updated", luceneDoc.get("value")); } @@ -748,7 +748,7 @@ public void testTranslogRecoveryWithMultipleGenerations() throws IOException { recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), docs); - assertEquals(docs, topDocs.totalHits.value); + assertEquals(docs, topDocs.totalHits.value()); } } finally { IOUtils.close(initialEngine, recoveringEngine, store); @@ -2433,7 +2433,7 @@ class OpAndVersion { Engine.Get engineGet = new Engine.Get(true, false, doc.id()); try (Engine.GetResult get = engine.get(engineGet, mappingLookup, documentParser, randomSearcherWrapper())) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); String removed = op % 3 == 0 && values.size() > 0 ? 
values.remove(0) : null; String added = "v_" + idGenerator.incrementAndGet(); @@ -2479,7 +2479,7 @@ class OpAndVersion { Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id()), mappingLookup, documentParser, randomSearcherWrapper()) ) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); assertThat(currentValues, equalTo(new HashSet<>(values))); } @@ -3423,7 +3423,7 @@ public void testSkipTranslogReplay() throws IOException { engine.skipTranslogRecovery(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); - assertThat(topDocs.totalHits.value, equalTo(0L)); + assertThat(topDocs.totalHits.value(), equalTo(0L)); } } } @@ -3513,7 +3513,7 @@ public void testTranslogReplay() throws IOException { assertThat(result.getVersion(), equalTo(2L)); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } engine.close(); @@ -3522,7 +3522,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("warm_up"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } assertEquals(flush ? 
1 : 2, translogHandler.appliedOperations()); engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get())); @@ -3533,7 +3533,7 @@ public void testTranslogReplay() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs); - assertThat(topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), equalTo((long) numDocs)); } } @@ -3889,7 +3889,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } operation = appendOnlyPrimary(doc, false, 1, create); retry = appendOnlyPrimary(doc, true, 1, create); @@ -3924,7 +3924,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -3982,7 +3982,7 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } } @@ -4006,7 +4006,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } final boolean create = 
randomBoolean(); @@ -4046,7 +4046,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4091,12 +4091,12 @@ public void testDoubleDeliveryReplica() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { List ops = readAllOperationsInLucene(engine); @@ -4171,7 +4171,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } index = new Engine.Index( @@ -4193,7 +4193,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4263,7 +4263,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs 
= searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } Engine.Index secondIndexRequestReplica = new Engine.Index( @@ -4284,7 +4284,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -5677,7 +5677,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs search = searcher.search(new MatchAllDocsQuery(), searcher.getIndexReader().numDocs()); for (int i = 0; i < search.scoreDocs.length; i++) { - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[i].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[i].doc); assertEquals("updated", luceneDoc.get("value")); } int totalNumDocs = numDocs - numDeletes.get(); @@ -6665,7 +6665,7 @@ public void testStoreHonorsLuceneVersion() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { LeafReader leafReader = getOnlyLeafReader(searcher.getIndexReader()); - assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().getCreatedVersionMajor()); + assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().createdVersionMajor()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index a3a21fc32e546..b6be13b9f2513 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.Constants; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; @@ -71,22 +70,16 @@ public void testRamBytesUsed() throws Exception { } actualRamBytesUsed = RamUsageTester.ramUsed(map); estimatedRamBytesUsed = map.ramBytesUsed(); - long tolerance; - if (Constants.JRE_IS_MINIMUM_JAVA9) { - // With Java 9, RamUsageTester computes the memory usage of maps as - // the memory usage of an array that would contain exactly all keys - // and values. This is an under-estimation of the actual memory - // usage since it ignores the impact of the load factor and of the - // linked list/tree that is used to resolve collisions. So we use a - // bigger tolerance. - // less than 50% off - tolerance = actualRamBytesUsed / 2; - } else { - // Java 8 is more accurate by doing reflection into the actual JDK classes - // so we give it a lower error bound. - // less than 25% off - tolerance = actualRamBytesUsed / 4; - } + + // Since Java 9, RamUsageTester computes the memory usage of maps as + // the memory usage of an array that would contain exactly all keys + // and values. This is an under-estimation of the actual memory + // usage since it ignores the impact of the load factor and of the + // linked list/tree that is used to resolve collisions. So we use a + // bigger tolerance. 
+ // less than 50% off + long tolerance = actualRamBytesUsed / 2; + assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java index e7e668415cdd4..c0e365909429a 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; @@ -69,8 +70,9 @@ public void testPruneAll() throws IOException { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); if (pruneIdField) { assertEquals(1, document.getFields().size()); assertEquals("source", document.getFields().get(0).name()); @@ -151,8 +153,9 @@ public void testPruneSome() throws IOException { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); 
assertTrue(collect.contains("even")); @@ -192,8 +195,9 @@ public void testPruneNone() throws IOException { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("extra_source")); diff --git a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java index a6e56c4137028..49036324e722e 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java @@ -80,7 +80,7 @@ static Segment randomSegment() { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_8_0_0; + segment.version = Version.LUCENE_9_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.segmentSort = randomIndexSort(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index b7793a644f8b8..9d0a9cdeb1968 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -113,7 +113,7 @@ public void testSingleValueAllSet() throws Exception { TopFieldDocs topDocs; SortField sortField = 
indexFieldData.sortField(null, MultiValueMode.MIN, null, false); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[0]).fields[0]), equalTo(one())); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -123,7 +123,7 @@ public void testSingleValueAllSet() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(1)); @@ -193,14 +193,14 @@ public void testMultiValueAllSet() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(2)); sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); @@ -258,7 +258,7 @@ public void 
testSortMultiValuesFields() throws Exception { IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(7)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("!08")); @@ -279,7 +279,7 @@ public void testSortMultiValuesFields() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(6)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("10")); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index ac77f147a7ce6..48d6cabefe345 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -262,10 +262,10 @@ public void testActualMissingValue(boolean reverse) throws IOException { randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField) ); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); BytesRef previousValue = reverse ? 
UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); final BytesRef value = new BytesRef(docValue == null ? missingValue : docValue); if (reverse) { assertTrue(previousValue.compareTo(value) >= 0); @@ -321,10 +321,10 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { new Sort(sortField) ); - assertThat(topDocs.totalHits.value, lessThanOrEqualTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), lessThanOrEqualTo((long) numDocs)); BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); if (first && docValue == null) { assertNull(previousValue); } else if (first == false && docValue != null) { @@ -414,7 +414,7 @@ public void testNestedSorting(MultiValueMode sortMode) throws IOException { assertTrue("expected " + docID + " to be a parent", parents.get(docID)); BytesRef cmpValue = null; for (int child = parents.prevSetBit(docID - 1) + 1; child < docID; ++child) { - String[] sVals = searcher.doc(child).getValues("text"); + String[] sVals = searcher.storedFields().document(child).getValues("text"); final BytesRef[] vals; if (sVals.length == 0) { vals = new BytesRef[0]; @@ -498,15 +498,11 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(1)); assertTrue(values.advanceExact(2)); ord = values.nextOrd(); assertThat(ord, 
equalTo(4L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Second segment leaf = topLevelReader.leaves().get(1); @@ -522,8 +518,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertTrue(values.advanceExact(1)); ord = values.nextOrd(); assertThat(ord, equalTo(7L)); @@ -534,8 +528,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(2)); assertTrue(values.advanceExact(3)); ord = values.nextOrd(); @@ -547,8 +539,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(11L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Third segment leaf = topLevelReader.leaves().get(2); @@ -564,8 +554,6 @@ public void testGlobalOrdinals() throws Exception { ord = values.nextOrd(); assertThat(ord, equalTo(2L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); } public void testTermsEnum() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java index cb6732ce8bb7d..aa23dc6da19df 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java @@ -108,7 +108,6 @@ public int compare(OrdAndId o1, OrdAndId o2) { for (Long ord : docOrds) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, docs.nextOrd()); } for (int i = docId + 1; i < ordAndId.id; i++) { assertFalse(singleOrds.advanceExact(i)); @@ -257,7 +256,6 @@ private void assertEquals(SortedSetDocValues docs, long[][] ordinalPlan) throws for (long ord : ords) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertThat(docs.nextOrd(), equalTo(SortedSetDocValues.NO_MORE_ORDS)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 86ddbbbc97599..81be71aec23c8 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -47,7 +47,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + r1 -> assertThat(r1.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(0, 1); @@ -57,7 +57,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + r2 -> assertThat(r2.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(1, 1); @@ -72,7 +72,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r3 -> 
assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + r3 -> assertThat(r3.getHits().getTotalHits().value(), equalTo(5L)) ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index ddba993fd41cc..4aa983a78b07b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -121,8 +122,15 @@ public void testSort() throws IOException { BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [true]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [false]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [true]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index f9f4cba7848a5..134d21ba475b7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -16,7 +16,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -151,7 +151,7 @@ public void testPostingsFormat() throws IOException { Codec codec = codecService.codec("default"); if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { assertThat(codec, instanceOf(PerFieldMapperCodec.class)); - assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(Completion99PostingsFormat.class)); + assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(Completion912PostingsFormat.class)); } else { if (codec instanceof CodecService.DeduplicateFieldInfosCodec deduplicateFieldInfosCodec) { codec = deduplicateFieldInfosCodec.delegate(); @@ -159,7 +159,7 @@ public void testPostingsFormat() throws IOException { assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class)); assertThat( ((LegacyPerFieldMapperCodec) codec).getPostingsFormatForField("field"), - instanceOf(Completion99PostingsFormat.class) + instanceOf(Completion912PostingsFormat.class) ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java index 
2627ae9a39839..e454a4ffa0c8d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; @@ -231,20 +232,26 @@ public void testTermIntervals() throws IOException { public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); + assertEquals(Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), prefixIntervals); } public void testWildcardIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRegexpIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertEquals(Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), regexpIntervals); } public void testFuzzyIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); - IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), 
"foo"); + IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), "foo"); assertEquals(expectedIntervals, fuzzyIntervals); } @@ -259,6 +266,15 @@ public void testWildcardIntervalsWithIndexedPrefixes() { ConstantScoreTextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRangeIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + rangeIntervals + ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 2c78f5f7fee20..f55d213bea581 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -38,25 +38,25 @@ public void testDoubleIndexingSameDoc() throws Exception { }, reader -> { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10); 
- assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); }); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index 48879cdd0d77e..140137015d98a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -120,9 +121,19 @@ public void testSort() throws IOException { DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - 
assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1.1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2.1]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4.2]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4.2]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 4284bc00cfc15..0182da8ade48a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -78,5 +78,4 @@ public void testUsingEnabledSettingThrows() { ex.getMessage() ); } - } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java index 72055940b8970..4cc447d97291c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; +import java.util.List; + public class IdFieldTypeTests extends ESTestCase { public void testRangeQuery() { @@ -49,7 +51,7 @@ public void testTermsQuery() { 
Mockito.when(context.indexVersionCreated()).thenReturn(IndexVersion.current()); MappedFieldType ft = new ProvidedIdFieldMapper.IdFieldType(() -> false); Query query = ft.termQuery("id", context); - assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); + assertEquals(new TermInSetQuery("_id", List.of(Uid.encodeId("id"))), query); } public void testIsAggregatable() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java index 4170adf0a8508..8f209fb78fc64 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java @@ -230,13 +230,8 @@ public void testAutomatonFromIPv6Group() throws UnknownHostException { } private static CompiledAutomaton compileAutomaton(Automaton automaton) { - CompiledAutomaton compiledAutomaton = new CompiledAutomaton( - automaton, - null, - false, - Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, - true - ); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + CompiledAutomaton compiledAutomaton = new CompiledAutomaton(automaton, false, false, true); return compiledAutomaton; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index acbfe8b8f9b38..281d2993fa29c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import 
org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -125,16 +126,17 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); + StoredFields storedFields = reader.storedFields(); assertThat( - reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.1\"]}") ); assertThat( - reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.2\"]}") ); assertThat( - reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.4\"]}") ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index b4c7ea0ed9508..e3bdb3d45818f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -110,7 +110,7 @@ protected TokenStream normalize(String fieldName, TokenStream in) { public void testTermsQuery() { MappedFieldType ft = new KeywordFieldType("field"); - BytesRef[] terms = new BytesRef[] { new BytesRef("foo"), new BytesRef("bar") }; + List terms = List.of(new BytesRef("foo"), new BytesRef("bar")); assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT)); MappedFieldType ft2 = new 
KeywordFieldType("field", false, true, Map.of()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index f2e788918010c..57d52991a6442 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -115,9 +116,19 @@ public void testSort() throws IOException { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"a\"]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"b\"]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"d\"]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"a\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"b\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"d\"]}") + ); } } } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index 40357399cab5b..a8cb4d51c5efa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; @@ -132,9 +133,19 @@ public void testSort() throws IOException { LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 5360215b5b05b..836b791af23c1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -80,7 +80,7 @@ public void testBytesAndNumericRepresentation() throws Exception { "field10" ); CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); - searcher.doc(0, fieldsVisitor); + searcher.storedFields().document(0, fieldsVisitor); fieldsVisitor.postProcess(mapperService::fieldType); assertThat(fieldsVisitor.fields().size(), equalTo(10)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index c8fcf486068c4..86914cfe9ced7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -879,7 +879,7 @@ protected TokenStreamComponents createComponents(String fieldName) { IndexSearcher searcher = newSearcher(ir); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "Prio 1"); TopDocs td = searcher.search(queryBuilder.toQuery(searchExecutionContext), 1); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); }); Exception e = expectThrows( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java index d73e8546a726a..4d246d3c557a6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; 
+import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -243,20 +244,26 @@ public void testTermIntervals() throws IOException { public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); + assertEquals(Intervals.prefix(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), prefixIntervals); } - public void testWildcardIntervals() throws IOException { + public void testWildcardIntervals() { MappedFieldType ft = createFieldType(); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); } - public void testFuzzyIntervals() throws IOException { + public void testRegexpIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource regexpIntervals = ft.regexpIntervals(new BytesRef("foo"), MOCK_CONTEXT); + assertEquals(Intervals.regexp(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), regexpIntervals); + } + + public void testFuzzyIntervals() { MappedFieldType ft = createFieldType(); IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); - IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), "foo"); + IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), "foo"); assertEquals(expectedIntervals, fuzzyIntervals); } @@ -271,6 +278,15 @@ public void testWildcardIntervalsWithIndexedPrefixes() { TextFieldType ft = createFieldType(); 
ft.setIndexPrefixes(1, 4); IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); - assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); + assertEquals(Intervals.wildcard(new BytesRef("foo"), IndexSearcher.getMaxClauseCount()), wildcardIntervals); + } + + public void testRangeIntervals() { + MappedFieldType ft = createFieldType(); + IntervalsSource rangeIntervals = ft.rangeIntervals(new BytesRef("foo"), new BytesRef("foo1"), true, true, MOCK_CONTEXT); + assertEquals( + Intervals.range(new BytesRef("foo"), new BytesRef("foo1"), true, true, IndexSearcher.getMaxClauseCount()), + rangeIntervals + ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java index 5a34886d73db7..c8d7ad8127b55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java @@ -81,7 +81,7 @@ public void testTermQuery() { public void testTermsQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new TermInSetQuery(ft.name(), new BytesRef("key\0value1"), new BytesRef("key\0value2")); + Query expected = new TermInSetQuery(ft.name(), List.of(new BytesRef("key\0value1"), new BytesRef("key\0value2"))); List terms = new ArrayList<>(); terms.add("value1"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java index f494af259c504..b52192d6e47b4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java @@ 
-23,8 +23,6 @@ import java.io.IOException; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - public class KeyedFlattenedLeafFieldDataTests extends ESTestCase { private LeafOrdinalsFieldData delegate; @@ -121,7 +119,8 @@ public void testNextOrd() throws IOException { docValues.advanceExact(0); int retrievedOrds = 0; - for (long ord = docValues.nextOrd(); ord != NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); assertTrue(0 <= ord && ord < 10); retrievedOrds++; @@ -190,9 +189,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (index == documentOrds.length) { - return NO_MORE_ORDS; - } + assertTrue(index < documentOrds.length); return documentOrds[index++]; } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java index b2ffb779be00b..de4ab0bc5df30 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.test.ESTestCase; @@ -25,7 +26,7 @@ public void testEmptyVectors() throws IOException { wrap(new float[0][0]), wrapMagnitudes(new float[0]) ); - assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.nextDoc()); + assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.iterator().nextDoc()); } public void testRandomVectors() throws IOException { @@ -47,9 +48,10 @@ public void testRandomVectors() throws IOException { wrapMagnitudes(magnitudes) ); + KnnVectorValues.DocIndexIterator iterator = 
normalizedCosineFloatVectorValues.iterator(); for (int i = 0; i < numVectors; i++) { - assertEquals(i, normalizedCosineFloatVectorValues.advance(i)); - assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(), (float) 1e-6); + assertEquals(i, iterator.advance(i)); + assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(iterator.index()), (float) 1e-6); assertEquals(magnitudes[i], normalizedCosineFloatVectorValues.magnitude(), (float) 1e-6); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java index c007156c806eb..baade683a90fd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java @@ -208,7 +208,41 @@ public int size() { } @Override - public byte[] vectorValue() { + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; + } + + @Override + public byte[] vectorValue(int ord) { + assert ord == index; for (int i = 0; i < byteVector.length; i++) { byteVector[i] = (byte) vectors[index][i]; } @@ -216,25 +250,12 @@ public byte[] vectorValue() { } @Override - public int docID() { - return index; - } - - @Override - public int nextDoc() { + public ByteVectorValues copy() { throw new UnsupportedOperationException(); } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } 
- return index = target; - } - - @Override - public VectorScorer scorer(byte[] floats) throws IOException { + public VectorScorer scorer(byte[] floats) { throw new UnsupportedOperationException(); } }; @@ -256,30 +277,51 @@ public int size() { } @Override - public float[] vectorValue() { - return vectors[index]; - } - - @Override - public int docID() { - return index; + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() throws IOException { + return advance(index + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; } @Override - public int nextDoc() { - return advance(index + 1); + public float[] vectorValue(int ord) { + assert ord == index; + return vectors[index]; } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; + public FloatVectorValues copy() { + throw new UnsupportedOperationException(); } @Override - public VectorScorer scorer(float[] floats) throws IOException { + public VectorScorer scorer(float[] floats) { throw new UnsupportedOperationException(); } }; diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 25d4c1008ba7d..458e817a43677 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -207,15 +207,15 @@ public void testMinShouldMatchFilterWithoutShouldClauses() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(booleanQuery.clauses().size(), equalTo(1)); 
BooleanClause booleanClause = booleanQuery.clauses().get(0); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER)); - assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class)); - BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery(); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.FILTER)); + assertThat(booleanClause.query(), instanceOf(BooleanQuery.class)); + BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.query(); // we didn't set minimum should match initially, there are no should clauses so it should be 0 assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(0)); assertThat(innerBooleanQuery.clauses().size(), equalTo(1)); BooleanClause innerBooleanClause = innerBooleanQuery.clauses().get(0); - assertThat(innerBooleanClause.getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(innerBooleanClause.getQuery(), instanceOf(MatchAllDocsQuery.class)); + assertThat(innerBooleanClause.occur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(innerBooleanClause.query(), instanceOf(MatchAllDocsQuery.class)); } public void testMinShouldMatchBiggerThanNumberOfShouldClauses() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java index daaa0c4653d7a..f09e524faf8ff 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CombinedFieldsQueryParsingTests.java @@ -143,8 +143,8 @@ public void testWildcardFieldPattern() throws Exception { BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(CombinedFieldQuery.class)); - assertThat(booleanQuery.clauses().get(1).getQuery(), instanceOf(CombinedFieldQuery.class)); + 
assertThat(booleanQuery.clauses().get(0).query(), instanceOf(CombinedFieldQuery.class)); + assertThat(booleanQuery.clauses().get(1).query(), instanceOf(CombinedFieldQuery.class)); }); } @@ -164,8 +164,8 @@ public void testOperator() throws Exception { assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(minimumShouldMatch)); assertThat(booleanQuery.clauses().size(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(occur)); - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(occur)); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(occur)); } public void testQueryBoost() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index 49c646d243a55..ef5088eef84a7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.document.LatLonPoint; -import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.LongField; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -81,7 +81,7 @@ protected void doAssertLuceneQuery(DistanceFeatureQueryBuilder queryBuilder, Que } else { // NANOSECONDS pivotLong = pivotVal.getNanos(); } - expectedQuery = LongPoint.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); + expectedQuery = LongField.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong); } assertEquals(expectedQuery, query); } diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index ba3350bca8e2c..afa8fc1529604 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -68,7 +68,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, Collection childFields = context.getMatchingFieldNames(field + ".*"); assertThat(booleanQuery.clauses().size(), equalTo(childFields.size())); for (BooleanClause booleanClause : booleanQuery) { - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } else if (context.getFieldType(field).hasDocValues() || context.getFieldType(field).getTextSearchInfo().hasNorms()) { assertThat(constantScoreQuery.getQuery(), instanceOf(FieldExistsQuery.class)); @@ -87,7 +87,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, assertThat(booleanQuery.clauses().size(), equalTo(fields.size())); for (int i = 0; i < fields.size(); i++) { BooleanClause booleanClause = booleanQuery.clauses().get(i); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 6d78f5fffd4b1..aad8275f4749d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -9,14 +9,22 @@ package org.elasticsearch.index.query; +import org.apache.lucene.analysis.core.KeywordAnalyzer; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.TextField; +import 
org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.IntervalQuery; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -34,7 +42,9 @@ import java.util.Collections; import java.util.List; +import static java.util.Collections.singleton; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -606,7 +616,7 @@ public void testPrefixes() throws IOException { } }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.prefix(new BytesRef("term"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.prefix(new BytesRef("term"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String no_positions_json = Strings.format(""" @@ -667,7 +677,13 @@ public void testPrefixes() throws IOException { builder = (IntervalQueryBuilder) parseQuery(short_prefix_json); expected = new IntervalQuery( PREFIXED_FIELD, - Intervals.or(Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.wildcard(new BytesRef("t?"))), Intervals.term("t")) + Intervals.or( + Intervals.fixField( + PREFIXED_FIELD + "._index_prefix", + 
Intervals.wildcard(new BytesRef("t?"), IndexSearcher.getMaxClauseCount()) + ), + Intervals.term("t") + ) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); @@ -726,8 +742,109 @@ public void testPrefixes() throws IOException { assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } - public void testWildcard() throws IOException { + public void testRegexp() throws IOException { + String json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m" + } + } + } + }""", TEXT_FIELD_NAME); + + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.regexp(new BytesRef("te.*m"), IndexSearcher.getMaxClauseCount())); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String no_positions_json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m" + } + } + } + } + """, NO_POSITIONS_FIELD); + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(no_positions_json); + builder1.toQuery(createSearchExecutionContext()); + }); + + String fixed_field_json = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m", + "use_field": "masked_field" + } + } + } + }""", TEXT_FIELD_NAME); + + builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.regexp(new BytesRef("te.*m"), IndexSearcher.getMaxClauseCount())) + ); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + String fixed_field_json_no_positions = Strings.format(""" + { + "intervals": { + "%s": { + "regexp": { + "pattern": "Te.*m", + "use_field": "%s" + } + } + } + }""", TEXT_FIELD_NAME, NO_POSITIONS_FIELD); + expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = 
(IntervalQueryBuilder) parseQuery(fixed_field_json_no_positions); + builder1.toQuery(createSearchExecutionContext()); + }); + } + + public void testMaxExpansionExceptionFailure() throws Exception { + IntervalsSourceProvider provider1 = new IntervalsSourceProvider.Prefix("bar", "keyword", null); + IntervalsSourceProvider provider2 = new IntervalsSourceProvider.Wildcard("bar*", "keyword", null); + IntervalsSourceProvider provider3 = new IntervalsSourceProvider.Fuzzy("bar", 0, true, Fuzziness.fromEdits(1), "keyword", null); + IntervalsSourceProvider provider4 = new IntervalsSourceProvider.Regexp("bar.*", "keyword", null); + IntervalsSourceProvider provider5 = new IntervalsSourceProvider.Range("bar", "bar2", true, true, "keyword", null); + IntervalsSourceProvider provider = randomFrom(provider1, provider2, provider3, provider4, provider5); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory, new KeywordAnalyzer())) { + for (int i = 0; i < 3; i++) { + iw.addDocument(singleton(new TextField(TEXT_FIELD_NAME, "bar" + i, Field.Store.NO))); + } + try (IndexReader reader = iw.getReader()) { + int origBoolMaxClauseCount = IndexSearcher.getMaxClauseCount(); + IndexSearcher.setMaxClauseCount(1); + try { + + IntervalQueryBuilder queryBuilder = new IntervalQueryBuilder(TEXT_FIELD_NAME, provider); + IndexSearcher searcher = newSearcher(reader); + Query query = queryBuilder.toQuery(createSearchExecutionContext(searcher)); + RuntimeException exc = expectThrows( + RuntimeException.class, + () -> query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f).scorer(searcher.getLeafContexts().get(0)) + ); + assertThat(exc.getMessage(), containsString("expanded to too many terms (limit 1)")); + } finally { + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); + } + } + } + } + } + + public void testWildcard() throws IOException { String json = Strings.format(""" { "intervals": { @@ -740,7 +857,7 @@ public void 
testWildcard() throws IOException { }""", TEXT_FIELD_NAME); IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); - Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("te?m"))); + Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("te?m"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String no_positions_json = Strings.format(""" @@ -772,7 +889,7 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(keyword_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("Te?m"))); + expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("Te?m"), IndexSearcher.getMaxClauseCount())); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json = Strings.format(""" @@ -788,7 +905,10 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("te?m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("te?m"), IndexSearcher.getMaxClauseCount())) + ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); String fixed_field_json_no_positions = Strings.format(""" @@ -821,13 +941,22 @@ public void testWildcard() throws IOException { }""", TEXT_FIELD_NAME); builder = (IntervalQueryBuilder) parseQuery(fixed_field_analyzer_json); - expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("Te?m")))); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("Te?m"), IndexSearcher.getMaxClauseCount())) 
+ ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); } private static IntervalsSource buildFuzzySource(String term, String label, int prefixLength, boolean transpositions, int editDistance) { - FuzzyQuery fq = new FuzzyQuery(new Term("field", term), editDistance, prefixLength, 128, transpositions); - return Intervals.multiterm(fq.getAutomata(), label); + FuzzyQuery fq = new FuzzyQuery( + new Term("field", term), + editDistance, + prefixLength, + IndexSearcher.getMaxClauseCount(), + transpositions + ); + return Intervals.multiterm(fq.getAutomata(), IndexSearcher.getMaxClauseCount(), label); } public void testFuzzy() throws IOException { @@ -932,7 +1061,77 @@ public void testFuzzy() throws IOException { Intervals.fixField(MASKED_FIELD, buildFuzzySource("term", "term", 2, true, Fuzziness.ONE.asDistance("term"))) ); assertEquals(expected, builder.toQuery(createSearchExecutionContext())); - } + public void testRange() throws IOException { + String json = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gte": "aaa", + "lte": "aab" + } + } + } + }""", TEXT_FIELD_NAME); + IntervalQueryBuilder builder = (IntervalQueryBuilder) parseQuery(json); + Query expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), true, true, IndexSearcher.getMaxClauseCount()) + ); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + json = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gt": "aaa", + "lt": "aab" + } + } + } + }""", TEXT_FIELD_NAME); + builder = (IntervalQueryBuilder) parseQuery(json); + expected = new IntervalQuery( + TEXT_FIELD_NAME, + Intervals.range(new BytesRef("aaa"), new BytesRef("aab"), false, false, IndexSearcher.getMaxClauseCount()) + ); + assertEquals(expected, builder.toQuery(createSearchExecutionContext())); + + String incomplete_range = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "gt": "aaa" + } + } + } + } + 
""", TEXT_FIELD_NAME); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(incomplete_range); + builder1.toQuery(createSearchExecutionContext()); + }); + assertEquals("Either [lte] or [lt], one of them must be provided", exc.getCause().getMessage()); + + String incomplete_range2 = Strings.format(""" + { + "intervals": { + "%s": { + "range": { + "lt": "aaa" + } + } + } + } + """, TEXT_FIELD_NAME); + exc = expectThrows(IllegalArgumentException.class, () -> { + IntervalQueryBuilder builder1 = (IntervalQueryBuilder) parseQuery(incomplete_range2); + builder1.toQuery(createSearchExecutionContext()); + }); + assertEquals("Either [gte] or [gt], one of them must be provided", exc.getCause().getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java index e471858ce9c5a..79f2dcb61e508 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -37,7 +37,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalToIgnoringCase; -import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.hasSize; public class MatchBoolPrefixQueryBuilderTests extends AbstractQueryTestCase { @@ -101,7 +100,7 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que // all queries except the last should be TermQuery or SynonymQuery final Set allQueriesExceptLast = IntStream.range(0, booleanQuery.clauses().size() - 1) .mapToObj(booleanQuery.clauses()::get) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .collect(Collectors.toSet()); assertThat( 
allQueriesExceptLast, @@ -122,13 +121,13 @@ protected void doAssertLuceneQuery(MatchBoolPrefixQueryBuilder queryBuilder, Que }); // the last query should be PrefixQuery - final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).getQuery(); + final Query shouldBePrefixQuery = booleanQuery.clauses().get(booleanQuery.clauses().size() - 1).query(); assertThat(shouldBePrefixQuery, instanceOf(PrefixQuery.class)); if (queryBuilder.minimumShouldMatch() != null) { final int optionalClauses = (int) booleanQuery.clauses() .stream() - .filter(clause -> clause.getOccur() == BooleanClause.Occur.SHOULD) + .filter(clause -> clause.occur() == BooleanClause.Occur.SHOULD) .count(); final int expected = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch()); assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(expected)); @@ -266,10 +265,12 @@ private static void assertBooleanQuery(Query actual, List expectedClauseQ assertThat(actual, instanceOf(BooleanQuery.class)); final BooleanQuery actualBooleanQuery = (BooleanQuery) actual; assertThat(actualBooleanQuery.clauses(), hasSize(expectedClauseQueries.size())); - assertThat(actualBooleanQuery.clauses(), everyItem(hasProperty("occur", equalTo(BooleanClause.Occur.SHOULD)))); for (int i = 0; i < actualBooleanQuery.clauses().size(); i++) { - final Query clauseQuery = actualBooleanQuery.clauses().get(i).getQuery(); + BooleanClause clause = actualBooleanQuery.clauses().get(i); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + final Query clauseQuery = clause.query(); + assertThat(clauseQuery, equalTo(expectedClauseQueries.get(i))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index e71485647913c..ba46bf76efbfe 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; @@ -163,7 +164,7 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, // calculate expected minimumShouldMatch value int optionalClauses = 0; for (BooleanClause c : bq.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } @@ -527,9 +528,9 @@ public void testAliasWithSynonyms() throws Exception { public void testMaxBooleanClause() { MatchQueryParser query = new MatchQueryParser(createSearchExecutionContext()); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraph(40))); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraphMultiTerms())); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); } private static class MockGraphAnalyzer extends Analyzer { @@ -567,7 +568,7 @@ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraph(int numPos } /** - * Creates a graph token stream with {@link BooleanQuery#getMaxClauseCount()} + * Creates a graph token stream with {@link IndexSearcher#getMaxClauseCount()} * expansions at the last position. 
**/ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms() { @@ -578,7 +579,7 @@ private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms( tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); - for (int i = 0; i < BooleanQuery.getMaxClauseCount(); i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount(); i++) { tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 1)); } return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 7209ee77cb70d..7c21751b4b332 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -246,7 +246,7 @@ private static Fields generateFields(String[] fieldNames, String text) throws IO for (String fieldName : fieldNames) { index.addField(fieldName, text, new WhitespaceAnalyzer()); } - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } @Override @@ -255,7 +255,7 @@ protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; for (BooleanClause booleanClause : booleanQuery) { - if (booleanClause.getQuery() instanceof MoreLikeThisQuery moreLikeThisQuery) { + if (booleanClause.query() instanceof MoreLikeThisQuery moreLikeThisQuery) { assertThat(moreLikeThisQuery.getLikeFields().length, greaterThan(0)); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index f4405229e857e..3dcf00e4f22f5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -749,7 +749,7 @@ public void testToQueryRegExpQueryTooComplex() throws Exception { TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10000 effort.")); } @@ -775,7 +775,7 @@ public void testToQueryRegExpQueryMaxDeterminizedStatesParsing() throws Exceptio TooComplexToDeterminizeException.class, () -> queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10 effort.")); } @@ -925,10 +925,10 @@ public void testToQueryBooleanQueryMultipleBoosts() throws Exception { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(2)); - assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(0).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "foo")))); - assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanQuery.clauses().get(1).getQuery(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); + assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(0).query(), equalTo(new 
TermQuery(new Term(TEXT_FIELD_NAME, "foo")))); + assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanQuery.clauses().get(1).query(), equalTo(new TermQuery(new Term(TEXT_FIELD_NAME, "bar")))); } public void testToQueryPhraseQueryBoostAndSlop() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java new file mode 100644 index 0000000000000..e170faf8043be --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/RangeIntervalsSourceProviderTests.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.index.query.IntervalsSourceProvider.Range; + +public class RangeIntervalsSourceProviderTests extends AbstractXContentSerializingTestCase { + + @Override + protected Range createTestInstance() { + return createRandomRange(); + } + + static Range createRandomRange() { + return new Range( + "a" + randomAlphaOfLengthBetween(1, 10), + "z" + randomAlphaOfLengthBetween(1, 10), + randomBoolean(), + randomBoolean(), + randomBoolean() ? randomAlphaOfLength(10) : null, + randomBoolean() ? 
randomAlphaOfLength(10) : null + ); + } + + @Override + protected Range mutateInstance(Range instance) { + String lowerTerm = instance.getLowerTerm(); + String upperTerm = instance.getUpperTerm(); + boolean includeLower = instance.getIncludeLower(); + boolean includeUpper = instance.getIncludeUpper(); + String analyzer = instance.getAnalyzer(); + String useField = instance.getUseField(); + switch (between(0, 5)) { + case 0 -> lowerTerm = "a" + lowerTerm; + case 1 -> upperTerm = "z" + upperTerm; + case 2 -> includeLower = includeLower == false; + case 3 -> includeUpper = includeUpper == false; + case 4 -> analyzer = randomAlphaOfLength(5); + case 5 -> useField = useField == null ? randomAlphaOfLength(5) : null; + } + return new Range(lowerTerm, upperTerm, includeLower, includeUpper, analyzer, useField); + } + + @Override + protected Writeable.Reader instanceReader() { + return Range::new; + } + + @Override + protected Range doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + Range range = (Range) IntervalsSourceProvider.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return range; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java new file mode 100644 index 0000000000000..ace7350d8d796 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/RegexpIntervalsSourceProviderTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.index.query.IntervalsSourceProvider.Regexp; + +public class RegexpIntervalsSourceProviderTests extends AbstractXContentSerializingTestCase { + + @Override + protected Regexp createTestInstance() { + return createRandomRegexp(); + } + + static Regexp createRandomRegexp() { + return new Regexp( + randomAlphaOfLengthBetween(1, 10), + randomBoolean() ? randomAlphaOfLength(10) : null, + randomBoolean() ? randomAlphaOfLength(10) : null + ); + } + + @Override + protected Regexp mutateInstance(Regexp instance) { + String regexp = instance.getPattern(); + String analyzer = instance.getAnalyzer(); + String useField = instance.getUseField(); + switch (between(0, 2)) { + case 0 -> regexp += "a"; + case 1 -> analyzer = randomAlphaOfLength(5); + case 2 -> useField = useField == null ? 
randomAlphaOfLength(5) : null; + } + return new Regexp(regexp, analyzer, useField); + } + + @Override + protected Writeable.Reader instanceReader() { + return Regexp::new; + } + + @Override + protected Regexp doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + Regexp regexp = (Regexp) IntervalsSourceProvider.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return regexp; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 79c4c9ec5bb20..a84cd60a99e45 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -315,7 +315,7 @@ protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query private static int shouldClauses(BooleanQuery query) { int result = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { result++; } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index cb314472e35b3..283bbbc9b100d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -18,7 +18,6 @@ import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import 
org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; @@ -220,7 +219,7 @@ public void testToQueryInnerTermQuery() throws IOException { assertThat(prefixQuery.getPrefix().text(), equalTo("f")); assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); SpanBooleanQueryRewriteWithMaxClause rewrite = (SpanBooleanQueryRewriteWithMaxClause) wrapper.getRewriteMethod(); - assertThat(rewrite.getMaxExpansions(), equalTo(BooleanQuery.getMaxClauseCount())); + assertThat(rewrite.getMaxExpansions(), equalTo(IndexSearcher.getMaxClauseCount())); assertTrue(rewrite.isHardLimit()); } } @@ -265,8 +264,8 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { iw.addDocument(singleton(new TextField("body", "foo bar" + Integer.toString(i), Field.Store.NO))); } try (IndexReader reader = iw.getReader()) { - int origBoolMaxClauseCount = BooleanQuery.getMaxClauseCount(); - BooleanQuery.setMaxClauseCount(1); + int origBoolMaxClauseCount = IndexSearcher.getMaxClauseCount(); + IndexSearcher.setMaxClauseCount(1); try { QueryBuilder queryBuilder = new SpanMultiTermQueryBuilder(QueryBuilders.prefixQuery("body", "bar")); IndexSearcher searcher = newSearcher(reader); @@ -274,7 +273,7 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { RuntimeException exc = expectThrows(RuntimeException.class, () -> query.rewrite(searcher)); assertThat(exc.getMessage(), containsString("maxClauseCount")); } finally { - BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount); + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 3edf150688384..589019093075d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -238,7 +238,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchField.totalHits.value, equalTo(3L)); + assertThat(topDocsWithMinimumShouldMatchField.totalHits.value(), equalTo(3L)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[1].doc, equalTo(3)); assertThat(topDocsWithMinimumShouldMatchField.scoreDocs[2].doc, equalTo(4)); @@ -249,7 +249,7 @@ public void testDoToQuery() throws Exception { ).doToQuery(context); searcher = newSearcher(ir); TopDocs topDocsWithMinimumShouldMatch = searcher.search(queryWithMinimumShouldMatch, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocsWithMinimumShouldMatch.totalHits.value, equalTo(5L)); + assertThat(topDocsWithMinimumShouldMatch.totalHits.value(), equalTo(5L)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[0].doc, equalTo(1)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[1].doc, equalTo(2)); assertThat(topDocsWithMinimumShouldMatch.scoreDocs[2].doc, equalTo(3)); @@ -266,7 +266,7 @@ public void testDoToQuery() throws Exception { 10, new Sort(SortField.FIELD_DOC) ); - assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value, equalTo(1L)); + assertThat(topDocsWithMinimumShouldMatchNegative.totalHits.value(), equalTo(1L)); assertThat(topDocsWithMinimumShouldMatchNegative.scoreDocs[0].doc, equalTo(5)); } } @@ -310,7 +310,7 @@ public void testDoToQuery_msmScriptField() throws Exception { .doToQuery(context); IndexSearcher searcher = newSearcher(ir); TopDocs topDocs = searcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); 
assertThat(topDocs.scoreDocs[2].doc, equalTo(4)); diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 47c75ee38ee1b..49b1362436ec7 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -305,7 +305,7 @@ public void testConflictingOpsOnReplica() throws Exception { for (IndexShard shard : shards) { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("f", "2")), 10); - assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value); + assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java index 5308f5f5d1f04..e8652e3a0f6d6 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java @@ -225,7 +225,7 @@ public void testNestedSorting() throws Exception { Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(7)); @@ -240,7 +240,7 @@ public void testNestedSorting() throws Exception { sort = new Sort(new 
SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(13)); @@ -258,7 +258,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(12)); @@ -273,7 +273,7 @@ public void testNestedSorting() throws Exception { sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(15)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(3)); @@ -289,7 +289,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, 127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); 
assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(127)); @@ -305,7 +305,7 @@ public void testNestedSorting() throws Exception { nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter)); sort = new Sort(new SortField("field2", nestedComparatorSource)); topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(19)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-127)); @@ -340,7 +340,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java index cecf20360178c..ca176a5402c06 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java @@ -73,7 +73,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, 
equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java index 49d9c0eedd121..60e7473a2101a 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java @@ -76,7 +76,7 @@ protected void assertAvgScoreMode( ); Sort sort = new Sort(new SortField("field2", nestedComparatorSource)); TopDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(11)); assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index e088e8569bf8a..cd6f596cfda05 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -310,7 +311,7 @@ public void testNestedSorting() throws Exception { Sort sort = new Sort(new 
SortField("field2", nestedComparatorSource)); TopFieldDocs topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("a")); @@ -332,7 +333,7 @@ public void testNestedSorting() throws Exception { ); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(7L)); + assertThat(topDocs.totalHits.value(), equalTo(7L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(28)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("o")); @@ -358,7 +359,7 @@ public void testNestedSorting() throws Exception { query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None); sort = new Sort(new SortField("field2", nestedComparatorSource, true)); topDocs = searcher.search(query, 5, sort); - assertThat(topDocs.totalHits.value, equalTo(6L)); + assertThat(topDocs.totalHits.value(), equalTo(6L)); assertThat(topDocs.scoreDocs.length, equalTo(5)); assertThat(topDocs.scoreDocs[0].doc, equalTo(23)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("m")); @@ -620,42 +621,43 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs"))); QueryBuilder queryBuilder = new MatchAllQueryBuilder(); TopFieldDocs topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), 
equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(5L)); + StoredFields storedFields = searcher.storedFields(); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); + assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(234L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(976L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MAX_VALUE)); // Specific genre { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - 
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); } @@ -664,16 +666,16 @@ public void testMultiLevelNestedSorting() throws IOException { sortBuilder.order(SortOrder.DESC); queryBuilder = new MatchAllQueryBuilder(); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(5L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(5L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); + 
assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(849L)); - assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(180L)); - assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(storedFields.document(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -681,26 +683,26 @@ public void testMultiLevelNestedSorting() throws IOException { { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L)); 
queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L)); } @@ -717,10 +719,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); sortBuilder.order(SortOrder.DESC); @@ -730,10 +732,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + 
assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(76L)); } @@ -755,10 +757,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MAX_VALUE)); sortBuilder.order(SortOrder.DESC); @@ -768,10 +770,10 @@ public void testMultiLevelNestedSorting() throws IOException { searchExecutionContext, searcher ); - assertThat(topFields.totalHits.value, equalTo(2L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(2L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); - assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(storedFields.document(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MIN_VALUE)); } @@ -785,26 +787,26 @@ public void 
testMultiLevelNestedSorting() throws IOException { queryBuilder = new TermQueryBuilder("genre", "romance"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); queryBuilder = new TermQueryBuilder("genre", "science fiction"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "horror"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); queryBuilder = new TermQueryBuilder("genre", "cooking"); topFields = search(queryBuilder, sortBuilder, searchExecutionContext, searcher); - assertThat(topFields.totalHits.value, equalTo(1L)); - assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(topFields.totalHits.value(), equalTo(1L)); + assertThat(storedFields.document(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); assertThat(((FieldDoc) 
topFields.scoreDocs[0]).fields[0], equalTo(87L)); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java index 8355f0156d0c3..7c7313d6b6516 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexReaderWrapperTests.java @@ -52,7 +52,7 @@ public void testReaderCloseListenerIsCalled() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( "field", @@ -82,7 +82,7 @@ public void testReaderCloseListenerIsCalled() throws IOException { } outerCount.incrementAndGet(); }); - assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); wrap.close(); assertFalse("wrapped reader is closed", wrap.getIndexReader().tryIncRef()); assertEquals(sourceRefCount, open.getRefCount()); @@ -106,7 +106,7 @@ public void testIsCacheable() throws IOException { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); final AtomicInteger 
closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( @@ -153,7 +153,7 @@ public void testAlwaysWrapWithFieldUsageTrackingDirectoryReader() throws IOExcep writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); CheckedFunction wrapper = directoryReader -> directoryReader; try ( diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index f15506676dc39..d480f7bfc8d7f 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2793,9 +2793,9 @@ public void testReaderWrapperIsUsed() throws IOException { } try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } CheckedFunction wrapper = reader -> new FieldMaskingReader("foo", reader); closeShards(shard); @@ -2815,9 +2815,9 @@ public void testReaderWrapperIsUsed() throws IOException { try (Engine.Searcher searcher = newShard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 0); + assertEquals(search.totalHits.value(), 0); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 
10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } try (Engine.GetResult getResult = newShard.get(new Engine.Get(false, false, "1"))) { assertTrue(getResult.exists()); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 6fa405c091da1..ccf0bbebcc354 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -437,9 +437,8 @@ public void testLotsOfThreads() throws Exception { ) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.document( - getResult.docIdAndVersion().docId - ); + org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.storedFields() + .document(getResult.docIdAndVersion().docId); assertThat(document.getValues("test"), arrayContaining(testFieldValue)); } } catch (Exception t) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java index 70e5143552235..aa89f31757ef4 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; @@ -172,6 +173,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole 
int doc; int numActual = 0; int lastDoc = 0; + StoredFields storedFields = reader.storedFields(); while ((doc = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { lastDoc = doc; while (shard_id.nextDoc() < doc) { @@ -181,7 +183,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole } assertEquals(shard_id.docID(), doc); long shardID = shard_id.nextValue(); - BytesRef id = reader.document(doc).getBinaryValue("_id"); + BytesRef id = storedFields.document(doc).getBinaryValue("_id"); String actualId = Uid.decodeId(id.bytes, id.offset, id.length); assertNotEquals(ctx.reader() + " docID: " + doc + " actualID: " + actualId, shardID, targetShardId); } diff --git a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java index b05f287e628a3..fa5f713dfd672 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java @@ -144,7 +144,7 @@ public double execute( 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); r.close(); @@ -236,7 +236,7 @@ public double execute( searcher.setSimilarity(sim); Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(initCalled.get()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); diff --git a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index 394ce35c6b493..38e6ca0be0647 100644 --- 
a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -33,10 +33,16 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactoryTests extends ESTestCase { + final PreLoadExposingFsDirectoryFactory fsDirectoryFactory = new PreLoadExposingFsDirectoryFactory(); + public void testPreload() throws IOException { doTestPreload(); doTestPreload("nvd", "dvd", "tim"); @@ -60,10 +66,11 @@ public void testPreload() throws IOException { assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.tmp", newIOContext(random()))); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.fdt__0.tmp", newIOContext(random()))); MMapDirectory delegate = hybridDirectory.getDelegate(); - assertThat(delegate, Matchers.instanceOf(FsDirectoryFactory.PreLoadMMapDirectory.class)); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) delegate; - assertTrue(preLoadMMapDirectory.useDelegate("foo.dvd")); - assertTrue(preLoadMMapDirectory.useDelegate("foo.tmp")); + assertThat(delegate, Matchers.instanceOf(MMapDirectory.class)); + var func = fsDirectoryFactory.preLoadFuncMap.get(delegate); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); + fsDirectoryFactory.preLoadFuncMap.clear(); } } @@ -72,7 +79,21 @@ private Directory newDirectory(Settings settings) throws IOException { Path tempDir = createTempDir().resolve(idxSettings.getUUID()).resolve("0"); Files.createDirectories(tempDir); ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(idxSettings.getIndex(), 0)); - return new FsDirectoryFactory().newDirectory(idxSettings, path); + return 
fsDirectoryFactory.newDirectory(idxSettings, path); + } + + static class PreLoadExposingFsDirectoryFactory extends FsDirectoryFactory { + + // expose for testing + final Map> preLoadFuncMap = new HashMap<>(); + + @Override + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + var preLoadFunc = FsDirectoryFactory.getPreloadFunc(preLoadExtensions); + mMapDirectory.setPreload(preLoadFunc); + preLoadFuncMap.put(mMapDirectory, preLoadFunc); + return mMapDirectory; + } } private void doTestPreload(String... preload) throws IOException { @@ -85,26 +106,23 @@ private void doTestPreload(String... preload) throws IOException { assertSame(dir, directory); // prevent warnings assertFalse(directory instanceof SleepingLockWrapper); var mmapDirectory = FilterDirectory.unwrap(directory); + assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); if (preload.length == 0) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertFalse(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); } else if (Arrays.asList(preload).contains("*")) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertTrue(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); } else { - assertTrue(directory.toString(), mmapDirectory instanceof FsDirectoryFactory.PreLoadMMapDirectory); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) mmapDirectory; + var func = fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + 
assertTrue(func.test("foo.tmp", newIOContext(random()))); for (String ext : preload) { - assertTrue("ext: " + ext, preLoadMMapDirectory.useDelegate("foo." + ext)); - assertTrue("ext: " + ext, preLoadMMapDirectory.getDelegate().getPreload()); + assertTrue("ext: " + ext, func.test("foo." + ext, newIOContext(random()))); } - assertFalse(preLoadMMapDirectory.useDelegate("XXX")); - assertFalse(preLoadMMapDirectory.getPreload()); - preLoadMMapDirectory.close(); - expectThrows( - AlreadyClosedException.class, - () -> preLoadMMapDirectory.getDelegate().openInput("foo.tmp", IOContext.DEFAULT) - ); + assertFalse(func.test("XXX", newIOContext(random()))); + mmapDirectory.close(); + expectThrows(AlreadyClosedException.class, () -> mmapDirectory.openInput("foo.tmp", IOContext.DEFAULT)); } } expectThrows( @@ -148,7 +166,7 @@ private void doTestStoreDirectory(Path tempDir, String typeSettingValue, IndexMo ); break; case FS: - if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (Constants.JRE_IS_64BIT) { assertTrue(FsDirectoryFactory.isHybridFs(directory)); } else { assertTrue(directory.toString(), directory instanceof NIOFSDirectory); diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 0b936384bf343..36ece00ccc0ca 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -274,7 +274,7 @@ public IndexInput openInput(String name, IOContext context) throws IOException { metadata = store.getMetadata(randomBoolean() ? 
indexCommit : null); assertThat(metadata.fileMetadataMap().isEmpty(), is(false)); for (StoreFileMetadata meta : metadata) { - try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) { + try (IndexInput input = store.directory().openInput(meta.name(), IOContext.READONCE)) { String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input)); assertThat("File: " + meta.name() + " has a different checksum", meta.checksum(), equalTo(checksum)); assertThat(meta.writtenBy(), equalTo(Version.LATEST.toString())); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index ebffd54a742ce..4f73672471942 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -70,8 +70,9 @@ public String toString(String field) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + return new DefaultScorerSupplier(scorer); } @Override @@ -348,16 +349,22 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return weight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - scorerCalled = true; - return weight.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { scorerSupplierCalled = true; - return 
weight.scorerSupplier(context); + ScorerSupplier inScorerSupplier = weight.scorerSupplier(context); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + scorerCalled = true; + return inScorerSupplier.get(leadCost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index dfd71bba0208c..773c660caa1c6 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -421,8 +421,8 @@ public BytesReference get() { try (BytesStreamOutput out = new BytesStreamOutput()) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1); - assertEquals(1, topDocs.totalHits.value); - Document document = reader.document(topDocs.scoreDocs[0].doc); + assertEquals(1, topDocs.totalHits.value()); + Document document = reader.storedFields().document(topDocs.scoreDocs[0].doc); out.writeString(document.get("value")); loadedFromCache = false; return out.bytes(); diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java index baa35101c1c87..1c1a9a645b99b 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java @@ -114,11 +114,11 @@ private > void assertSearchCollapse( TopFieldDocs topDocs = searcher.search(query, topFieldCollectorManager); TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); 
assertEquals(sortField.getField(), collapseTopFieldDocs.field); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); + assertEquals(totalHits, topDocs.totalHits.value()); Object currentValue = null; int topDocsIndex = 0; diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index 90adb2d0ffcce..30c68fe708c83 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -49,11 +49,11 @@ public class SinglePassGroupingCollectorTests extends ESTestCase { private static class SegmentSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; SegmentSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -62,7 +62,7 @@ public void search(Weight weight, Collector collector) throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } @@ -140,10 +140,10 @@ private > void assertSearchCollapse( TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(collapseField.getField(), 
collapseTopFieldDocs.field); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); assertEquals(totalHits, topDocs.scoreDocs.length); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(totalHits, topDocs.totalHits.value()); Set seen = new HashSet<>(); // collapse field is the last sort diff --git a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java index 71a2703555318..076cd0af1bf26 100644 --- a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java @@ -88,7 +88,7 @@ public void testDismaxQuery() throws IOException { query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { BooleanQuery.Builder query = new BooleanQuery.Builder(); @@ -110,7 +110,7 @@ public void testDismaxQuery() throws IOException { query.add(gen, BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 4); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(1), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } 
{ @@ -120,8 +120,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("unknown_field")) { assertThat(termStates.docFreq(), equalTo(0)); @@ -131,7 +131,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), greaterThan(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo((long) iters + username.length)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo((long) iters + username.length)); } { // test with an unknown field and an unknown term @@ -140,13 +140,13 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); assertThat(termStates.docFreq(), equalTo(0)); assertThat(termStates.totalTermFreq(), equalTo(0L)); } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(0L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(0L)); } { // test with an unknown field and a term that is present in only one field @@ -155,8 +155,8 @@ public void testDismaxQuery() throws IOException { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause 
clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("username")) { assertThat(termStates.docFreq(), equalTo(1)); @@ -166,7 +166,7 @@ public void testDismaxQuery() throws IOException { assertThat(termStates.totalTermFreq(), equalTo(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(1L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(1L)); } reader.close(); w.close(); @@ -250,7 +250,7 @@ public void testMinTTF() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } reader.close(); w.close(); @@ -298,7 +298,7 @@ public void testRandomFields() throws IOException { String[] fieldNames = fields.keySet().toArray(new String[0]); Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fieldNames, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); - assertTrue(search.totalHits.value > 0); + assertTrue(search.totalHits.value() > 0); assertTrue(search.scoreDocs.length > 0); } reader.close(); @@ -332,7 +332,7 @@ public void testMissingFields() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), 
reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); reader.close(); w.close(); diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index f5266568e6fdf..126641037fde7 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -141,7 +141,7 @@ private void assertHighlightOneDoc( IndexSearcher searcher = newSearcher(reader); iw.close(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java index b1b6a11764120..ab9bbf7040bfd 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -336,10 +337,11 @@ private void assertAllDocs(Set terms, Function fun withIndexSearcher(searcher -> { for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { IndexReader reader = 
leafReaderContext.reader(); + StoredFields storedFields = reader.storedFields(); DocIdSetIterator docIdSetIterator = DocIdSetIterator.all(reader.maxDoc()); ScriptTermStats termStats = new ScriptTermStats(searcher, leafReaderContext, docIdSetIterator::docID, terms); while (docIdSetIterator.nextDoc() <= reader.maxDoc()) { - String docId = reader.document(docIdSetIterator.docID()).get("id"); + String docId = storedFields.document(docIdSetIterator.docID()).get("id"); if (expectedValues.containsKey(docId)) { assertThat(function.apply(termStats), expectedValues.get(docId)); } diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 6a6f5dc44ef6f..663b39d116913 100644 --- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -714,11 +714,8 @@ public void testMultiValuedOrds() throws Exception { @Override public long nextOrd() { - if (i < array[doc].length) { - return array[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < array[doc].length; + return array[doc][i++]; } @Override @@ -762,7 +759,8 @@ private void verifySortedSet(Supplier supplier, int maxDoc) } int expected = -1; if (values.advanceExact(i)) { - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int j = 0; j < values.docValueCount(); j++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { @@ -810,7 +808,8 @@ private void verifySortedSet( if (++count > maxChildren) { break; } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java 
b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index fff5dcb4bb80b..f3357a72c9243 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; @@ -205,15 +206,17 @@ public void testExitableDirectoryReaderVectors() throws IOException { cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues vectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); cancelled.set(true); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); // On the first doc when already canceled, it throws - expectThrows(TaskCancelledException.class, vectorValues::nextDoc); + expectThrows(TaskCancelledException.class, iterator::nextDoc); cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues uncancelledVectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); + uncancelledVectorValues.iterator(); cancelled.set(true); searcher.removeQueryCancellation(cancellation); // On the first doc when already canceled, it throws, but with the cancellation removed, it should not - uncancelledVectorValues.nextDoc(); + iterator.nextDoc(); } private static class PointValuesIntersectVisitor implements PointValues.IntersectVisitor { diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 2cf45e463346b..5dc07a41b3f8c 100644 --- 
a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -772,7 +772,7 @@ public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) ), (response) -> { SearchHits hits = response.getHits(); - assertEquals(hits.getTotalHits().value, numDocs); + assertEquals(hits.getTotalHits().value(), numDocs); assertEquals(hits.getHits().length, 2); int index = 0; for (SearchHit hit : hits.getHits()) { @@ -2505,7 +2505,7 @@ public void testWaitOnRefresh() throws ExecutionException, InterruptedException ); PlainActionFuture future = new PlainActionFuture<>(); service.executeQueryPhase(request, task, future.delegateFailure((l, r) -> { - assertEquals(1, r.queryResult().getTotalHits().value); + assertEquals(1, r.queryResult().getTotalHits().value()); l.onResponse(null); })); future.get(); @@ -2714,7 +2714,7 @@ public void testEnableSearchWorkerThreads() throws IOException { SearchShardTask task = new SearchShardTask(0, "type", "action", "description", null, emptyMap()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } try { @@ -2725,7 +2725,7 @@ public void testEnableSearchWorkerThreads() throws IOException { .get(); assertTrue(response.isAcknowledged()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNull(searchContext.searcher().getExecutor()); + assertFalse(searchContext.searcher().hasExecutor()); } } finally { // reset original default setting @@ -2735,20 +2735,20 @@ public void testEnableSearchWorkerThreads() throws IOException { .setPersistentSettings(Settings.builder().putNull(SEARCH_WORKER_THREADS_ENABLED.getKey()).build()) .get(); try (SearchContext searchContext = 
service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } } } } /** - * Verify that a single slice is created for requests that don't support parallel collection, while computation - * is still offloaded to the worker threads. Also ensure multiple slices are created for requests that do support + * Verify that a single slice is created for requests that don't support parallel collection, while an executor is still + * provided to the searcher to parallelize other operations. Also ensure multiple slices are created for requests that do support * parallel collection. */ public void testSlicingBehaviourForParallelCollection() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY); - ThreadPoolExecutor executor = (ThreadPoolExecutor) indexService.getThreadPool().executor(ThreadPool.Names.SEARCH_WORKER); + ThreadPoolExecutor executor = (ThreadPoolExecutor) indexService.getThreadPool().executor(ThreadPool.Names.SEARCH); final int configuredMaxPoolSize = 10; executor.setMaximumPoolSize(configuredMaxPoolSize); // We set this explicitly to be independent of CPU cores. 
int numDocs = randomIntBetween(50, 100); @@ -2778,7 +2778,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2795,11 +2795,11 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "DFS supports parallel collection, so the number of slices should be > 1.", - expectedSlices, + expectedSlices - 1, // one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2808,7 +2808,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2825,11 +2825,11 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new 
TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", - expectedSlices, + expectedSlices - 1, // one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2838,13 +2838,14 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( - "The number of slices should be 1 as FETCH does not support parallel collection.", - 1, + "The number of slices should be 1 as FETCH does not support parallel collection and thus runs on the calling" + + " thread.", + 0, executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2853,13 +2854,13 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as NONE does not support parallel collection.", - 1, + 0, // zero since one slice executes on the calling thread 
executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2876,13 +2877,13 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 when QUERY parallel collection is disabled.", - 1, + 0, // zero since one slice executes on the calling thread executor.getCompletedTaskCount() - priorExecutorTaskCount ) ); @@ -2898,7 +2899,7 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2915,11 +2916,11 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", - expectedSlices, + expectedSlices - 1, // one slice executes on the calling thread executor.getCompletedTaskCount() - 
priorExecutorTaskCount ) ); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index 6b4618bf3257a..ac5d886c9ba10 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -33,14 +33,8 @@ import static org.hamcrest.Matchers.equalTo; public class MultiBucketCollectorTests extends ESTestCase { - private static class ScoreAndDoc extends Scorable { + private static class Score extends Scorable { float score; - int doc = -1; - - @Override - public int docID() { - return doc; - } @Override public float score() { @@ -247,7 +241,7 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { collector1 = new TerminateAfterBucketCollector(collector1, 1); collector2 = new TerminateAfterBucketCollector(collector2, 2); - Scorable scorer = new ScoreAndDoc(); + Scorable scorer = new Score(); List collectors = Arrays.asList(collector1, collector2); Collections.shuffle(collectors, random()); @@ -275,4 +269,78 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { assertFalse(setScorerCalled1.get()); assertFalse(setScorerCalled2.get()); } + + public void testCacheScores() throws IOException { + ScoringBucketCollector scoringBucketCollector1 = new ScoringBucketCollector(); + ScoringBucketCollector scoringBucketCollector2 = new ScoringBucketCollector(); + + DummyScorable scorable = new DummyScorable(); + + // First test the tester + LeafBucketCollector leafBucketCollector1 = scoringBucketCollector1.getLeafCollector(null); + LeafBucketCollector leafBucketCollector2 = scoringBucketCollector2.getLeafCollector(null); + leafBucketCollector1.setScorer(scorable); + leafBucketCollector2.setScorer(scorable); + leafBucketCollector1.collect(0, 0); + 
leafBucketCollector2.collect(0, 0); + assertEquals(2, scorable.numScoreCalls); + + // reset + scorable.numScoreCalls = 0; + LeafBucketCollector leafBucketCollector = MultiBucketCollector.wrap( + randomBoolean(), + Arrays.asList(scoringBucketCollector1, scoringBucketCollector2) + ).getLeafCollector(null); + leafBucketCollector.setScorer(scorable); + leafBucketCollector.collect(0, 0); + // Even though both leaf collectors called scorable.score(), it only got called once thanks to caching + assertEquals(1, scorable.numScoreCalls); + } + + private static class ScoringBucketCollector extends BucketCollector { + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; // needs scores + } + + @Override + public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx) throws IOException { + return new ScoringLeafBucketCollector(); + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws IOException { + + } + } + + private static class ScoringLeafBucketCollector extends LeafBucketCollector { + + private Scorable scorable; + + @Override + public void setScorer(Scorable scorer) throws IOException { + this.scorable = scorer; + } + + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + scorable.score(); + } + } + + private static class DummyScorable extends Scorable { + int numScoreCalls = 0; + + @Override + public float score() throws IOException { + numScoreCalls++; + return 42f; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 879c7e6aeff7f..eb5fa734a8c91 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -80,12 +80,12 @@ protected void 
indexData() throws Exception { indexRandom(true, docs); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()), resp -> { - long totalOnOne = resp.getHits().getTotalHits().value; + long totalOnOne = resp.getHits().getTotalHits().value(); assertThat(totalOnOne, is(15L)); }); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()), resp -> { assertNoFailures(resp); - long totalOnTwo = resp.getHits().getTotalHits().value; + long totalOnTwo = resp.getHits().getTotalHits().value(); assertThat(totalOnTwo, is(12L)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index bbd12726ac4e3..28a032e7281e6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -2571,19 +2571,19 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); assertEquals(2L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); 
topHits = result.getBuckets().get(2).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new FieldExistsQuery("keyword")), dataset, () -> { @@ -2598,13 +2598,13 @@ public void testWithKeywordAndTopHits() throws Exception { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 71b93888ba243..8a72f8af7035c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -15,14 +15,29 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.TextField; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.ByteVectorValues; import 
org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafMetaData; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.StoredFields; +import org.apache.lucene.index.TermVectors; +import org.apache.lucene.index.Terms; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.KnnCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -54,8 +69,6 @@ import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.LONG; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { static class ClassAndName { @@ -71,11 +84,8 @@ static class ClassAndName { private IndexReader indexReader; @Before - public void setUpMocks() { - indexReader = mock(IndexReader.class); - IndexReaderContext indexReaderContext = mock(IndexReaderContext.class); - when(indexReaderContext.leaves()).thenReturn(List.of()); - when(indexReader.getContext()).thenReturn(indexReaderContext); + public void set() { + indexReader = new DummyReader(); } public void 
testRandomLong() throws IOException { @@ -425,4 +435,126 @@ private static void createListCombinations( } } } + + static class DummyReader extends LeafReader { + @Override + public CacheHelper getCoreCacheHelper() { + return null; + } + + @Override + public Terms terms(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public BinaryDocValues getBinaryDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedDocValues getSortedDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedSetDocValues getSortedSetDocValues(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNormValues(String field) throws IOException { + return null; + } + + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return null; + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + return null; + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return null; + } + + @Override + public void searchNearestVectors(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public void searchNearestVectors(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public FieldInfos getFieldInfos() { + return null; + } + + @Override + public Bits getLiveDocs() { + return null; + } + + @Override + public PointValues getPointValues(String field) throws IOException { + return null; + } + + @Override + public void checkIntegrity() throws IOException { + + } + + @Override + public 
LeafMetaData getMetaData() { + return null; + } + + @Override + public TermVectors termVectors() throws IOException { + return null; + } + + @Override + public int numDocs() { + return 0; + } + + @Override + public int maxDoc() { + return 0; + } + + @Override + public StoredFields storedFields() throws IOException { + return null; + } + + @Override + protected void doClose() throws IOException { + + } + + @Override + public CacheHelper getReaderCacheHelper() { + return null; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java index 4b9a72bacc97d..3d1ed0704acf9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -258,7 +259,7 @@ public void testNumericSorted() { } private static IndexReader mockIndexReader(int maxDoc, int numDocs) { - IndexReader reader = mock(IndexReader.class); + IndexReader reader = mock(LeafReader.class); when(reader.hasDeletions()).thenReturn(maxDoc - numDocs > 0); when(reader.maxDoc()).thenReturn(maxDoc); when(reader.numDocs()).thenReturn(numDocs); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index 26e643510859c..06f1db352e8f0 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -42,9 +42,7 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (i == ords.length) { - return NO_MORE_ORDS; - } + assert i < ords.length; return ords[i++]; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 4a08295bd7bcd..48aabb61371e9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -414,7 +414,7 @@ public void testWithNestedScoringAggregations() throws IOException { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); TotalHits hits = topHits.getHits().getTotalHits(); assertNotNull(hits); - assertThat(hits.value, equalTo(counter)); + assertThat(hits.value(), equalTo(counter)); assertThat(topHits.getHits().getMaxScore(), equalTo(Float.NaN)); counter += 1; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 2c76ed96da488..b267cb2e656b6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -1730,8 +1730,8 @@ private void assertNestedTopHitsScore(InternalMultiBucketAggregation terms int ptr = 9; for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); - 
assertThat(topHits.getHits().getTotalHits().value, equalTo((long) ptr)); - assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation); + assertThat(topHits.getHits().getTotalHits().value(), equalTo((long) ptr)); + assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation()); if (withScore) { assertThat(topHits.getHits().getMaxScore(), equalTo(1f)); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 87eb2bdc29fbd..07d535167b318 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -220,8 +220,8 @@ protected void assertReduced(InternalTopHits reduced, List inpu TotalHits.Relation relation = TotalHits.Relation.EQUAL_TO; for (int input = 0; input < inputs.size(); input++) { SearchHits internalHits = inputs.get(input).getHits(); - totalHits += internalHits.getTotalHits().value; - if (internalHits.getTotalHits().relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + totalHits += internalHits.getTotalHits().value(); + if (internalHits.getTotalHits().relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { relation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } maxScore = max(maxScore, internalHits.getMaxScore()); @@ -379,14 +379,14 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { case 2 -> size += between(1, 100); case 3 -> topDocs = new TopDocsAndMaxScore( new TopDocs( - new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), topDocs.topDocs.totalHits.relation), + new TotalHits(topDocs.topDocs.totalHits.value() + between(1, 100), topDocs.topDocs.totalHits.relation()), topDocs.topDocs.scoreDocs ), topDocs.maxScore + randomFloat() ); case 4 -> { TotalHits totalHits 
= new TotalHits( - searchHits.getTotalHits().value + between(1, 100), + searchHits.getTotalHits().value() + between(1, 100), randomFrom(TotalHits.Relation.values()) ); searchHits = SearchHits.unpooled(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 0c5217ded982b..6fb147e3ffc89 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -55,7 +55,7 @@ public void testTopLevel() throws Exception { result = testCase(query, topHits("_name")); } SearchHits searchHits = ((TopHits) result).getHits(); - assertEquals(3L, searchHits.getTotalHits().value); + assertEquals(3L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("2", searchHits.getAt(1).getId()); assertEquals("1", searchHits.getAt(2).getId()); @@ -65,7 +65,7 @@ public void testTopLevel() throws Exception { public void testNoResults() throws Exception { TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC)); SearchHits searchHits = result.getHits(); - assertEquals(0L, searchHits.getTotalHits().value); + assertEquals(0L, searchHits.getTotalHits().value()); assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } @@ -89,27 +89,27 @@ public void testInsideTerms() throws Exception { // The "a" bucket TopHits hits = (TopHits) terms.getBucketByKey("a").getAggregations().get("top"); SearchHits searchHits = (hits).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertEquals("1", 
searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("a").getAggregations().get("top")))); // The "b" bucket searchHits = ((TopHits) terms.getBucketByKey("b").getAggregations().get("top")).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("b").getAggregations().get("top")))); // The "c" bucket searchHits = ((TopHits) terms.getBucketByKey("c").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("c").getAggregations().get("top")))); // The "d" bucket searchHits = ((TopHits) terms.getBucketByKey("d").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top")))); } @@ -179,7 +179,7 @@ public void testSetScorer() throws Exception { .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); TopHits result = searchAndReduce(reader, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query)); - assertEquals(3, result.getHits().getTotalHits().value); + assertEquals(3, result.getHits().getTotalHits().value()); reader.close(); directory.close(); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index 
e47614145e924..7fa2732191cd1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -88,12 +88,9 @@ public boolean advanceExact(int docID) { @Override public long nextOrd() { - if (consumed) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - consumed = true; - return 0; - } + assert consumed == false; + consumed = true; + return 0; } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index bc6b72d9ddd3e..2a36887cc459a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ -124,11 +124,8 @@ public long getValueCount() { @Override public long nextOrd() { - if (i < ords[doc].length) { - return ords[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < ords[doc].length; + return ords[doc][i++]; } @Override @@ -153,10 +150,8 @@ public int docValueCount() { for (int ord : ords[i]) { assertEquals(values[ord], withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } else { assertEquals(missing, withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java index caf8df55ce528..71fd3a4761cbe 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java @@ -201,7 +201,10 @@ public void testCollectFromMiddle() throws IOException { BucketCollector collector = getBucketCollector(2 * DOC_COUNTS); // skip the first doc of segment 1 and 2 - indexSearcher.search(SortedSetDocValuesField.newSlowSetQuery("_tsid", new BytesRef("tsid0"), new BytesRef("tsid1")), collector); + indexSearcher.search( + SortedSetDocValuesField.newSlowSetQuery("_tsid", List.of(new BytesRef("tsid0"), new BytesRef("tsid1"))), + collector + ); collector.postCollection(); reader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java index 5cf40309f9bc0..0abf34d800dca 100644 --- a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java @@ -40,7 +40,7 @@ public class DfsPhaseTests extends ESTestCase { @Before public final void init() { threadPool = new TestThreadPool(DfsPhaseTests.class.getName()); - threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH_WORKER); + threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH); } @After diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 14e81c0414865..9957d8c92b955 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Collector; import 
org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.ConstantScoreQuery; @@ -47,6 +46,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; @@ -76,7 +76,6 @@ import java.io.IOException; import java.io.UncheckedIOException; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.IdentityHashMap; @@ -224,7 +223,8 @@ public void testConcurrentRewrite() throws Exception { int numSegments = directoryReader.getContext().leaves().size(); KnnFloatVectorQuery vectorQuery = new KnnFloatVectorQuery("float_vector", new float[] { 0, 0, 0 }, 10, null); vectorQuery.rewrite(searcher); - assertBusy(() -> assertEquals(numSegments, executor.getCompletedTaskCount())); + // 1 task gets executed on the caller thread + assertBusy(() -> assertEquals(numSegments - 1, executor.getCompletedTaskCount())); } } finally { terminate(executor); @@ -250,11 +250,12 @@ public void testConcurrentCollection() throws Exception { Integer.MAX_VALUE, 1 ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertEquals(numDocs, totalHits.intValue()); int numExpectedTasks = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), Integer.MAX_VALUE, 1).length; - // check that each slice goes to the executor, no matter the queue size or the number of slices - assertBusy(() -> assertEquals(numExpectedTasks, executor.getCompletedTaskCount())); + // check that each slice except for one that executes on the calling thread goes to the executor, no matter the queue size + // or the number of slices + 
assertBusy(() -> assertEquals(numExpectedTasks - 1, executor.getCompletedTaskCount())); } } finally { terminate(executor); @@ -365,7 +366,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery()))); TopDocs topDocs = searcher.search(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("foo", "bar"))), 3f), 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3f, topDocs.scoreDocs[0].score, 0); @@ -404,7 +405,7 @@ private static void assertSlices(LeafSlice[] slices, int numDocs, int numThreads int sumDocs = 0; assertThat(slices.length, lessThanOrEqualTo(numThreads)); for (LeafSlice slice : slices) { - int sliceDocs = Arrays.stream(slice.leaves).mapToInt(l -> l.reader().maxDoc()).sum(); + int sliceDocs = slice.getMaxDocs(); assertThat(sliceDocs, greaterThanOrEqualTo((int) (0.1 * numDocs))); sumDocs += sliceDocs; } @@ -495,9 +496,14 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { contextIndexSearcher.throwTimeExceededException(); - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + Scorer scorer = new ConstantScoreScorer( + score(), + scoreMode, + DocIdSetIterator.all(context.reader().maxDoc()) + ); + return new DefaultScorerSupplier(scorer); } @Override @@ -581,7 +587,10 @@ public Query rewrite(IndexSearcher indexSearcher) { return null; } }; - Integer hitCount = contextIndexSearcher.search(testQuery, new TotalHitCountCollectorManager()); + Integer hitCount = contextIndexSearcher.search( + testQuery, + new TotalHitCountCollectorManager(contextIndexSearcher.getSlices()) + ); 
assertEquals(0, hitCount.intValue()); assertTrue(contextIndexSearcher.timeExceeded()); } finally { @@ -745,15 +754,9 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.scorer(context); - } - - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.bulkScorer(context); + return weight.scorerSupplier(context); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index c42f3156c6d29..b728d40900570 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -125,14 +125,14 @@ public void testManagerWithSearcher() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); String profileReason = "profiler_reason"; ProfileCollectorManager profileCollectorManager = new ProfileCollectorManager<>(topDocsManager, profileReason); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), profileCollectorManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); CollectorResult result = 
profileCollectorManager.getCollectorTree(); assertEquals("profiler_reason", result.getReason()); assertEquals("SimpleTopScoreDocCollector", result.getName()); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java index e868293ef4a1c..98d79df63db8e 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java @@ -10,14 +10,12 @@ package org.elasticsearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiReader; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Matches; import org.apache.lucene.search.MatchesIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -34,10 +32,6 @@ private static class FakeScorer extends Scorer { public float maxScore, minCompetitiveScore; - protected FakeScorer(Weight weight) { - super(weight); - } - @Override public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); @@ -75,22 +69,14 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return Explanation.match(1, "fake_description"); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - FakeScorer fakeScorer = new FakeScorer(this); - fakeScorer.maxScore = 42f; - return fakeScorer; - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - Weight weight = this; return new ScorerSupplier() { private long cost = 0; @Override public Scorer get(long leadCost) { - return new 
Scorer(weight) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return null; @@ -187,23 +173,17 @@ public Iterator iterator() { } public void testPropagateMinCompetitiveScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); assertEquals(0.42f, fakeScorer.minCompetitiveScore, 0f); } public void testPropagateMaxScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); fakeScorer.maxScore = 42f; assertEquals(42f, profileScorer.getMaxScore(DocIdSetIterator.NO_MORE_DOCS), 0f); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 9df03905f7be2..44c46e3f692ba 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ 
-241,11 +241,6 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index d33c033ecef2d..de6218e912953 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -120,7 +120,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topScoreDocManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -133,7 +133,7 @@ public void testTopDocsOnly() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } } @@ -150,7 +150,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -165,7 +165,7 @@ public void testWithAggs() throws IOException { ); Result result = searcher.search(new TermQuery(new Term("field2", 
"value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numField2Docs, result.aggs.intValue()); } } @@ -184,7 +184,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -199,7 +199,7 @@ public void testPostFilterTopDocsOnly() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } } @@ -218,7 +218,7 @@ public void testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -235,7 +235,7 @@ public void testPostFilterWithAggs() throws IOException { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // post_filter is not applied to aggs assertEquals(reader.maxDoc(), result.aggs.intValue()); } @@ -251,7 +251,7 @@ public void testMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, 
topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -266,7 +266,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -279,7 +279,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -292,7 +292,7 @@ public void testMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -306,7 +306,7 @@ public void testMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -322,7 +322,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // min_score is applied to aggs 
as well as top docs assertEquals(numField2Docs, result.aggs.intValue()); } @@ -338,7 +338,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -353,7 +353,7 @@ public void testMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -370,7 +370,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -385,7 +385,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -398,7 +398,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new 
TopScoreDocCollectorManager(1, null, 1000); @@ -411,7 +411,7 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -427,7 +427,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -443,7 +443,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); assertEquals(numField3Docs, result.aggs.intValue()); } { @@ -458,7 +458,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -473,7 +473,7 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -623,7 +623,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { { CollectorManager 
topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -638,7 +638,7 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -651,7 +651,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -667,7 +667,7 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); assertEquals(terminateAfter, result.aggs.intValue()); } } @@ -683,7 +683,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -698,7 +698,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc ); Result result = searcher.search(booleanQuery, manager); 
assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -713,7 +713,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -729,7 +729,7 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); // aggs see more documents because the filter is not applied to them assertThat(result.aggs, Matchers.greaterThanOrEqualTo(terminateAfter)); } @@ -1139,11 +1139,6 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { public float score() { return 0; } - - @Override - public int docID() { - return 0; - } }; QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( @@ -1473,11 +1468,6 @@ public float score() throws IOException { return 0; } - @Override - public int docID() { - return 0; - } - @Override public void setMinCompetitiveScore(float minScore) { setMinCompetitiveScoreCalled = true; @@ -1521,7 +1511,7 @@ public void setScorer(Scorable scorer) throws IOException { setScorerCalled = true; if (expectedScorable != null) { while (expectedScorable.equals(scorer.getClass()) == false && scorer instanceof FilterScorable) { - scorer = scorer.getChildren().iterator().next().child; + scorer = scorer.getChildren().iterator().next().child(); } assertEquals(expectedScorable, scorer.getClass()); } diff --git 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 56a8b0f3a8c30..1f74668158e0e 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -144,7 +144,7 @@ private void countTestCase(Query query, IndexReader reader, boolean shouldCollec QueryPhase.addCollectorsAndSearch(context); ContextIndexSearcher countSearcher = shouldCollectCount ? newContextSearcher(reader) : noCollectionContextSearcher(reader); - assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -233,15 +233,15 @@ public void testPostFilterDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, 
context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when a post_filter is provided, hence it forces collection despite @@ -249,16 +249,16 @@ public void testPostFilterDisablesHitCountShortcut() throws Exception { context.setSize(0); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -269,8 +269,8 @@ public void testTerminateAfterWithFilter() throws Exception { context.setSize(10); context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", "bar")))); QueryPhase.addCollectorsAndSearch(context); - assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - 
assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } } @@ -280,15 +280,15 @@ public void testMinScoreDisablesHitCountShortcut() throws Exception { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when min_score is provided, hence it forces collection despite @@ -296,16 +296,16 @@ public void testMinScoreDisablesHitCountShortcut() 
throws Exception { context.setSize(0); context.minimumScore(100); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.minimumScore(100); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -336,17 +336,17 @@ public void testInOrderScrollOptimization() throws Exception { context.setSize(size); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); 
context.setSearcher(earlyTerminationContextSearcher(reader, size)); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size)); } } @@ -364,8 +364,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -375,8 +375,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { @@ -387,8 +387,8 @@ public void testTerminateAfterSize0HitCountShortcut() throws Exception { QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); // Given that total hit count does not require collection, PartialHitCountCollector does not early terminate. - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -407,8 +407,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.setSize(0); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -419,8 +419,8 @@ public void 
testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } { @@ -434,9 +434,9 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(trackTotalHits); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) trackTotalHits)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) trackTotalHits)); assertThat( - context.queryResult().topDocs().topDocs.totalHits.relation, + context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); @@ -449,8 +449,8 @@ public void testTerminateAfterSize0NoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(11, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(10L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(10L)); + 
assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -468,7 +468,7 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(10); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(10)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 1), new MatchAllDocsQuery())) { @@ -477,8 +477,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -489,8 +489,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, 
equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), new MatchAllDocsQuery())) { @@ -500,8 +500,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } { @@ -515,8 +515,8 @@ public void testTerminateAfterWithHitsHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); 
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(size)); } } @@ -535,8 +535,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -546,8 +546,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -558,8 +558,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(1, 6)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - 
assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -572,8 +572,8 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { // depending on docs distribution we may or may not be able to honor terminate_after: low scoring hits are skipped via // setMinCompetitiveScore, which bypasses terminate_after until the next leaf collector is pulled, when that happens. assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(5)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -584,9 +584,9 @@ public void testTerminateAfterWithHitsNoHitCountShortcut() throws Exception { context.trackTotalHitsUpTo(randomIntBetween(8, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); // TODO this looks off, it should probably be 
GREATER_THAN_OR_EQUAL_TO - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } } @@ -599,8 +599,8 @@ public void testIndexSortingEarlyTermination() throws Exception { context.setSize(1); context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -612,7 +612,7 @@ public void testIndexSortingEarlyTermination() throws Exception { context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1))); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(numDocs - 1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -623,7 +623,7 @@ public void 
testIndexSortingEarlyTermination() throws Exception { context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.executeQuery(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -686,18 +686,18 @@ public void testIndexSortScrollOptimization() throws Exception { context.sort(searchSortAndFormat); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); int sizeMinus1 = context.queryResult().topDocs().topDocs.scoreDocs.length - 1; FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1]; context.setSearcher(earlyTerminationContextSearcher(reader, 10)); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + 
assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) { @SuppressWarnings("unchecked") @@ -746,8 +746,8 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.EQUAL_TO); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.EQUAL_TO); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); } try (TestSearchContext context = createContext(newContextSearcher(reader), q)) { @@ -764,9 +764,9 @@ public void testDisableTopScoreCollection() throws Exception { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.TOP_DOCS); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } } @@ -881,8 +881,8 @@ public void testNumericSortOptimization() throws Exception { QueryPhase.addCollectorsAndSearch(searchContext); assertTrue(searchContext.sort().sort.getSort()[0].getOptimizeSortWithPoints()); 
assertThat(searchContext.queryResult().topDocs().topDocs.scoreDocs, arrayWithSize(0)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } // 7. Test that sort optimization doesn't break a case where from = 0 and size= 0 @@ -950,8 +950,8 @@ public void testMaxScoreQueryVisitor() { // assert score docs are in order and their number is as expected private static void assertSortResults(TopDocs topDocs, long totalNumDocs, boolean isDoubleSort) { - assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation); - assertThat(topDocs.totalHits.value, lessThan(totalNumDocs)); // we collected less docs than total number + assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation()); + assertThat(topDocs.totalHits.value(), lessThan(totalNumDocs)); // we collected less docs than total number long cur1, cur2; long prev1 = Long.MIN_VALUE; long prev2 = Long.MIN_VALUE; @@ -990,7 +990,7 @@ public void testMinScore() throws Exception { context.trackTotalHitsUpTo(5); QueryPhase.addCollectorsAndSearch(context); - assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -1136,7 +1136,7 @@ private static ContextIndexSearcher earlyTerminationContextSearcher(IndexReader ) { @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] partitions, Weight weight, Collector collector) throws IOException { final Collector in = new FilterCollector(collector) { 
@Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { @@ -1153,7 +1153,7 @@ public void collect(int doc) throws IOException { }; } }; - super.search(leaves, weight, in); + super.search(partitions, weight, in); } }; } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index 3bf9514cad547..b417f7adbc8b7 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -132,7 +133,7 @@ private void scorerTimeoutTest(int size, CheckedConsumer context = mapping.parseContext(document); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 320e3fce2e832..7791073ef36fa 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -211,8 +211,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + 
assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); @@ -227,8 +227,8 @@ public void testFrequencyThreshold() throws Exception { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java index dc9d026af0135..72ae45fd26143 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java @@ -226,7 +226,7 @@ public void testRandom() throws IOException { int n = random().nextInt(100) + 1; TopDocs results = searcher.search(query, n); assert reader.hasDeletions() == false; - assertTrue(results.totalHits.value >= results.scoreDocs.length); + assertTrue(results.totalHits.value() >= results.scoreDocs.length); // verify the results are in descending score order float last = Float.MAX_VALUE; for (ScoreDoc scoreDoc : results.scoreDocs) { diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index 18d5c8c85fbec..bef0bbfd27ff6 100644 --- 
a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -282,8 +282,8 @@ public void testScoreDocQuery() throws IOException { final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); TopDocs topDocs = searcher.search(query, 100); - assertEquals(scoreDocs.length, topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation); + assertEquals(scoreDocs.length, topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, topDocs.totalHits.relation()); Arrays.sort(topDocs.scoreDocs, Comparator.comparingInt(scoreDoc -> scoreDoc.doc)); assertEquals(scoreDocs.length, topDocs.scoreDocs.length); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java index 8d9fa847a988c..f2ead93ebb6e1 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java @@ -62,14 +62,14 @@ public void testSimpleEuclidean() throws Exception { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 3f, 0.25f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 1f, 0.5f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } @@ -138,14 +138,14 @@ public void testSimpleCosine() throws IOException { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .8f, .9f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + 
assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .9f, 0.95f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 1509cfa08b400..c46d98fe1cd8b 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -372,7 +372,7 @@ public void testSuccessfulSnapshotAndRestore() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseListener, r -> { - assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value); + assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value()); documentCountVerified.set(true); }); @@ -816,7 +816,10 @@ public void testConcurrentSnapshotRestoreAndDeleteOther() { var response = safeResult(searchResponseListener); try { - assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + assertEquals( + documentsFirstSnapshot + documentsSecondSnapshot, + Objects.requireNonNull(response.getHits().getTotalHits()).value() + ); } finally { response.decRef(); } @@ -1177,7 +1180,7 @@ public void testSuccessfulSnapshotWithConcurrentDynamicMappingUpdates() { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseStepListener, r -> { - final long hitCount = r.getHits().getTotalHits().value; + final long hitCount = r.getHits().getTotalHits().value(); assertThat( "Documents were restored but the restored index mapping was older than some documents and misses some of their fields", (int) 
hitCount, @@ -1850,8 +1853,6 @@ private Environment createEnvironment(String nodeName) { Settings.builder() .put(NODE_NAME_SETTING.getKey(), nodeName) .put(PATH_HOME_SETTING.getKey(), tempDir.resolve(nodeName).toAbsolutePath()) - // test uses the same executor service for all thread pools, search worker would need to be a different one - .put(SearchService.SEARCH_WORKER_THREADS_ENABLED.getKey(), false) .put(Environment.PATH_REPO_SETTING.getKey(), tempDir.resolve("repo").toAbsolutePath()) .putList( ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(), diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index 310cf467a8391..808c0a5b88b7e 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -25,8 +25,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedTransferQueue; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig.DEFAULT; @@ -371,25 +369,6 @@ public void testWriteThreadPoolUsesTaskExecutionTimeTrackingEsThreadPoolExecutor } } - public void testSearchWorkedThreadPool() { - final int allocatedProcessors = randomIntBetween(1, EsExecutors.allocatedProcessors(Settings.EMPTY)); - final ThreadPool threadPool = new TestThreadPool( - "test", - Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), allocatedProcessors).build() - ); - try { - ExecutorService executor = threadPool.executor(ThreadPool.Names.SEARCH_WORKER); - assertThat(executor, instanceOf(ThreadPoolExecutor.class)); - ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) executor; - int expectedPoolSize = allocatedProcessors * 3 / 2 + 1; - assertEquals(expectedPoolSize, 
threadPoolExecutor.getCorePoolSize()); - assertEquals(expectedPoolSize, threadPoolExecutor.getMaximumPoolSize()); - assertThat(threadPoolExecutor.getQueue(), instanceOf(LinkedTransferQueue.class)); - } finally { - assertTrue(terminate(threadPool)); - } - } - public void testScheduledOneShotRejection() { final var name = "fixed-bounded"; final var threadPool = new TestThreadPool( diff --git a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java index 9e5b9dd0be547..7f2cb85919d10 100644 --- a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java +++ b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java @@ -140,7 +140,7 @@ public void testRetrieveSnapshots() throws Exception { logger.info("--> run a search"); assertResponse(client.prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("text", "sometext")), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(COUNTS.intValue(), greaterThan(0)); }); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index 98f18829966c7..47a227cebc956 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -502,7 +502,7 @@ private void assertQueryOnlyOnText(String queryName, ThrowingRunnable buildQuery } protected final String readSource(IndexReader reader, int docId) throws IOException { - return reader.document(docId).getBinaryValue("_source").utf8ToString(); + return reader.storedFields().document(docId).getBinaryValue("_source").utf8ToString(); } protected final void checkExpensiveQuery(BiConsumer queryBuilder) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index b6aa3f97241e1..1c4cfa4ec7ff9 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -139,6 +140,7 @@ public FieldInfo getFieldInfoWithName(String name) { randomBoolean(), IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, new HashMap<>(), 1, diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 2ddd153b8a936..c76967e5d00ac 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -198,7 +198,9 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { entry("daitchmokotoffsoundex", Void.class), entry("persianstem", Void.class), // not exposed - entry("word2vecsynonym", 
Void.class) + entry("word2vecsynonym", Void.class), + // not exposed + entry("romaniannormalization", Void.class) ); static final Map> KNOWN_CHARFILTERS = Map.of( diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index cc4aac686a02d..df1ea6b756405 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -100,7 +100,7 @@ public static TotalHits getTotalHits(SearchRequestBuilder request) { } public static long getTotalHitsValue(SearchRequestBuilder request) { - return getTotalHits(request).value; + return getTotalHits(request).value(); } public static SearchResponse responseAsSearchResponse(Response searchResponse) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index f885442373d70..5f64d123c1bed 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -33,6 +33,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -158,6 +159,7 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -212,7 +214,7 @@ public abstract class AggregatorTestCase extends ESTestCase { @Before public final void 
initPlugins() { threadPool = new TestThreadPool(AggregatorTestCase.class.getName()); - threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH_WORKER); + threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(ThreadPool.Names.SEARCH); List plugins = new ArrayList<>(getSearchPlugins()); plugins.add(new AggCardinalityUpperBoundPlugin()); SearchModule searchModule = new SearchModule(Settings.EMPTY, plugins); @@ -831,11 +833,8 @@ protected void debugTestCase( QueryCachingPolicy queryCachingPolicy, MappedFieldType... fieldTypes ) throws IOException { - // Don't use searchAndReduce because we only want a single aggregator. - IndexSearcher searcher = newIndexSearcher( - reader, - aggregationBuilder.supportsParallelCollection(field -> getCardinality(reader, field)) - ); + // Don't use searchAndReduce because we only want a single aggregator, disable parallel collection too. + IndexSearcher searcher = newIndexSearcher(reader, false); if (queryCachingPolicy != null) { searcher.setQueryCachingPolicy(queryCachingPolicy); } @@ -854,7 +853,21 @@ protected void debugTestCase( try { Aggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); - searcher.search(context.query(), aggregator.asCollector()); + searcher.search(context.query(), new CollectorManager() { + boolean called = false; + + @Override + public Collector newCollector() { + assert called == false : "newCollector called multiple times"; + called = true; + return aggregator.asCollector(); + } + + @Override + public Void reduce(Collection collectors) { + return null; + } + }); InternalAggregation r = aggregator.buildTopLevel(); r = doReduce( List.of(r), @@ -959,11 +972,11 @@ protected DirectoryReader wrapDirectoryReader(DirectoryReader reader) throws IOE } private static class ShardSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { 
super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -972,7 +985,7 @@ public void search(Weight weight, Collector collector) throws IOException { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index b1b75c1790287..29112b4bd8f5f 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -232,7 +232,7 @@ public void setupSuiteScopeCluster() throws Exception { .setSize(5000), response -> { assertNoFailures(response); - long totalHits = response.getHits().getTotalHits().value; + long totalHits = response.getHits().getTotalHits().value(); XContentBuilder builder = XContentFactory.jsonBuilder(); ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java index f191012fb4ef8..f87a87c5ddbc8 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java @@ -40,7 +40,7 @@ public void testEmptyAggregation() { 
.addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), response -> { CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(geoCentroid, notNullValue()); assertThat(geoCentroid.getName(), equalTo(aggName())); assertThat(geoCentroid.centroid(), equalTo(null)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java index cb6a58ed65a02..a0fdb0bfabf98 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java @@ -155,7 +155,7 @@ public void testEmptyAggregation() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(EMPTY_IDX_NAME).setQuery(matchAllQuery()).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); SpatialBounds geoBounds = response.getAggregations().get(aggName()); assertThat(geoBounds, notNullValue()); assertThat(geoBounds.getName(), equalTo(aggName())); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 71956b431d9b7..fd41213dcd81d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -104,7 +104,7 @@ public void 
testIndexPointsFilterRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -113,7 +113,7 @@ public void testIndexPointsFilterRectangle() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -170,7 +170,7 @@ public void testIndexPointsPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getId(), equalTo("1")); } ); @@ -209,7 +209,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -219,7 +219,7 @@ public void 
testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -229,7 +229,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -238,7 +238,7 @@ public void testIndexPointsMultiPolygon() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); @@ -264,7 +264,7 @@ public void testIndexPointsRectangle() throws Exception { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -319,7 
+319,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java index 91c7a25682ae0..6dca91170c7a5 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java @@ -206,7 +206,7 @@ public void testRandomGeoCollectionQuery() throws Exception { QueryBuilder intersects = queryBuilder().intersectionQuery(defaultFieldName, queryCollection); assertNoFailuresAndResponse(client().prepareSearch(defaultIndexName).setQuery(intersects), response -> { - assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value > 0); + assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value() > 0); }); } @@ -352,7 +352,7 @@ public void testEdgeCases() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, query)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("blakely")); } @@ -457,7 +457,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( 
client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, "Big_Rectangle")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -466,7 +466,7 @@ public void testIndexedShapeReference() throws Exception { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, "Big_Rectangle")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java index 4e7378d3a9606..3cd52124d8556 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java @@ -68,7 +68,7 @@ public void testRectangleSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); }); @@ -112,7 +112,7 @@ public void testPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); 
assertNotEquals("4", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); @@ -155,7 +155,7 @@ public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("3", searchHits.getAt(0).getId()); assertNotEquals("3", searchHits.getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java index c84d8612b1d4b..97e21f64e2648 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java @@ -102,7 +102,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -114,7 +114,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -126,7 +126,7 @@ public void 
testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom && left == right .setQuery(geoBoundingBoxQuery("location").setCorners(40.7143528, -74.0059731, 40.7143528, -74.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("1")); @@ -138,7 +138,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // top == bottom .setQuery(geoBoundingBoxQuery("location").setCorners(40.759011, -74.00009, 40.759011, -73.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -150,7 +150,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // left == right .setQuery(geoBoundingBoxQuery("location").setCorners(41.8, -73.9844722, 40.7, -73.9844722)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -163,7 +163,7 @@ public void testSimpleBoundingBoxTest() throws Exception { client().prepareSearch() // from NY .setQuery(geoDistanceQuery("location").point(40.5, -73.9).distance(25, DistanceUnit.KILOMETERS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) 
{ assertThat(hit.getId(), anyOf(equalTo("7"), equalTo("4"))); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index 4e47b0c51177c..bb57cb132daa2 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -143,7 +143,7 @@ public void testEnvelopeSpanningDateline() throws Exception { } ); assertResponse(client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertNotEquals("1", response.getHits().getAt(0).getId()); assertNotEquals("1", response.getHits().getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index f1ff3492426aa..a93f3b7eaf109 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -73,7 +73,7 @@ public static void corruptFile(Random random, Path... 
files) throws IOException long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - CodecUtil.footerLength()); checksumAfterCorruption = input.getChecksum(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index cca3443c28e3a..5fb44a9847d71 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -907,9 +907,11 @@ public void waitNoPendingTasksOnAll() throws Exception { /** Ensures the result counts are as expected, and logs the results if different */ public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) { final TotalHits totalHits = searchResponse.getHits().getTotalHits(); - if (totalHits.value != expectedResults || totalHits.relation != TotalHits.Relation.EQUAL_TO) { + if (totalHits.value() != expectedResults || totalHits.relation() != TotalHits.Relation.EQUAL_TO) { StringBuilder sb = new StringBuilder("search result contains ["); - String value = Long.toString(totalHits.value) + (totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : ""); + String value = Long.toString(totalHits.value()) + (totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO + ? "+" + : ""); sb.append(value).append("] results. 
expected [").append(expectedResults).append("]"); String failMsg = sb.toString(); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 7021ea47aa8dd..840c222e73554 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -33,6 +33,8 @@ import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.tests.util.TestRuleMarkFailure; @@ -208,6 +210,7 @@ import java.util.stream.LongStream; import java.util.stream.Stream; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.anyOf; @@ -2633,4 +2636,43 @@ public static void ensureAllContextsReleased(SearchService searchService) { throw new AssertionError("Failed to verify search contexts", e); } } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r) { + return newSearcher(r, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. 
+ * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) { + return newSearcher(r, maybeWrap, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions) { + return newSearcher(r, maybeWrap, wrapWithAssertions, randomBoolean()); + } + + /** + * Create a new searcher over the reader. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. 
+ */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions, boolean useThreads) { + if (useThreads) { + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.INTER_SEGMENT); + } + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.NONE); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index db30f6e91f039..42439b5d5785d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -86,13 +86,6 @@ public Terms terms(String field) throws IOException { return terms; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields fields = super.getTermVectors(docID); - thrower.maybeThrow(Flags.TermVectors); - return fields == null ? 
null : new ThrowingFields(fields, thrower); - } - /** * Wraps a Fields but with additional asserts */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index ad61a63f7c46e..ddd90f3c7cca4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -312,7 +312,7 @@ public static void assertHitCount(ActionFuture responseFuture, l public static void assertHitCount(SearchResponse countResponse, long expectedHitCount) { final TotalHits totalHits = countResponse.getHits().getTotalHits(); - if (totalHits.relation != TotalHits.Relation.EQUAL_TO || totalHits.value != expectedHitCount) { + if (totalHits.relation() != TotalHits.Relation.EQUAL_TO || totalHits.value() != expectedHitCount) { fail("Count is " + totalHits + " but " + expectedHitCount + " was expected. 
" + formatShardStatus(countResponse)); } } @@ -344,7 +344,7 @@ public static void assertFourthHit(SearchResponse searchResponse, Matcher matcher) { assertThat("SearchHit number must be greater than 0", number, greaterThan(0)); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo((long) number)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo((long) number)); assertThat(searchResponse.getHits().getAt(number - 1), matcher); } @@ -407,13 +407,13 @@ public static void assertScrollResponsesAndHitCount( responses.add(scrollResponse); int retrievedDocsCount = 0; try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); while (scrollResponse.getHits().getHits().length > 0) { scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()).setScroll(keepAlive).get(); responses.add(scrollResponse); - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); } @@ -702,8 +702,8 @@ public static T assertBooleanSubQuery(Query query, Class su assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery q = (BooleanQuery) query; assertThat(q.clauses(), hasSize(greaterThan(i))); - assertThat(q.clauses().get(i).getQuery(), instanceOf(subqueryType)); - return subqueryType.cast(q.clauses().get(i).getQuery()); + assertThat(q.clauses().get(i).query(), instanceOf(subqueryType)); + return subqueryType.cast(q.clauses().get(i).query()); } /** diff --git 
a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java index b73066c6f4d38..c9c9759bd0cec 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java @@ -9,33 +9,22 @@ package org.elasticsearch.search.internal; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.io.IOException; - public class ConcurrentSearchSingleNodeTests extends ESSingleNodeTestCase { private final boolean concurrentSearch = randomBoolean(); - public void testConcurrentSearch() throws IOException { + public void testConcurrentSearch() { client().admin().indices().prepareCreate("index").get(); - IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexService indexService = indicesService.iterator().next(); - IndexShard shard = indexService.getShard(0); - SearchService searchService = getInstanceFromNode(SearchService.class); - ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); - try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { - ContextIndexSearcher searcher = searchContext.searcher(); - if (concurrentSearch) { - assertEquals(1, searcher.getMinimumDocsPerSlice()); - } else { - assertEquals(50_000, searcher.getMinimumDocsPerSlice()); - } + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + int minDocsPerSlice = 
SearchService.MINIMUM_DOCS_PER_SLICE.get(clusterService.getSettings()); + if (concurrentSearch) { + assertEquals(1, minDocsPerSlice); + } else { + assertEquals(50_000, minDocsPerSlice); } } diff --git a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java index f99efe33af09b..6b983d47bdf42 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java @@ -9,34 +9,23 @@ package org.elasticsearch.search.internal; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESIntegTestCase; -import java.io.IOException; - @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 1) public class ConcurrentSearchTestPluginTests extends ESIntegTestCase { private final boolean concurrentSearch = randomBoolean(); - public void testConcurrentSearch() throws IOException { + public void testConcurrentSearch() { client().admin().indices().prepareCreate("index").get(); - IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class); - IndexService indexService = indicesService.iterator().next(); - IndexShard shard = indexService.getShard(0); - SearchService searchService = internalCluster().getDataNodeInstance(SearchService.class); - ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); - try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { - ContextIndexSearcher 
searcher = searchContext.searcher(); - if (concurrentSearch) { - assertEquals(1, searcher.getMinimumDocsPerSlice()); - } else { - assertEquals(50_000, searcher.getMinimumDocsPerSlice()); - } + ClusterService clusterService = internalCluster().getDataNodeInstance(ClusterService.class); + int minDocsPerSlice = SearchService.MINIMUM_DOCS_PER_SLICE.get(clusterService.getSettings()); + if (concurrentSearch) { + assertEquals(1, minDocsPerSlice); + } else { + assertEquals(50_000, minDocsPerSlice); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index 885e02a8b5e6a..f517c03468bc2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -69,8 +69,12 @@ public void testSimple() throws IOException { }; AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")); testCase(iw -> { - iw.addDocuments(docs(1000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(1000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } @@ -109,8 +113,12 @@ public void testNestedWithinDateHistogram() throws IOException { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")) .withSplitLeavesIntoSeperateAggregators(false); testCase(iw -> { - iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(2000, 
"2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java index 20da254657c1a..04f0563e433a2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java @@ -170,7 +170,7 @@ public void testNoMatchingField() throws IOException { } public void testQueryFiltering() throws IOException { - testAggregation(new TermInSetQuery("text", new BytesRef("test0"), new BytesRef("test1")), iw -> { + testAggregation(new TermInSetQuery("text", List.of(new BytesRef("test0"), new BytesRef("test1"))), iw -> { for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java index 52ecc40c957b7..aee344777779b 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java @@ -314,9 +314,9 @@ private AsyncSearchResponse doNext() throws Exception { assertThat(newResponse.getSearchResponse().getShardFailures().length, equalTo(numFailures)); 
assertNull(newResponse.getSearchResponse().getAggregations()); assertNotNull(newResponse.getSearchResponse().getHits().getTotalHits()); - assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value, equalTo(0L)); + assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value(), equalTo(0L)); assertThat( - newResponse.getSearchResponse().getHits().getTotalHits().relation, + newResponse.getSearchResponse().getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); } else { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java index 302bb68af6c61..fd4463df07a73 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java @@ -62,7 +62,7 @@ public void testFetchFailuresAllShards() throws Exception { assertEquals(10, searchResponse.getSuccessfulShards()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(0, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); @@ -106,7 +106,7 @@ public void testFetchFailuresOnlySomeShards() throws Exception { assertEquals(10, searchResponse.getTotalShards()); assertEquals(5, searchResponse.getSuccessfulShards()); assertEquals(5, searchResponse.getFailedShards()); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(5, 
searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 95b2324d03b52..16645e7523c36 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -390,12 +390,11 @@ public IndexInput clone() { /** Returns default buffer sizes for the given {@link IOContext} */ public static int bufferSize(IOContext context) { - switch (context.context) { + switch (context.context()) { case MERGE: return MERGE_BUFFER_SIZE; case DEFAULT: case FLUSH: - case READ: default: return BUFFER_SIZE; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index fa9438353779f..164e6ed5406ae 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -244,9 +245,10 @@ private Store.MetadataSnapshot getMetadata() throws IOException { private long readFileBytes(String fileName, ByteArray reference) throws IOException { try (Releasable ignored = keyedLock.acquire(fileName)) { + var 
context = fileName.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : IOContext.DEFAULT; final IndexInput indexInput = cachedInputs.computeIfAbsent(fileName, f -> { try { - return commitRef.getIndexCommit().getDirectory().openInput(fileName, IOContext.READONCE); + return commitRef.getIndexCommit().getDirectory().openInput(fileName, context); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -256,7 +258,7 @@ private long readFileBytes(String fileName, ByteArray reference) throws IOExcept long offsetAfterRead = indexInput.getFilePointer(); - if (offsetAfterRead == indexInput.length()) { + if (offsetAfterRead == indexInput.length() || context == IOContext.READONCE) { cachedInputs.remove(fileName); IOUtils.close(indexInput); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 1b7875e4a36b4..618489abd687e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -703,7 +703,7 @@ protected void atLeastDocsIndexed(Client client, String index, long numDocsRepli request.source(new SearchSourceBuilder().size(0)); assertResponse(client.search(request), response -> { assertNotNull(response.getHits().getTotalHits()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(numDocsReplicated)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(numDocsReplicated)); }); }, 60, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java index 0fea3c0d3b74f..1bf52b663b30f 100644 --- 
a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java @@ -274,7 +274,7 @@ private void assertHits(String index, int numDocsExpected, boolean sourceHadDele }; assertResponse(prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC).setSize(numDocsExpected), searchResponse -> { assertConsumer.accept(searchResponse, sourceHadDeletions); - assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value()); }); SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC) .setScroll(TimeValue.timeValueMinutes(1)) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java index e66d41d089437..12864dd66a857 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java @@ -9,9 +9,9 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; @@ -23,7 +23,6 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import 
org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; @@ -214,6 +213,11 @@ public NumericDocValues getNormValues(String field) { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -257,11 +261,6 @@ public LeafMetaData getMetaData() { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docId) { - throw new UnsupportedOperationException(); - } - @Override public TermVectors termVectors() throws IOException { throw new UnsupportedOperationException(); @@ -282,11 +281,6 @@ public int maxDoc() { return maxDoc; } - @Override - public void document(int docID, StoredFieldVisitor visitor) { - throw new UnsupportedOperationException(); - } - @Override protected void doClose() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 093ec031d0b30..421a306babf29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -254,6 +254,7 @@ private SegmentCommitInfo syncSegment( false, IndexOptions.NONE, DocValuesType.NONE, + fieldInfo.docValuesSkipIndexType(), -1, fieldInfo.attributes(), 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java index 
dea158b425071..d315f09ebda88 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java @@ -105,7 +105,7 @@ protected void searchResources(AbstractGetResourcesRequest request, TaskId paren listener.delegateFailure((l, response) -> { List docs = new ArrayList<>(); Set foundResourceIds = new HashSet<>(); - long totalHitCount = response.getHits().getTotalHits().value; + long totalHitCount = response.getHits().getTotalHits().value(); for (SearchHit hit : response.getHits().getHits()) { try ( XContentParser parser = XContentHelper.createParserNotCompressed( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java index de43f744c307b..4e5f97acacf64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java @@ -145,7 +145,7 @@ default SearchSourceBuilder buildSearch(EvaluationParameters parameters, QueryBu */ default void process(SearchResponse searchResponse) { Objects.requireNonNull(searchResponse); - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { String requiredFieldsString = String.join(", ", getRequiredFields()); throw ExceptionsHelper.badRequestException("No documents found containing all the required fields [{}]", requiredFieldsString); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index 3154fe5999b8e..129619f6976e2 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -79,19 +79,19 @@ public void onResponse(SearchResponse resp) { } } - if (results.size() > resp.getHits().getTotalHits().value) { + if (results.size() > resp.getHits().getTotalHits().value()) { clearScroll.accept(lastResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + results.size() + "] than expected [" - + resp.getHits().getTotalHits().value + + resp.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." ) ); - } else if (results.size() == resp.getHits().getTotalHits().value) { + } else if (results.size() == resp.getHits().getTotalHits().value()) { clearScroll.accept(resp); // Finally, return the list of the entity listener.onResponse(Collections.unmodifiableList(results)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java index 0061870c73cc9..32b12c834dd9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java @@ -55,8 +55,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("took", tookInMillis); builder.startObject("total"); { - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? 
"eq" : "gte"); } builder.endObject(); builder.startArray("profiles"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java index 09a49c53ee1a5..908f58c5f9147 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java @@ -155,17 +155,6 @@ public FieldInfos getFieldInfos() { return fieldInfos; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f == null) { - return null; - } - f = new FieldFilterFields(f); - // we need to check for emptyness, so we can return null: - return f.iterator().hasNext() ? f : null; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -264,11 +253,6 @@ private static int step(CharacterRunAutomaton automaton, String key, int state) return state; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - super.document(docID, new FieldSubsetStoredFieldVisitor(visitor)); - } - @Override protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { return new FieldSubsetStoredFieldsReader(reader); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index c85a648761ca7..5ba5c1fd1218a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -187,7 +187,7 @@ private PermissionEntry(ApplicationPrivilege privilege, Set resourceName } private boolean grants(ApplicationPrivilege other, Automaton resource) { - return matchesPrivilege(other) && Operations.subsetOf(resource, this.resourceAutomaton); + return matchesPrivilege(other) && Automatons.subsetOf(resource, this.resourceAutomaton); } private boolean matchesPrivilege(ApplicationPrivilege other) { @@ -202,7 +202,7 @@ private boolean matchesPrivilege(ApplicationPrivilege other) { } return Operations.isEmpty(privilege.getAutomaton()) == false && Operations.isEmpty(other.getAutomaton()) == false - && Operations.subsetOf(other.getAutomaton(), privilege.getAutomaton()); + && Automatons.subsetOf(other.getAutomaton(), privilege.getAutomaton()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 9c41786f39eeb..4e608281a7858 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RestrictedIndices; @@ -215,7 +214,7 @@ public final boolean check(final String action, final TransportRequest request, @Override public final boolean implies(final PermissionCheck permissionCheck) { if (permissionCheck instanceof 
ActionBasedPermissionCheck) { - return Operations.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) + return Automatons.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) && doImplies((ActionBasedPermissionCheck) permissionCheck); } return false; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index f3c2d9f62e40f..235d7419d2bf0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.Strings; @@ -34,8 +33,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.apache.lucene.util.automaton.Operations.subsetOf; - /** * Stores patterns to fields which access is granted or denied to and maintains an automaton that can be used to check if permission is * allowed for a specific field. 
@@ -175,10 +172,14 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel deniedFieldsAutomaton = Automatons.patterns(deniedFields); } - grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + grantedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); + deniedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); - if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { + if (Automatons.subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { throw new ElasticsearchSecurityException( "Exceptions for field permissions must be a subset of the " + "granted fields but " diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index e1b72cc43b38e..558f8e6f22ac1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -288,7 +288,7 @@ public boolean checkResourcePrivileges( if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { - if (Operations.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { + if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { 
allowedIndexPrivilegesAutomaton = Automatons.unionAndMinimize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) @@ -301,7 +301,7 @@ public boolean checkResourcePrivileges( for (String privilege : checkForPrivileges) { IndexPrivilege indexPrivilege = IndexPrivilege.get(Collections.singleton(privilege)); if (allowedIndexPrivilegesAutomaton != null - && Operations.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { + && Automatons.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { if (resourcePrivilegesMapBuilder != null) { resourcePrivilegesMapBuilder.addResourcePrivilege(forIndexPattern, privilege, Boolean.TRUE); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java index 68e3f11751aac..7434128f03129 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.core.security.support.Automatons; @@ -90,7 +89,7 @@ public static SortedMap sortByAccessLevel(Map subsetCount.put( name, - privileges.values().stream().filter(p2 -> p2 != priv && Operations.subsetOf(priv.automaton, p2.automaton)).count() + privileges.values().stream().filter(p2 -> p2 != priv && Automatons.subsetOf(priv.automaton, p2.automaton)).count() ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index a6347d8b7ec77..201cb4b69e472 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -9,9 +9,10 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.apache.lucene.util.automaton.StatePair; +import org.apache.lucene.util.automaton.Transition; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; @@ -20,6 +21,7 @@ import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -223,7 +225,10 @@ private static Automaton buildAutomaton(String pattern) { ); } String regex = pattern.substring(1, pattern.length() - 1); - return new RegExp(regex).toAutomaton(); + return Operations.determinize( + new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + DEFAULT_DETERMINIZE_WORK_LIMIT + ); } else if (pattern.equals("*")) { return MATCH_ALL; } else { @@ -269,7 +274,7 @@ static Automaton wildcard(String text) { } i += length; } - return concatenate(automata); + return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public static Automaton unionAndMinimize(Collection automata) { @@ -288,7 +293,7 @@ public static Automaton intersectAndMinimize(Automaton a1, Automaton a2) { } private static Automaton minimize(Automaton automaton) { - return 
MinimizationOperations.minimize(automaton, maxDeterminizedStates); + return Operations.determinize(automaton, maxDeterminizedStates); } public static Predicate predicate(String... patterns) { @@ -329,7 +334,8 @@ private static Predicate predicate(Automaton automaton, final String toS } else if (automaton == EMPTY) { return Predicates.never(); } - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates); + automaton = Operations.determinize(automaton, maxDeterminizedStates); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); return new Predicate() { @Override public boolean test(String s) { @@ -368,4 +374,72 @@ static List getPatterns(Automaton automaton) { throw new IllegalArgumentException("recordPatterns is set to false"); } } + + /** + * Returns true if the language of a1 is a subset of the language of a2. + * Both automata must be determinized and must have no dead states. + * + *

Complexity: quadratic in number of states. + * Copied from Lucene's AutomatonTestUtil + */ + public static boolean subsetOf(Automaton a1, Automaton a2) { + if (a1.isDeterministic() == false) { + throw new IllegalArgumentException("a1 must be deterministic"); + } + if (a2.isDeterministic() == false) { + throw new IllegalArgumentException("a2 must be deterministic"); + } + assert Operations.hasDeadStatesFromInitial(a1) == false; + assert Operations.hasDeadStatesFromInitial(a2) == false; + if (a1.getNumStates() == 0) { + // Empty language is always a subset of any other language + return true; + } else if (a2.getNumStates() == 0) { + return Operations.isEmpty(a1); + } + + // TODO: cutover to iterators instead + Transition[][] transitions1 = a1.getSortedTransitions(); + Transition[][] transitions2 = a2.getSortedTransitions(); + ArrayDeque worklist = new ArrayDeque<>(); + HashSet visited = new HashSet<>(); + StatePair p = new StatePair(0, 0); + worklist.add(p); + visited.add(p); + while (worklist.size() > 0) { + p = worklist.removeFirst(); + if (a1.isAccept(p.s1) && a2.isAccept(p.s2) == false) { + return false; + } + Transition[] t1 = transitions1[p.s1]; + Transition[] t2 = transitions2[p.s2]; + for (int n1 = 0, b2 = 0; n1 < t1.length; n1++) { + while (b2 < t2.length && t2[b2].max < t1[n1].min) { + b2++; + } + int min1 = t1[n1].min, max1 = t1[n1].max; + + for (int n2 = b2; n2 < t2.length && t1[n1].max >= t2[n2].min; n2++) { + if (t2[n2].min > min1) { + return false; + } + if (t2[n2].max < Character.MAX_CODE_POINT) { + min1 = t2[n2].max + 1; + } else { + min1 = Character.MAX_CODE_POINT; + max1 = Character.MIN_CODE_POINT; + } + StatePair q = new StatePair(t1[n1].dest, t2[n2].dest); + if (visited.contains(q) == false) { + worklist.add(q); + visited.add(q); + } + } + if (min1 <= max1) { + return false; + } + } + } + return true; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java index b4952373dfdd3..92568c4f31c18 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -69,6 +70,11 @@ public AttributeSource attributes() { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index e39ddc170c0a9..54390365c62af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.MatchAllDocsQuery; @@ -400,14 +401,14 @@ public void onFailure(Exception e) { try (Engine.Searcher searcher = restoredShard.acquireSearcher("test")) { 
assertEquals(searcher.getIndexReader().maxDoc(), seqNoStats.getLocalCheckpoint()); TopDocs search = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); search = searcher.search( new MatchAllDocsQuery(), Integer.MAX_VALUE, new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)), false ); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); long previous = -1; for (ScoreDoc doc : search.scoreDocs) { FieldDoc fieldDoc = (FieldDoc) doc; @@ -430,8 +431,9 @@ public void onFailure(Exception e) { assertEquals(original.exists(), restored.exists()); if (original.exists()) { - Document document = original.docIdAndVersion().reader.document(original.docIdAndVersion().docId); - Document restoredDocument = restored.docIdAndVersion().reader.document(restored.docIdAndVersion().docId); + StoredFields storedFields = original.docIdAndVersion().reader.storedFields(); + Document document = storedFields.document(original.docIdAndVersion().docId); + Document restoredDocument = storedFields.document(restored.docIdAndVersion().docId); for (IndexableField field : document) { assertEquals(document.get(field.name()), restoredDocument.get(field.name())); } @@ -470,7 +472,7 @@ public IndexShard reindex(DirectoryReader reader, MappingMetadata mapping) throw for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { rootFieldsVisitor.reset(); - leafReader.document(i, rootFieldsVisitor); + leafReader.storedFields().document(i, rootFieldsVisitor); rootFieldsVisitor.postProcess(targetShard.mapperService()::fieldType); String id = rootFieldsVisitor.id(); BytesReference source = rootFieldsVisitor.source(); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java index 65d057408f8bd..8433f38e40a0c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.IndexSearcher; @@ -108,8 +109,10 @@ DirectoryReader wrapReader(DirectoryReader reader) throws IOException { logger.warn(snapReader + " " + reader); assertEquals(snapReader.maxDoc(), reader.maxDoc()); assertEquals(snapReader.numDocs(), reader.numDocs()); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < snapReader.maxDoc(); i++) { - assertEquals(snapReader.document(i).get("_source"), reader.document(i).get("_source")); + assertEquals(snapStoredFields.document(i).get("_source"), storedFields.document(i).get("_source")); } for (LeafReaderContext ctx : snapReader.leaves()) { if (ctx.reader() instanceof SegmentReader) { @@ -188,12 +191,14 @@ public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 3); assertEquals(snapReader.numDocs(), 2); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < 3; i++) { - assertEquals(snapReader.document(i).get("src"), 
reader.document(i).get("src")); + assertEquals(snapStoredFields.document(i).get("src"), storedFields.document(i).get("src")); } IndexSearcher searcher = newSearcher(snapReader); TopDocs id = searcher.search(new TermQuery(new Term("id", "1")), 10); - assertEquals(0, id.totalHits.value); + assertEquals(0, id.totalHits.value()); } targetDir = newDirectory(targetDir); @@ -321,7 +326,7 @@ public boolean keepFullyDeletedSegment(IOSupplier readerIOSupplier) try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 1); assertEquals(snapReader.numDocs(), 1); - assertEquals("3", snapReader.document(0).getField("rank").stringValue()); + assertEquals("3", snapReader.storedFields().document(0).getField("rank").stringValue()); } try (IndexReader writerReader = DirectoryReader.open(writer)) { assertEquals(writerReader.maxDoc(), 2); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java index bb727204e2651..114ad90354c61 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java @@ -279,7 +279,7 @@ private void assertCorrectLuceneQuery(String name, Query query, List exp booleanClauses.size() ); for (int i = 0; i < booleanClauses.size(); i++) { - Query clauseQuery = booleanClauses.get(i).getQuery(); + Query clauseQuery = booleanClauses.get(i).query(); assertTrue(name + " query " + query + " expected to be a BoostQuery", clauseQuery instanceof BoostQuery); // FeatureQuery is not visible so we check the String representation assertTrue(name + " query " + query + " expected to be a FeatureQuery", clauseQuery.toString().contains("FeatureQuery")); @@ -353,8 +353,8 @@ protected void 
doAssertLuceneQuery(WeightedTokensQueryBuilder queryBuilder, Quer Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java index c40dd00e0e350..6fe271d1b05e3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java @@ -96,25 +96,25 @@ public void testSearch() throws Exception { ); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); TopDocs result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(0)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value2")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(1)); // this doc has been marked as deleted: indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", 
"value3")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(0L)); + assertThat(result.totalHits.value(), equalTo(0L)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value4")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(3)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 560dee9b5843c..db250b16eab16 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; @@ -205,8 +206,9 @@ public void testKnnVectors() throws Exception { FloatVectorValues vectorValues = leafReader.getFloatVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + 
assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -215,7 +217,7 @@ public void testKnnVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getFloatVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -239,8 +241,9 @@ public void testKnnByteVectors() throws Exception { ByteVectorValues vectorValues = leafReader.getByteVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -249,7 +252,7 @@ public void testKnnByteVectors() throws Exception { // Check that we can't see fieldB assertNull(leafReader.getByteVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -274,11 +277,6 @@ public void testStoredFieldsString() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - 
assertEquals("testA", d2.get("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -306,11 +304,6 @@ public void testStoredFieldsBinary() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(new BytesRef("testA"), d2.getBinaryValue("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -338,11 +331,6 @@ public void testStoredFieldsInt() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -370,11 +358,6 @@ public void testStoredFieldsLong() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1L, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -402,11 +385,6 @@ public void testStoredFieldsFloat() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1F, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -434,11 +412,6 @@ public void 
testStoredFieldsDouble() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1D, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -468,7 +441,7 @@ public void testVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - Fields vectors = ir.getTermVectors(0); + Fields vectors = ir.termVectors().get(0); Set seenFields = new HashSet<>(); for (String field : vectors) { seenFields.add(field); @@ -615,7 +588,6 @@ public void testSortedSetDocValues() throws Exception { assertNotNull(dv); assertTrue(dv.advanceExact(0)); assertEquals(0, dv.nextOrd()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd()); assertEquals(new BytesRef("testA"), dv.lookupOrd(0)); assertNull(segmentReader.getSortedSetDocValues("fieldB")); @@ -702,11 +674,6 @@ public void testSourceFilteringIntegration() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals("{\"fieldA\":\"testA\"}", d2.getBinaryValue(SourceFieldMapper.NAME).utf8ToString()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -1201,7 +1168,7 @@ public void testFilterAwayAllVectors() throws Exception { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldB"))); // sees no fields - assertNull(ir.getTermVectors(0)); + assertNull(ir.termVectors().get(0)); TestUtil.checkReader(ir); IOUtils.close(ir, iw, dir); @@ -1229,14 
+1196,9 @@ public void testEmpty() throws Exception { assertNull(segmentReader.terms("foo")); // see no vectors - assertNull(segmentReader.getTermVectors(0)); assertNull(segmentReader.termVectors().get(0)); // see no stored fields - { - Document document = segmentReader.document(0); - assertEquals(0, document.getFields().size()); - } { Document document = segmentReader.storedFields().document(0); assertEquals(0, document.getFields().size()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java index 265714ee6ea16..073b3b92a43a5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -17,6 +16,7 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import java.util.Collection; @@ -83,7 +83,7 @@ public void testPrivilegesForGetCheckPointAction() { public void testRelationshipBetweenPrivileges() { assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("view_index_metadata")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -91,12 +91,12 @@ public void 
testRelationshipBetweenPrivileges() { ); assertThat( - Operations.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(true) ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create", "create_doc", "index", "delete")).automaton, IndexPrivilege.get(Set.of("write")).automaton ), @@ -104,7 +104,7 @@ public void testRelationshipBetweenPrivileges() { ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create_index", "delete_index")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -122,7 +122,7 @@ public void testCrossClusterReplicationPrivileges() { "indices:admin/seq_no/renew_retention_lease" ).forEach(action -> assertThat(crossClusterReplication.predicate.test(action + randomAlphaOfLengthBetween(0, 8)), is(true))); assertThat( - Operations.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), + Automatons.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), is(true) ); @@ -139,10 +139,10 @@ public void testCrossClusterReplicationPrivileges() { ); assertThat( - Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(false) ); - assertThat(Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); + assertThat(Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 6f3c435eb12f6..a58acf82ea44e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; @@ -218,13 +218,13 @@ public void testIndexCollapse() throws Exception { Set name = Sets.newHashSet(first.name().iterator().next(), second.name().iterator().next()); IndexPrivilege index = IndexPrivilege.get(name); - if (Operations.subsetOf(second.getAutomaton(), first.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - } else if (Operations.subsetOf(first.getAutomaton(), second.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + if (Automatons.subsetOf(second.getAutomaton(), first.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + } else if (Automatons.subsetOf(first.getAutomaton(), second.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } else { - assertFalse(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - assertFalse(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + 
assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java index 0b2e48bd20dfe..94f91f427e19a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.List; -import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; import static org.elasticsearch.xpack.core.security.support.Automatons.pattern; import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; import static org.elasticsearch.xpack.core.security.support.Automatons.predicate; @@ -115,12 +114,12 @@ public void testPatternComplexity() { } private void assertMatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertTrue(runAutomaton.run(text)); } private void assertMismatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertFalse(runAutomaton.run(text)); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index d3dcd7ae36f59..65d53d3adabe7 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ 
b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -125,7 +125,7 @@ public void testRunner() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = searchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -176,7 +176,7 @@ public void testRunner() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -202,7 +202,7 @@ public void testRunnerGeoMatchType() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("location"), is(equalTo("POINT(10.0 10.0)"))); @@ -244,7 +244,7 @@ public void testRunnerGeoMatchType() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = 
enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -286,7 +286,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("range"), is(equalTo(Map.of("lt", 10, "gt", 1)))); @@ -330,7 +330,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -376,7 +376,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { new SearchRequest(sourceIndexName).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("subnet"), is(equalTo("10.0.0.0/8"))); @@ -421,7 +421,7 @@ public void testRunnerRangeTypeWithIpRange() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -460,7 +460,7 @@ public void testRunnerMultiSource() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -522,7 +522,7 @@ public void testRunnerMultiSource() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -564,7 +564,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -633,7 +633,7 @@ public void testRunnerMultiSourceDocIdCollisions() throws Exception { new 
SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -688,7 +688,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -749,7 +749,7 @@ public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -943,7 +943,7 @@ public void testRunnerObjectSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -993,7 +993,7 @@ public void testRunnerObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1051,7 +1051,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1100,7 +1100,7 @@ public void testRunnerExplicitObjectSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1158,7 +1158,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - 
assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1209,7 +1209,7 @@ public void testRunnerExplicitObjectSourceMappingRangePolicy() throws Exception ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1273,7 +1273,7 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1329,7 +1329,7 @@ public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1395,7 +1395,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception assertResponse( client().search(new 
SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1450,7 +1450,7 @@ public void testRunnerTwoObjectLevelsSourceMappingRangePolicy() throws Exception new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1519,7 +1519,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1580,7 +1580,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-19T14:00:00Z")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(0L)) + enrichSearchResponse -> 
assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); assertResponse( @@ -1590,7 +1590,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E ) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1614,7 +1614,7 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021/08/20 at 14:00")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); // Validate segments @@ -1657,7 +1657,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("data.field1"), is(equalTo("value1"))); @@ -1704,7 +1704,7 @@ public void testRunnerDottedKeyNameSourceMapping() throws Exception { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); 
assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -1736,7 +1736,7 @@ public void testRunnerWithForceMergeRetry() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -1868,7 +1868,7 @@ protected void afterRefreshEnrichIndex(ActionListener listener) { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -1901,7 +1901,7 @@ public void testRunnerWithEmptySegmentsResponse() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2007,7 +2007,7 @@ public void testRunnerWithShardFailuresInSegmentResponse() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - 
assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2405,7 +2405,7 @@ public void testRunnerValidatesIndexIntegrity() throws Exception { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java index 00f22aca2cb92..8dbc9b0f4f43a 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -64,7 +64,7 @@ public void testExecute() throws Exception { assertThat(response.getResponses().length, equalTo(numSearches)); for (int i = 0; i < numSearches; i++) { assertThat(response.getResponses()[i].isFailure(), is(false)); - assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); assertThat( 
response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index cd98b43adc159..5e1fde0dfb942 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -1073,7 +1073,7 @@ private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnecto final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) .toList(); - return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value); + return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value()); } private static ConnectorSearchResult hitToConnector(SearchHit searchHit) { @@ -1115,7 +1115,7 @@ private void isDataIndexNameAlreadyInUse(String indexName, String connectorId, A client.search(searchRequest, new ActionListener<>() { @Override public void onResponse(SearchResponse searchResponse) { - boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value > 0L; + boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value() > 0L; listener.onResponse(indexNameIsInUse); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 9ef895a3a5786..ce6f7f0dbf2b2 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -417,7 +417,7 @@ private ConnectorSyncJobsResult mapSearchResponseToConnectorSyncJobsList(SearchR .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) .toList(); - return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value); + return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value()); } private static ConnectorSyncJobSearchResult hitToConnectorSyncJob(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 2eec155ae8ea2..8bf4bbd5716b7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -436,7 +436,7 @@ private static QueryRulesetResult mapSearchResponseToQueryRulesetList(SearchResp final List rulesetResults = Arrays.stream(response.getHits().getHits()) .map(QueryRulesIndexService::hitToQueryRulesetListItem) .toList(); - return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value); + return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value()); } private static QueryRulesetListItem hitToQueryRulesetListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 9e8a8f750b764..30d533aeb9ae5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -416,7 +416,7 @@ private static SearchApplicationResult mapSearchResponse(SearchResponse response final List apps = Arrays.stream(response.getHits().getHits()) .map(SearchApplicationIndexService::hitToSearchApplicationListItem) .toList(); - return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value); + return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value()); } private static SearchApplicationListItem hitToSearchApplicationListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index be1d4c0871ca7..2b7b8b074fa71 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -582,8 +582,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(Fields.HITS); if (totalHits != null) { builder.startObject(Fields.TOTAL); - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? 
"eq" : "gte"); builder.endObject(); } if (events != null) { diff --git a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt index b04d28654f1d5..00c08096fd084 100644 --- a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt +++ b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt @@ -769,7 +769,7 @@ process where command_line regex "^.*?net.exe" regexSingleArgInsensitive process where command_line regex~ "^.*?net.exe" ; -"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":65791,"case_insensitive":true ; regexMultiArg @@ -781,7 +781,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe") regexMultiArgInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":65791,"case_insensitive":true ; regexMultiMultiArgVariant @@ -793,7 +793,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\ regexMultiMultiArgVariantInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\system32\\\\net1\\s+") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":65791,"case_insensitive":true ; regexMultiArgWithScript diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java index 52ce2636e914b..be1ad0391c8ca 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/LikePattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -51,8 +50,7 @@ public char escape() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java index f437dc5819dcb..4e559f564acb1 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | 
RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java index 7cedbc4742138..3e9cbf92727c2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -39,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index 1c9c97a364fc7..9633051781f4a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -120,7 +120,7 @@ static List> docSlices(IndexReader indexReader, i } static List> segmentSlices(List leafContexts) { - IndexSearcher.LeafSlice[] gs = 
IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE); - return Arrays.stream(gs).map(g -> Arrays.stream(g.leaves).map(PartialLeafReaderContext::new).toList()).toList(); + IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE, false); + return Arrays.stream(gs).map(g -> Arrays.stream(g.partitions).map(PartialLeafReaderContext::new).toList()).toList(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2e32d20a2365e..0f600958b93b3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; @@ -230,8 +231,9 @@ static final class PerShardCollector { if (sortAndFormats.isEmpty()) { throw new IllegalStateException("sorts must not be disabled in TopN"); } + // We don't use CollectorManager here as we don't retrieve the total hits and sort by score. 
- this.topFieldCollector = TopFieldCollector.create(sortAndFormats.get().sort, limit, 0); + this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, null, 0, false).newCollector(); } LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java index e9063c9597c5f..c92dc75397729 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; /** * A subset of a {@link LeafReaderContext}. @@ -16,6 +17,10 @@ * @param maxDoc one more than the last document */ public record PartialLeafReaderContext(LeafReaderContext leafReaderContext, int minDoc, int maxDoc) { + public PartialLeafReaderContext(IndexSearcher.LeafReaderContextPartition partition) { + this(partition.ctx, partition.minDocId, partition.maxDocId); + } + public PartialLeafReaderContext(LeafReaderContext leafReaderContext) { this(leafReaderContext, 0, leafReaderContext.reader().maxDoc()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java index 6937f1a8c7772..f70cfe1dc8a41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; @@ -89,7 +90,7 @@ public Page getOutput() { continue; } final DocCollector collector = new DocCollector(docsBuilder); - scorer.score(collector, leaf.reader().getLiveDocs()); + scorer.score(collector, leaf.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); int matches = collector.matches; if (segmentsBuilder != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java index 09166f0cff7a8..0af22a357aeca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java @@ -37,7 +37,7 @@ public static EvalOperator.ExpressionEvaluator.Factory toEvaluator( * we couldn't get a nice toDot - so we call UTF32ToUTF8 ourselves. 
*/ Automaton automaton = Operations.determinize(new UTF32ToUTF8().convert(utf32Automaton), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - ByteRunAutomaton run = new ByteRunAutomaton(automaton, true, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + ByteRunAutomaton run = new ByteRunAutomaton(automaton, true); return new AutomataMatchEvaluator.Factory(source, field, run, toDot(automaton)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index cae0f3e084d54..023fc66a0d173 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -383,7 +383,7 @@ public NamedExpression visitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePat // use the fast run variant result = new UnresolvedNamePattern( src, - new CharacterRunAutomaton(Operations.concatenate(list)), + new CharacterRunAutomaton(Operations.determinize(Operations.concatenate(list), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)), patternString.toString(), nameString.toString() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java index 386c983c8e6af..3694d6f478caa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java @@ -69,14 +69,6 @@ public String toString(String field) { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException 
{ - final ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { @@ -96,12 +88,12 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti * can't do that because we need the check the number of fields. */ if (lfd instanceof LeafNumericFieldData n) { - return scorerSupplier(context, n.getLongValues(), this, boost, scoreMode); + return scorerSupplier(context, n.getLongValues(), boost, scoreMode); } if (lfd instanceof LeafOrdinalsFieldData o) { - return scorerSupplier(context, o.getOrdinalsValues(), this, boost, scoreMode); + return scorerSupplier(context, o.getOrdinalsValues(), boost, scoreMode); } - return scorerSupplier(context, lfd.getBytesValues(), this, boost, scoreMode); + return scorerSupplier(context, lfd.getBytesValues(), boost, scoreMode); } @Override @@ -113,7 +105,6 @@ public boolean isCacheable(LeafReaderContext ctx) { private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedNumericDocValues sortedNumerics, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -122,16 +113,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final PointValues points = context.reader().getPointValues(fieldData.getFieldName()); if (points != null && points.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { - return new PredicateScorerSupplier( - weight, - boost, - scoreMode, - maxDoc, - MULTI_VALUE_MATCH_COST, - sortedNumerics::advanceExact - ); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, sortedNumerics::advanceExact); } } final CheckedIntPredicate predicate = doc -> { @@ -144,13 
+128,12 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedSetDocValues sortedSetDocValues, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -159,10 +142,9 @@ private ScorerSupplier scorerSupplier( // check for dense field final Terms terms = context.reader().terms(fieldData.getFieldName()); if (terms != null && terms.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -181,20 +163,18 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedBinaryDocValues sortedBinaryDocValues, - Weight weight, float boost, ScoreMode scoreMode ) { final int maxDoc = context.reader().maxDoc(); if (FieldData.unwrapSingleton(sortedBinaryDocValues) != null) { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -212,7 +192,7 @@ private ScorerSupplier scorerSupplier( } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } }; } @@ -266,13 +246,11 @@ public int hashCode() { private static class DocIdSetIteratorScorerSupplier extends ScorerSupplier { - private 
final Weight weight; private final float score; private final ScoreMode scoreMode; private final DocIdSetIterator docIdSetIterator; - private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { - this.weight = weight; + private DocIdSetIteratorScorerSupplier(float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { this.score = score; this.scoreMode = scoreMode; this.docIdSetIterator = docIdSetIterator; @@ -280,7 +258,7 @@ private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode sco @Override public Scorer get(long leadCost) { - return new ConstantScoreScorer(weight, score, scoreMode, docIdSetIterator); + return new ConstantScoreScorer(score, scoreMode, docIdSetIterator); } @Override @@ -290,23 +268,13 @@ public long cost() { } private static class PredicateScorerSupplier extends ScorerSupplier { - - private final Weight weight; private final float score; private final ScoreMode scoreMode; private final int maxDoc; private final int matchCost; private final CheckedIntPredicate predicate; - private PredicateScorerSupplier( - Weight weight, - float score, - ScoreMode scoreMode, - int maxDoc, - int matchCost, - CheckedIntPredicate predicate - ) { - this.weight = weight; + private PredicateScorerSupplier(float score, ScoreMode scoreMode, int maxDoc, int matchCost, CheckedIntPredicate predicate) { this.score = score; this.scoreMode = scoreMode; this.maxDoc = maxDoc; @@ -327,7 +295,7 @@ public float matchCost() { return matchCost; } }; - return new ConstantScoreScorer(weight, score, scoreMode, iterator); + return new ConstantScoreScorer(score, scoreMode, iterator); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java index 107c2af11c4f1..04da5d406fbb9 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java @@ -108,7 +108,7 @@ public void testQueries() throws Exception { QueryList queryList = QueryList.termQueryList(uidField, mock(SearchExecutionContext.class), inputTerms, KEYWORD); assertThat(queryList.getPositionCount(), equalTo(6)); assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2"))))); - assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", new BytesRef("c1"), new BytesRef("a2")))); + assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2"))))); assertThat(queryList.getQuery(2), equalTo(new TermQuery(new Term("uid", new BytesRef("z2"))))); assertNull(queryList.getQuery(3)); assertThat(queryList.getQuery(4), equalTo(new TermQuery(new Term("uid", new BytesRef("a3"))))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index 2ba397a3cb3de..95444c9b2423f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -232,7 +232,7 @@ private Object randomValue() { private List docFor(int i, Iterable values) { List fields = new ArrayList<>(); - fields.add(new LongField("i", i)); + fields.add(new LongField("i", i, Field.Store.NO)); fields.add(new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)); switch (fieldType) { case "long", "integer", "short", "byte" -> { @@ -270,7 +270,10 @@ public List> build(RandomIndexWriter iw) throws IOException { List> fieldValues = new 
ArrayList<>(100); for (int i = 0; i < 100; i++) { iw.addDocument( - List.of(new LongField("i", i), new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)) + List.of( + new LongField("i", i, Field.Store.NO), + new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO) + ) ); fieldValues.add(List.of()); } diff --git a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 3623d3671e83f..6d90b0e67ee83 100644 --- a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.graph.test; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; @@ -165,7 +165,7 @@ public void testLargeNumberTermsStartCrawl() { VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); peopleNames.addInclude("john", 1); - for (int i = 0; i < BooleanQuery.getMaxClauseCount() + 1; i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount() + 1; i++) { peopleNames.addInclude("unknown" + i, 1); } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 36e8eaf94c8be..b60ce13e0228c 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ 
b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.ExceptionsHelper; @@ -564,7 +564,7 @@ private static void addBigOrClause(Map> lastHopFindings, Boo for (Entry> entry : lastHopFindings.entrySet()) { numClauses += entry.getValue().size(); } - if (numClauses < BooleanQuery.getMaxClauseCount()) { + if (numClauses < IndexSearcher.getMaxClauseCount()) { // We can afford to build a Boolean OR query with individual // boosts for interesting terms for (Entry> entry : lastHopFindings.entrySet()) { @@ -755,7 +755,7 @@ private double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) { private static void addNormalizedBoosts(BoolQueryBuilder includesContainer, VertexRequest vr) { TermBoost[] termBoosts = vr.includeValues(); - if ((includesContainer.should().size() + termBoosts.length) > BooleanQuery.getMaxClauseCount()) { + if ((includesContainer.should().size() + termBoosts.length) > IndexSearcher.getMaxClauseCount()) { // Too many terms - we need a cheaper form of query to execute this List termValues = new ArrayList<>(); for (TermBoost tb : termBoosts) { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 73c0f6d4c7685..54d83af8f5d95 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -135,7 +135,7 @@ public void testBulkOperations() throws Exception { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 1697b33fedd92..504cf00943ef5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -493,7 +493,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -501,7 +501,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a", "b")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -509,7 +509,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("d")), 10 ); - 
assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -517,7 +517,7 @@ public void testSuccessfulParse() throws IOException { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("z")), 10 ); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index 47ac33a5cf9ab..367d68979d307 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -225,9 +225,9 @@ private void assertSparseEmbeddingLuceneQuery(Query query) { BooleanQuery innerBooleanQuery = (BooleanQuery) innerQuery; assertThat(innerBooleanQuery.clauses().size(), equalTo(queryTokenCount)); innerBooleanQuery.forEach(c -> { - assertThat(c.getOccur(), equalTo(SHOULD)); - assertThat(c.getQuery(), instanceOf(BoostQuery.class)); - assertThat(((BoostQuery) c.getQuery()).getBoost(), equalTo(TOKEN_WEIGHT)); + assertThat(c.occur(), equalTo(SHOULD)); + assertThat(c.query(), instanceOf(BoostQuery.class)); + assertThat(((BoostQuery) c.query()).getBoost(), equalTo(TOKEN_WEIGHT)); }); } @@ -249,7 +249,7 @@ private Query assertOuterBooleanQuery(Query query) { List outerMustClauses = new ArrayList<>(); List outerFilterClauses = new ArrayList<>(); for (BooleanClause clause : outerBooleanQuery.clauses()) { - BooleanClause.Occur occur = clause.getOccur(); + BooleanClause.Occur occur = clause.occur(); if (occur == MUST) { outerMustClauses.add(clause); } else if (occur == FILTER) { @@ -262,7 +262,7 @@ private Query 
assertOuterBooleanQuery(Query query) { assertThat(outerMustClauses.size(), equalTo(1)); assertThat(outerFilterClauses.size(), equalTo(1)); - return outerMustClauses.get(0).getQuery(); + return outerMustClauses.get(0).query(); } @Override diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java index 081a170aac9f1..bb4464542a422 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java @@ -87,7 +87,7 @@ protected void doExecute(Task task, GetPipelineRequest request, ActionListener { - final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value); + final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value()); final Map pipelineSources = Maps.newMapWithExpectedSize(numHits); final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { @@ -148,14 +148,14 @@ private void handleFilteringSearchResponse( ActionListener listener ) { int numberOfHitsSeenSoFar = numberOfHitsSeenPreviously + searchResponse.getHits().getHits().length; - if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + numberOfHitsSeenSoFar + "] than expected [" - + searchResponse.getHits().getTotalHits().value + + searchResponse.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." 
) @@ -179,7 +179,7 @@ private void handleFilteringSearchResponse( } } - if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onResponse(new GetPipelineResponse(pipelineSources)); } else { diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index c594c9f553164..216f82552353b 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -19,6 +19,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.LevenshteinAutomata; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.logging.DeprecationCategory; @@ -291,7 +292,10 @@ public Query regexpQuery( return new MatchNoDocsQuery(); } - final Automaton automaton = new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates); + final Automaton automaton = Operations.determinize( + new RegExp(regexp, syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ); final CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); if (runAutomaton.run(this.value)) { return new MatchAllDocsQuery(); diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java 
b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index e52237f4d507e..3a50cc8143485 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -242,11 +242,8 @@ public int docValueCount() { @Override public long nextOrd() { - if (ordsForThisDoc.hasNext()) { - return ordsForThisDoc.next(); - } else { - return NO_MORE_ORDS; - } + assert ordsForThisDoc.hasNext(); + return ordsForThisDoc.next(); } @Override diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java index c29e4513562fc..04599549cc3cc 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldTypeTests.java @@ -77,11 +77,7 @@ private CollectionBasedSortedSetDocValues(List docValues) { @Override public long nextOrd() { - currentOrd++; - if (currentOrd >= docValues.size()) { - return NO_MORE_ORDS; - } - return currentOrd; + return ++currentOrd; } @Override diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 5b04225cee105..b43d87c17e644 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ 
b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -650,7 +650,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio List fields = new ArrayList<>(); if (indexed && hasDocValues) { - fields.add(new LongField(fieldType().name(), numericValue)); + fields.add(new LongField(fieldType().name(), numericValue, Field.Store.NO)); } else if (hasDocValues) { fields.add(new SortedNumericDocValuesField(fieldType().name(), numericValue)); } else if (indexed) { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java index 00532d95574c0..4f42103bc4541 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.Locale; @@ -213,9 +212,9 @@ static CompiledAutomaton prefixAutomaton(String versionPrefix, boolean caseInsen a = Operations.concatenate(a, Automata.makeAnyBinary()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); + a = Operations.determinize(a, 0); - return new CompiledAutomaton(a, null, true, 0, true); + return new CompiledAutomaton(a, false, true, true); } static class EncodedVersion { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java index 
387a49a29dc23..1e5ecf19bdf81 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java @@ -40,11 +40,11 @@ class VersionFieldWildcardQuery extends AutomatonQuery { private static final byte WILDCARD_CHAR = '?'; VersionFieldWildcardQuery(Term term, boolean caseInsensitive) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true); + super(term, toAutomaton(term, caseInsensitive), true); } VersionFieldWildcardQuery(Term term, boolean caseInsensitive, RewriteMethod rewriteMethod) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true, rewriteMethod); + super(term, toAutomaton(term, caseInsensitive), true, rewriteMethod); } private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive) { @@ -114,7 +114,7 @@ private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive if (containsPreReleaseSeparator == false) { automata.add(Operations.optional(Automata.makeChar(VersionEncoder.NO_PRERELEASE_SEPARATOR_BYTE))); } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java index 17e1d70cbb471..01f0fdb256551 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java @@ -47,7 +47,8 @@ public VersionStringDocValuesField(SortedSetDocValues input, String name) { public void setNextDocId(int docId) 
throws IOException { count = 0; if (input.advanceExact(docId)) { - for (long ord = input.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = input.nextOrd()) { + for (int i = 0; i < input.docValueCount(); i++) { + long ord = input.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index b49b4500ce7b7..6bf2917c601ac 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -187,7 +187,8 @@ public Query regexpQuery( matchFlags, DEFAULT_PROVIDER, maxDeterminizedStates, - method == null ? CONSTANT_SCORE_REWRITE : method + method == null ? CONSTANT_SCORE_REWRITE : method, + true ) { @Override diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java index 94d8a144b0bd6..c89d1f8493b6b 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java @@ -117,7 +117,7 @@ public void testPrefixQuery() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A").caseInsensitive(true)), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[0].getSourceAsMap().get("version")); } ); @@ -134,7 
+134,7 @@ public void testSort() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.DESC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("1.3.567#12", hits[0].getSortValues()[0]); assertEquals("1.2.3alpha", hits[1].getSortValues()[0]); @@ -150,7 +150,7 @@ public void testSort() throws IOException { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); var hits = response.getHits().getHits(); assertEquals("1.0.0", hits[0].getSortValues()[0]); assertEquals("1.3.0+build.1234567", hits[1].getSortValues()[0]); @@ -179,7 +179,7 @@ public void testRegexQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "2.*0")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -187,21 +187,21 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "<0-10>.<0-10>.*al.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", 
response.getHits().getHits()[1].getSourceAsMap().get("version")); } ); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "1.[0-9].[0-9].*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("1.3.0+build.1234567", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); // test case sensitivity / insensitivity assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*alpha.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -211,7 +211,7 @@ public void testRegexQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -234,7 +234,7 @@ public void testFuzzyQuery() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.fuzzyQuery("version", "2.3.0")), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("2.1.0", 
response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); assertEquals("2.a3.0", response.getHits().getHits()[2].getSourceAsMap().get("version")); @@ -288,7 +288,7 @@ public void testWildcardQuery() throws Exception { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0-alpha.2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -297,7 +297,7 @@ public void testWildcardQuery() throws Exception { private void checkWildcardQuery(String indexName, String query, String... expectedResults) { assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", query)), response -> { - assertEquals(expectedResults.length, response.getHits().getTotalHits().value); + assertEquals(expectedResults.length, response.getHits().getTotalHits().value()); for (int i = 0; i < expectedResults.length; i++) { String expected = expectedResults[i]; Object actual = response.getHits().getHits()[i].getSourceAsMap().get("version"); @@ -321,7 +321,7 @@ public void testStoreMalformed() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addDocValueField("version"), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("1.invalid.0", response.getHits().getAt(0).field("version").getValue()); @@ -359,7 +359,7 @@ public void testStoreMalformed() throws Exception { assertResponse( 
client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("2.2.0", hits[0].getSortValues()[0]); assertEquals("", hits[1].getSortValues()[0]); @@ -437,36 +437,36 @@ public void testMultiValues() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addSort("version", SortOrder.ASC), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("2", response.getHits().getAt(1).getId()); assertEquals("3", response.getHits().getAt(2).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "3.0.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "4.alpha.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2", response.getHits().getAt(0).getId()); }); // range assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.5.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.5.0")), - response -> assertEquals(3, response.getHits().getTotalHits().value) + response -> assertEquals(3, 
response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("5.0.0").to("6.0.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 8c245a4543abe..39519dc7931d0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -345,7 +345,7 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw assertResponse( prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false).setTrackTotalHits(true).setSize(10000), stateDocsResponse -> { - assertThat(stateDocsResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(5L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(5L)); int nonExistingJobDocsCount = 0; List nonExistingJobExampleIds = new ArrayList<>(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 2e096f3262cb6..9864c88d1405c 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -372,7 +372,7 @@ protected long countForecastDocs(String jobId, String forecastId) { .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) .filter(QueryBuilders.termQuery(Forecast.FORECAST_ID.getPreferredName(), forecastId)) ), - searchResponse -> count.set(searchResponse.getHits().getTotalHits().value) + searchResponse -> count.set(searchResponse.getHits().getTotalHits().value()) ); return count.get(); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java index 94bc3150cb12e..5f82d996c87fa 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java @@ -77,7 +77,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t ++numStateRecords; } } - assertThat(stateDocsResponse1.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse1.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } @@ -117,7 +117,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t } } - assertThat(stateDocsResponse2.getHits().getTotalHits().value, equalTo(3L)); + assertThat(stateDocsResponse2.getHits().getTotalHits().value(), equalTo(3L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(2)); @@ -154,7 +154,7 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndTimeAdvanced() throws ++numStateRecords; } } - 
assertThat(stateDocsResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index f5d0b23b437f3..8a6499ec3bb6a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -164,7 +164,7 @@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws + testDocsWithEmptyFeatureImportance + "] test docs with empty feature importance" + " from " - + sourceData.getHits().getTotalHits().value + + sourceData.getHits().getTotalHits().value() + " hits.\n" + badDocuments, trainingDocsWithEmptyFeatureImportance + testDocsWithEmptyFeatureImportance, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index 260a5dea0a3c1..388583f6f8656 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -295,7 +295,7 @@ private Quantiles getQuantiles(String jobId) throws Exception { prepareSearch(".ml-state*").setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))).setSize(1), response -> 
{ SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); try ( XContentParser parser = JsonXContent.jsonXContent.createParser( XContentParserConfiguration.EMPTY, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 8fbad7ccd3877..1505d374dfa08 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -396,7 +396,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti } assertResponse(prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value == docCount) { + if (searchResponse.getHits().getTotalHits().value() == docCount) { long seenCount = SearchResponseUtils.getTotalHitsValue( prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) @@ -404,7 +404,7 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti logger.debug("We stopped during analysis: [{}] < [{}]", seenCount, docCount); assertThat(seenCount, lessThan((long) docCount)); } else { - logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); + logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value(), docCount); } }); diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java index c15750de3b336..edc851def4468 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java @@ -77,7 +77,7 @@ public void testCountCorrelation() { .setSize(0) .setTrackTotalHits(true), percentilesSearch -> { - long totalHits = percentilesSearch.getHits().getTotalHits().value; + long totalHits = percentilesSearch.getHits().getTotalHits().value(); Percentiles percentiles = percentilesSearch.getAggregations().get("percentiles"); Tuple aggs = buildRangeAggAndSetExpectations( percentiles, diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java index 8fddfa47c377c..139d1b074c7b2 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java @@ -192,7 +192,7 @@ private boolean doesLocalAuditMessageExist(String message) { .setQuery(new MatchPhraseQueryBuilder("message", message)) .get(); try { - return response.getHits().getTotalHits().value > 0; + return response.getHits().getTotalHits().value() > 0; } finally { response.decRef(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 17fe20c5115ff..dfb960794537b 
100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -767,7 +767,7 @@ private static DataCounts getDataCountsFromIndex(String jobId) throws IOExceptio prepareSearch().setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId))), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { setOnce.set(new DataCounts(jobId)); return; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index a47b67e490851..210973f2601d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -189,7 +189,7 @@ private void initChunkedBucketSearcher( ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); if (totalHits > 0) { InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 6aaa1e50f2e8a..d676e6cc9d065 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -415,7 +415,7 @@ private void checkModelIdAgainstTags(String modelId, ActionListener listen ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_MODEL_ID_AND_TAGS_UNIQUE, modelId)) ); @@ -443,7 +443,7 @@ private void checkTagsAgainstModelIds(List tags, ActionListener li ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_TAGS_AND_MODEL_IDS_UNIQUE, tags)) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 2ec460a08caf9..759538b4cdc63 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -433,7 +433,7 @@ private static void checkDestIndexIsEmptyIfExists( TransportSearchAction.TYPE, destEmptySearch, ActionListener.wrap(searchResponse -> { - if (searchResponse.getHits().getTotalHits().value > 0) { + if (searchResponse.getHits().getTotalHits().value() > 0) { listener.onFailure(ExceptionsHelper.badRequestException("dest index [{}] must be empty", destIndex)); } else { listener.onResponse(startContext); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java index f0e03a1e94973..7c41dbd463413 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java @@ -62,7 +62,7 @@ public static DataExtractor.DataSummary getDataSummary(SearchResponse searchResp } else { Long earliestTime = toLongIfFinite((aggregations.get(EARLIEST_TIME)).value()); Long latestTime = toLongIfFinite((aggregations.get(LATEST_TIME)).value()); - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); return new DataExtractor.DataSummary(earliestTime, latestTime, totalHits); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 20da61a3d6910..7829adb395675 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -226,7 +226,7 @@ public void findDatafeedIdsForJobIds(Collection jobIds, ActionListenerdelegateFailureAndWrap((delegate, response) -> { Set datafeedIds = new HashSet<>(); // There cannot be more than one datafeed per job - assert response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { @@ -259,7 +259,7 @@ public void findDatafeedsByJobIds( listener.delegateFailureAndWrap((delegate, response) -> { Map datafeedsByJobId = new HashMap<>(); // There cannot be more than one datafeed per job - assert 
response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { DatafeedConfig.Builder builder = parseLenientlyFromSource(hit.getSourceRef()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index c890ab599c380..315d2249d00cb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -377,7 +377,7 @@ public DataSummary collectDataSummary() { SearchRequestBuilder searchRequestBuilder = buildDataSummarySearchRequestBuilder(); SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); try { - long rows = searchResponse.getHits().getTotalHits().value; + long rows = searchResponse.getHits().getTotalHits().value(); LOGGER.debug(() -> format("[%s] Data summary rows [%s]", context.jobId, rows)); return new DataSummary(rows, organicFeatures.length + processedFeatures.length); } finally { @@ -396,7 +396,7 @@ public void collectDataSummaryAsync(ActionListener dataSummaryActio TransportSearchAction.TYPE, searchRequestBuilder.request(), dataSummaryActionListener.delegateFailureAndWrap( - (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)) + (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value(), numberOfFields)) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index dfcc12d98be41..64cf493028ad1 100644 
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -169,7 +169,7 @@ private InferenceState restoreInferenceState() { ); try { Max maxIncrementalIdAgg = searchResponse.getAggregations().get(DestinationIndex.INCREMENTAL_ID); - long processedTestDocCount = searchResponse.getHits().getTotalHits().value; + long processedTestDocCount = searchResponse.getHits().getTotalHits().value(); Long lastIncrementalId = processedTestDocCount == 0 ? null : (long) maxIncrementalIdAgg.value(); if (lastIncrementalId != null) { LOGGER.debug( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index 482e82f9ec303..fdd4bdd120f6a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -115,7 +115,7 @@ private void searchIfTestDocsExist(ActionListener listener) { ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, - listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value > 0)) + listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value() > 0)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index 3ef2affa5d399..0b3dd573deaae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -70,7 +70,7 @@ private TrainTestSplitter createSingleClassSplitter(Regression regression) { regression.getDependentVariable(), regression.getTrainingPercent(), regression.getRandomizeSeed(), - searchResponse.getHits().getTotalHits().value + searchResponse.getHits().getTotalHits().value() ); } finally { searchResponse.decRef(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index f56c589aea19a..c4396c4f9d2c8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -196,7 +196,7 @@ private void doSearch( numDocsWritten += searchResponse.getHits().getHits().length; boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize - || searchResponse.getHits().getTotalHits().value == numDocsWritten; + || searchResponse.getHits().getTotalHits().value() == numDocsWritten; if (endOfSearch) { successConsumer.accept(Boolean.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index f493c735d87ea..ff5f37427b18f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -1008,7 +1008,7 @@ public void expandIds( ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - long totalHitCount = 
response.getHits().getTotalHits().value + foundResourceIds.size(); + long totalHitCount = response.getHits().getTotalHits().value() + foundResourceIds.size(); Set foundFromDocs = new HashSet<>(); for (SearchHit hit : response.getHits().getHits()) { Map docSource = hit.getSourceAsMap(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 8493513f40bd6..df9a187f59616 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -683,7 +683,7 @@ public void groupExists(String groupId, ActionListener listener) { ML_ORIGIN, searchRequest, ActionListener.wrap( - response -> listener.onResponse(response.getHits().getTotalHits().value > 0), + response -> listener.onResponse(response.getHits().getTotalHits().value() > 0), listener::onFailure ), client::search diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index b9cc1902b7ab6..0f3abe3ab8c20 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -352,7 +352,7 @@ public void deleteJobDocuments( } } SearchResponse searchResponse = item.getResponse(); - if (searchResponse.getHits().getTotalHits().value > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { + if (searchResponse.getHits().getTotalHits().value() > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { needToRunDBQTemp = true; } else { indicesToDelete.add(indexNames.get()[i]); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index f9e4e62e4e3bc..51b3e0b55d75b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -870,7 +870,7 @@ public void buckets( throw QueryPage.emptyQueryPage(Bucket.RESULTS_FIELD); } - QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, Bucket.RESULTS_FIELD); + QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), Bucket.RESULTS_FIELD); if (query.isExpand()) { Iterator bucketsToExpand = buckets.results() @@ -1086,7 +1086,7 @@ public void categoryDefinitions( } QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), CategoryDefinition.RESULTS_FIELD ); handler.accept(result); @@ -1143,7 +1143,7 @@ public void records( } QueryPage queryPage = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), AnomalyRecord.RESULTS_FIELD ); handler.accept(queryPage); @@ -1207,7 +1207,7 @@ public void influencers( } QueryPage result = new QueryPage<>( influencers, - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Influencer.RESULTS_FIELD ); handler.accept(result); @@ -1375,7 +1375,7 @@ private void modelSnapshots( QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), ModelSnapshot.RESULTS_FIELD ); handler.accept(result); @@ -1411,7 +1411,7 @@ public QueryPage modelPlot(String jobId, int from, int size) { } } - return new QueryPage<>(results, 
searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1444,7 +1444,7 @@ public QueryPage categorizerStats(String jobId, int from, int } } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1700,7 +1700,7 @@ public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener(events, response.getHits().getTotalHits().value, ScheduledEvent.RESULTS_FIELD)); + handler.onResponse(new QueryPage<>(events, response.getHits().getTotalHits().value(), ScheduledEvent.RESULTS_FIELD)); } catch (Exception e) { handler.onFailure(e); } @@ -1901,7 +1901,7 @@ public void calendars(CalendarQueryBuilder queryBuilder, ActionListener(calendars, response.getHits().getTotalHits().value, Calendar.RESULTS_FIELD)); + listener.onResponse(new QueryPage<>(calendars, response.getHits().getTotalHits().value(), Calendar.RESULTS_FIELD)); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 886c19a65a4d0..194759c026a30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -168,7 +168,7 @@ private List findForecastsToDelete(SearchResponse searchResponse) List forecastsToDelete = new ArrayList<>(); SearchHits hits = searchResponse.getHits(); - if (hits.getTotalHits().value > MAX_FORECASTS) { + if (hits.getTotalHits().value() > 
MAX_FORECASTS) { LOGGER.info("More than [{}] forecasts were found. This run will only delete [{}] of them", MAX_FORECASTS, MAX_FORECASTS); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java index 86488a647baa1..ef6087f021e9d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java @@ -111,7 +111,7 @@ private SearchResponse initScroll() { ); SearchResponse searchResponse = client.search(searchRequest).actionGet(); - totalHits = searchResponse.getHits().getTotalHits().value; + totalHits = searchResponse.getHits().getTotalHits().value(); scrollId = searchResponse.getScrollId(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java index f63f6e0549179..802bcaf3b342e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java @@ -110,7 +110,7 @@ public Deque next() { SearchResponse searchResponse = doSearch(searchAfterFields()); try { if (trackTotalHits && totalHits.get() == 0) { - totalHits.set(searchResponse.getHits().getTotalHits().value); + totalHits.set(searchResponse.getHits().getTotalHits().value()); } return mapHits(searchResponse); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java index 3d17d8dd23ff6..13cf6d87728a8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java @@ -166,8 +166,8 @@ protected void doAssertLuceneQuery(SparseVectorQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index 8da6fc843614e..00d50e0d0d7bb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -139,8 +139,8 @@ protected void doAssertLuceneQuery(TextExpansionQueryBuilder queryBuilder, Query Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git 
a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index daea70abd29e3..7ddaa53a59914 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -149,7 +149,7 @@ public void testMonitoringBulk() throws Exception { assertResponse(client().prepareSearch(".monitoring-" + system.getSystem() + "-" + TEMPLATE_VERSION + "-*"), response -> { // exactly 3 results are expected - assertThat("No monitoring documents yet", response.getHits().getTotalHits().value, equalTo(3L)); + assertThat("No monitoring documents yet", response.getHits().getTotalHits().value(), equalTo(3L)); final List> sources = Arrays.stream(response.getHits().getHits()) .map(SearchHit::getSourceAsMap) @@ -165,7 +165,7 @@ public void testMonitoringBulk() throws Exception { assertCheckedResponse(client().prepareSearch(monitoringIndex), response -> { final SearchHits hits = response.getHits(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat( "Monitoring documents must have the same timestamp", Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(), diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 93e055b58ddc3..d68395ef7656f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -113,7 +113,7 @@ public void testExport() throws Exception { assertResponse( prepareSearch(".monitoring-*"), - response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value)) + response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value())) ); }); @@ -260,7 +260,7 @@ private void checkMonitoringDocs() { DateFormatter dateFormatter = DateFormatter.forPattern(customTimeFormat).withZone(ZoneOffset.UTC); assertResponse(prepareSearch(".monitoring-*").setSize(100), rsp -> { - assertThat(rsp.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(rsp.getHits().getTotalHits().value(), greaterThan(0L)); for (SearchHit hit : rsp.getHits().getHits()) { final Map source = hit.getSourceAsMap(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index d6e15ea25c8e1..d382905c1c9c2 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -293,12 +293,14 @@ private void assertNoWatchesExist() { .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { List invalidWatches = new ArrayList<>(); for (SearchHit hit : response.getHits().getHits()) { invalidWatches.add(ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap())); } - fail("Found [" + 
response.getHits().getTotalHits().value + "] invalid watches when none were expected: " + invalidWatches); + fail( + "Found [" + response.getHits().getTotalHits().value() + "] invalid watches when none were expected: " + invalidWatches + ); } }); } diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java index 803c7f410c41d..71f788727aa23 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java @@ -97,7 +97,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna .getAsVersionId( "version", IndexVersion::fromId, - IndexVersion.fromId(randomBoolean() ? 
5000099 : 6000099) + IndexVersion.fromId(randomFrom(5000099, 6000099, 7000099)) ) ) ) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index e5de349203b3d..18adebb145f98 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -196,7 +196,7 @@ static final OldSegmentInfos readCommit(Directory directory, String segmentFileN long generation = generationFromSegmentsFileName(segmentFileName); // System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); - try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READ)) { + try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName)) { try { return readCommit(directory, input, generation, minSupportedMajorVersion); } catch (EOFException | NoSuchFileException | FileNotFoundException e) { @@ -305,7 +305,7 @@ private static void parseSegmentInfos(Directory directory, DataInput input, OldS byte[] segmentID = new byte[StringHelper.ID_LENGTH]; input.readBytes(segmentID, 0, segmentID.length); Codec codec = readCodec(input); - SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ); + SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.DEFAULT); info.setCodec(codec); totalDocs += info.maxDoc(); long delGen = CodecUtil.readBELong(input); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 25b4b685ac50f..3ed8fc26ac937 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.lucene.bwc.codecs; -import org.apache.lucene.backward_codecs.lucene70.Lucene70Codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FieldsConsumer; @@ -27,7 +26,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec; import java.io.IOException; import java.util.ArrayList; @@ -101,6 +99,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { false, fieldInfo.getIndexOptions(), fieldInfo.getDocValuesType(), + fieldInfo.docValuesSkipIndexType(), fieldInfo.getDocValuesGen(), fieldInfo.attributes(), fieldInfo.getPointDimensionCount(), @@ -119,9 +118,7 @@ private static FieldInfos filterFields(FieldInfos fieldInfos) { } public static SegmentInfo wrap(SegmentInfo segmentInfo) { - // special handling for Lucene70Codec (which is currently bundled with Lucene) - // Use BWCLucene70Codec instead as that one extends BWCCodec (similar to all other older codecs) - final Codec codec = segmentInfo.getCodec() instanceof Lucene70Codec ? 
new BWCLucene70Codec() : segmentInfo.getCodec(); + final Codec codec = segmentInfo.getCodec(); final SegmentInfo segmentInfo1 = new SegmentInfo( segmentInfo.dir, // Use Version.LATEST instead of original version, otherwise SegmentCommitInfo will bark when processing (N-1 limitation) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java index 5a9b1bb252308..c7abed7d69a59 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java @@ -182,10 +182,7 @@ public Number next() { try { if (nextDocID > values.docID()) { if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - ordCount = 0; - while (values.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { - ordCount++; - } + ordCount = values.docValueCount(); } } int result; @@ -225,6 +222,7 @@ public Iterator iterator() { return new Iterator() { private boolean nextIsSet; + private int currentIndex = 0; private long nextOrd; private void setNext() { @@ -232,17 +230,22 @@ private void setNext() { if (nextIsSet == false) { if (values.docID() == -1) { values.nextDoc(); + currentIndex = 0; } while (true) { if (values.docID() == DocIdSetIterator.NO_MORE_DOCS) { nextOrd = -1; break; } - nextOrd = values.nextOrd(); - if (nextOrd != -1) { - break; + if (currentIndex < values.docValueCount()) { + nextOrd = values.nextOrd(); + currentIndex++; + if (nextOrd != -1) { + break; + } } values.nextDoc(); + currentIndex = 0; } nextIsSet = true; } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java index 21b6818bd5613..80236f3847e12 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java @@ -53,7 +53,7 @@ public int nextDoc() { while (docID < maxDoc) { values.setDocument(docID); ord = values.nextOrd(); - if (ord != NO_MORE_ORDS) { + if (ord != LegacySortedSetDocValues.NO_MORE_ORDS) { return docID; } docID++; @@ -81,7 +81,7 @@ public boolean advanceExact(int target) throws IOException { docID = target; values.setDocument(docID); ord = values.nextOrd(); - return ord != NO_MORE_ORDS; + return ord != LegacySortedSetDocValues.NO_MORE_ORDS; } @Override @@ -92,7 +92,7 @@ public long cost() { @Override public long nextOrd() { long result = ord; - if (result != NO_MORE_ORDS) { + if (result != LegacySortedSetDocValues.NO_MORE_ORDS) { ord = values.nextOrd(); } return result; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java index a567f25869407..007b398624d56 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java @@ -105,14 +105,14 @@ private static int encodedSize(PackedInts.Format format, int packedIntsVersion, for (int bpv = 1; bpv <= 32; ++bpv) { final FormatAndBits formatAndBits = PackedInts.fastestFormatAndBits(BLOCK_SIZE, bpv, acceptableOverheadRatio); - assert formatAndBits.format.isSupported(formatAndBits.bitsPerValue); - assert 
formatAndBits.bitsPerValue <= 32; - encodedSizes[bpv] = encodedSize(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - encoders[bpv] = PackedInts.getEncoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - decoders[bpv] = PackedInts.getDecoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); + assert formatAndBits.format().isSupported(formatAndBits.bitsPerValue()); + assert formatAndBits.bitsPerValue() <= 32; + encodedSizes[bpv] = encodedSize(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + encoders[bpv] = PackedInts.getEncoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + decoders[bpv] = PackedInts.getDecoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); iterations[bpv] = computeIterations(decoders[bpv]); - out.writeVInt(formatAndBits.format.getId() << 5 | (formatAndBits.bitsPerValue - 1)); + out.writeVInt(formatAndBits.format().getId() << 5 | (formatAndBits.bitsPerValue() - 1)); } } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java index 83fcb17449100..06002d2d10dee 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java @@ -23,6 +23,7 @@ import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; 
import org.apache.lucene.index.FieldInfos; @@ -103,6 +104,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm storePayloads, indexOptions, docValuesType, + DocValuesSkipIndexType.NONE, dvGen, attributes, 0, diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java index 09147e821d9fb..607d9903abc87 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -1316,6 +1317,11 @@ public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { } } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return null; + } + private SortedSetDocValues getSortedSetWithAddresses(FieldInfo field) throws IOException { final long valueCount = binaries.get(field.name).count; // we keep the byte[]s and list of ords on disk, these could be large diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java index f3ce3ea0755e1..43203caf571f1 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java @@ -63,14 +63,14 @@ public MetadataOnlyBKDReader(IndexInput metaIn) throws IOException { numLeaves = metaIn.readVInt(); assert numLeaves > 0; - minPackedValue = new byte[config.packedIndexBytesLength]; - maxPackedValue = new byte[config.packedIndexBytesLength]; - - metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength); - metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength); - final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim); - for (int dim = 0; dim < config.numIndexDims; dim++) { - if (comparator.compare(minPackedValue, dim * config.bytesPerDim, maxPackedValue, dim * config.bytesPerDim) > 0) { + minPackedValue = new byte[config.packedIndexBytesLength()]; + maxPackedValue = new byte[config.packedIndexBytesLength()]; + + metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength()); + metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength()); + final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim()); + for (int dim = 0; dim < config.numIndexDims(); dim++) { + if (comparator.compare(minPackedValue, dim * config.bytesPerDim(), maxPackedValue, dim * config.bytesPerDim()) > 0) { throw new CorruptIndexException( "minPackedValue " + new BytesRef(minPackedValue) @@ -104,17 +104,17 @@ public byte[] getMaxPackedValue() { @Override public int getNumDimensions() { - return config.numDims; + return config.numDims(); } @Override public int getNumIndexDimensions() { - return config.numIndexDims; + return config.numIndexDims(); } @Override public int getBytesPerDimension() { - return config.bytesPerDim; + return config.bytesPerDim(); } @Override diff --git 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java index 0e689138acd8f..0100a8bd14635 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java @@ -32,7 +32,7 @@ public class BWCLucene70Codec extends BWCCodec { private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); private final StoredFieldsFormat storedFieldsFormat; - private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene70"); + private final DocValuesFormat defaultDVFormat = new Lucene70DocValuesFormat(); private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { @@ -47,7 +47,11 @@ public PostingsFormat getPostingsFormatForField(String field) { }; public BWCLucene70Codec() { - super("BWCLucene70Codec"); + this("BWCLucene70Codec"); + } + + protected BWCLucene70Codec(String name) { + super(name); storedFieldsFormat = new Lucene50StoredFieldsFormat(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java new file mode 100644 index 0000000000000..75119247cdb13 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java @@ -0,0 +1,327 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BitSetIterator; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.RoaringDocIdSet; + +import java.io.DataInput; +import java.io.IOException; + +/** + * Disk-based implementation of a {@link DocIdSetIterator} which can return the index of the current + * document, i.e. the ordinal of the current document among the list of documents that this iterator + * can return. This is useful to implement sparse doc values by only having to encode values for + * documents that actually have a value. + * + *

Implementation-wise, this {@link DocIdSetIterator} is inspired of {@link RoaringDocIdSet + * roaring bitmaps} and encodes ranges of {@code 65536} documents independently and picks between 3 + * encodings depending on the density of the range: + * + *

    + *
  • {@code ALL} if the range contains 65536 documents exactly, + *
  • {@code DENSE} if the range contains 4096 documents or more; in that case documents are + * stored in a bit set, + *
  • {@code SPARSE} otherwise, and the lower 16 bits of the doc IDs are stored in a {@link + * DataInput#readShort() short}. + *
+ * + *

Only ranges that contain at least one value are encoded. + * + *

This implementation uses 6 bytes per document in the worst-case, which happens in the case + * that all ranges contain exactly one document. + */ +final class IndexedDISI extends DocIdSetIterator { + + static final int MAX_ARRAY_LENGTH = (1 << 12) - 1; + + private static void flush(int block, FixedBitSet buffer, int cardinality, IndexOutput out) throws IOException { + assert block >= 0 && block < 65536; + out.writeShort((short) block); + assert cardinality > 0 && cardinality <= 65536; + out.writeShort((short) (cardinality - 1)); + if (cardinality > MAX_ARRAY_LENGTH) { + if (cardinality != 65536) { // all docs are set + for (long word : buffer.getBits()) { + out.writeLong(word); + } + } + } else { + BitSetIterator it = new BitSetIterator(buffer, cardinality); + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + out.writeShort((short) doc); + } + } + } + + static void writeBitSet(DocIdSetIterator it, IndexOutput out) throws IOException { + int i = 0; + final FixedBitSet buffer = new FixedBitSet(1 << 16); + int prevBlock = -1; + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + final int block = doc >>> 16; + if (prevBlock != -1 && block != prevBlock) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + prevBlock = block; + i = 0; + } + buffer.set(doc & 0xFFFF); + i++; + prevBlock = block; + } + if (i > 0) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + } + // NO_MORE_DOCS is stored explicitly + buffer.set(DocIdSetIterator.NO_MORE_DOCS & 0xFFFF); + flush(DocIdSetIterator.NO_MORE_DOCS >>> 16, buffer, 1, out); + } + + /** The slice that stores the {@link DocIdSetIterator}. 
*/ + private final IndexInput slice; + + private final long cost; + + IndexedDISI(IndexInput in, long offset, long length, long cost) throws IOException { + this(in.slice("docs", offset, length), cost); + } + + // This constructor allows to pass the slice directly in case it helps reuse + // see eg. Lucene70 norms producer's merge instance + IndexedDISI(IndexInput slice, long cost) throws IOException { + this.slice = slice; + this.cost = cost; + } + + private int block = -1; + private long blockEnd; + private int nextBlockIndex = -1; + Method method; + + private int doc = -1; + private int index = -1; + + // SPARSE variables + boolean exists; + + // DENSE variables + private long word; + private int wordIndex = -1; + // number of one bits encountered so far, including those of `word` + private int numberOfOnes; + + // ALL variables + private int gap; + + @Override + public int docID() { + return doc; + } + + @Override + public int advance(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + if (block == targetBlock) { + if (method.advanceWithinBlock(this, target)) { + return doc; + } + readBlockHeader(); + } + boolean found = method.advanceWithinBlock(this, block); + assert found; + return doc; + } + + public boolean advanceExact(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target); + this.doc = target; + return found; + } + + private void advanceBlock(int targetBlock) throws IOException { + do { + slice.seek(blockEnd); + readBlockHeader(); + } while (block < targetBlock); + } + + private void readBlockHeader() throws IOException { + block = Short.toUnsignedInt(slice.readShort()) << 16; + assert block >= 0; + final int numValues = 1 + Short.toUnsignedInt(slice.readShort()); + index = nextBlockIndex; + 
nextBlockIndex = index + numValues; + if (numValues <= MAX_ARRAY_LENGTH) { + method = Method.SPARSE; + blockEnd = slice.getFilePointer() + (numValues << 1); + } else if (numValues == 65536) { + method = Method.ALL; + blockEnd = slice.getFilePointer(); + gap = block - index - 1; + } else { + method = Method.DENSE; + blockEnd = slice.getFilePointer() + (1 << 13); + wordIndex = -1; + numberOfOnes = index + 1; + } + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + public int index() { + return index; + } + + @Override + public long cost() { + return cost; + } + + enum Method { + SPARSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + disi.doc = disi.block | doc; + disi.exists = true; + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + if (target == disi.doc) { + return disi.exists; + } + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + if (doc != targetInBlock) { + disi.index--; + disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES); + break; + } + disi.exists = true; + return true; + } + } + disi.exists = false; + return false; + } + }, + DENSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex 
= targetWordIndex; + + long leftBits = disi.word >>> target; + if (leftBits != 0L) { + disi.doc = target + Long.numberOfTrailingZeros(leftBits); + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return true; + } + + while (++disi.wordIndex < 1024) { + disi.word = disi.slice.readLong(); + if (disi.word != 0) { + disi.index = disi.numberOfOnes; + disi.numberOfOnes += Long.bitCount(disi.word); + disi.doc = disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word); + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex = targetWordIndex; + + long leftBits = disi.word >>> target; + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return (leftBits & 1L) != 0; + } + }, + ALL { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.doc = target; + disi.index = target - disi.gap; + return true; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.index = target - disi.gap; + return true; + } + }; + + /** + * Advance to the first doc from the block that is equal to or greater than {@code target}. + * Return true if there is such a doc and false otherwise. + */ + abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException; + + /** + * Advance the iterator exactly to the position corresponding to the given {@code target} and + * return whether this document exists. 
+ */ + abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java new file mode 100644 index 0000000000000..77de24b53069d --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +public class Lucene70Codec extends BWCLucene70Codec { + + public Lucene70Codec() { + super("Lucene70"); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java new file mode 100644 index 0000000000000..1d35a60235d35 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java @@ -0,0 +1,681 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicWriter; +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.EmptyDocValuesProducer; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.store.ByteBuffersDataOutput; +import org.apache.lucene.store.ByteBuffersIndexOutput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.MathUtil; +import org.apache.lucene.util.StringHelper; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import 
java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE; + +/** writer for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesConsumer extends DocValuesConsumer { + + IndexOutput data, meta; + final int maxDoc; + + /** expert: Creates a new writer */ + Lucene70DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + boolean success = false; + try { + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + data = EndiannessReverserUtil.createOutput(state.directory, dataName, state.context); + CodecUtil.writeIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + meta = EndiannessReverserUtil.createOutput(state.directory, metaName, state.context); + CodecUtil.writeIndexHeader( + meta, + metaCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + maxDoc = state.segmentInfo.maxDoc(); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + @Override + public void close() throws IOException { + boolean success = false; + try { + if (meta != null) { + meta.writeInt(-1); // write EOF marker + CodecUtil.writeFooter(meta); // write checksum + } + if (data != null) { + CodecUtil.writeFooter(data); // write checksum + } + success = true; + } finally { + if (success) { + IOUtils.close(data, meta); + } 
else { + IOUtils.closeWhileHandlingException(data, meta); + } + meta = data = null; + } + } + + @Override + public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.NUMERIC); + + writeValues(field, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + return DocValues.singleton(valuesProducer.getNumeric(field)); + } + }); + } + + private static class MinMaxTracker { + long min, max, numValues, spaceInBits; + + MinMaxTracker() { + reset(); + spaceInBits = 0; + } + + private void reset() { + min = Long.MAX_VALUE; + max = Long.MIN_VALUE; + numValues = 0; + } + + /** Accumulate a new value. */ + void update(long v) { + min = Math.min(min, v); + max = Math.max(max, v); + ++numValues; + } + + /** Update the required space. */ + void finish() { + if (max > min) { + spaceInBits += LegacyDirectWriter.unsignedBitsRequired(max - min) * numValues; + } + } + + /** Update space usage and get ready for accumulating values for the next block. */ + void nextBlock() { + finish(); + reset(); + } + } + + private long[] writeValues(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + int numDocsWithValue = 0; + MinMaxTracker minMax = new MinMaxTracker(); + MinMaxTracker blockMinMax = new MinMaxTracker(); + long gcd = 0; + Set uniqueValues = new HashSet<>(); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + + if (gcd != 1) { + if (v < Long.MIN_VALUE / 2 || v > Long.MAX_VALUE / 2) { + // in that case v - minValue might overflow and make the GCD computation return + // wrong results. 
Since these extreme values are unlikely, we just discard + // GCD computation for them + gcd = 1; + } else if (minMax.numValues != 0) { // minValue needs to be set first + gcd = MathUtil.gcd(gcd, v - minMax.min); + } + } + + minMax.update(v); + blockMinMax.update(v); + if (blockMinMax.numValues == NUMERIC_BLOCK_SIZE) { + blockMinMax.nextBlock(); + } + + if (uniqueValues != null && uniqueValues.add(v) && uniqueValues.size() > 256) { + uniqueValues = null; + } + } + + numDocsWithValue++; + } + + minMax.finish(); + blockMinMax.finish(); + + final long numValues = minMax.numValues; + long min = minMax.min; + final long max = minMax.max; + assert blockMinMax.spaceInBits <= minMax.spaceInBits; + + if (numDocsWithValue == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithValue == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedNumeric(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeLong(numValues); + final int numBitsPerValue; + boolean doBlocks = false; + Map encode = null; + if (min >= max) { + numBitsPerValue = 0; + meta.writeInt(-1); + } else { + if (uniqueValues != null + && uniqueValues.size() > 1 + && LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1) < LegacyDirectWriter.unsignedBitsRequired( + (max - min) / gcd + )) { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1); + final Long[] sortedUniqueValues = uniqueValues.toArray(new Long[0]); + Arrays.sort(sortedUniqueValues); + meta.writeInt(sortedUniqueValues.length); + for (Long v : sortedUniqueValues) { + meta.writeLong(v); + } + encode = new HashMap<>(); + for (int i = 0; i < sortedUniqueValues.length; ++i) { + encode.put(sortedUniqueValues[i], i); + } + min = 0; + gcd = 1; + } else { + uniqueValues = null; + // we do blocks if that appears to save 10+% 
storage + doBlocks = minMax.spaceInBits > 0 && (double) blockMinMax.spaceInBits / minMax.spaceInBits <= 0.9; + if (doBlocks) { + numBitsPerValue = 0xFF; + meta.writeInt(-2 - NUMERIC_BLOCK_SHIFT); + } else { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired((max - min) / gcd); + if (gcd == 1 + && min > 0 + && LegacyDirectWriter.unsignedBitsRequired(max) == LegacyDirectWriter.unsignedBitsRequired(max - min)) { + min = 0; + } + meta.writeInt(-1); + } + } + } + + meta.writeByte((byte) numBitsPerValue); + meta.writeLong(min); + meta.writeLong(gcd); + long startOffset = data.getFilePointer(); + meta.writeLong(startOffset); + if (doBlocks) { + writeValuesMultipleBlocks(valuesProducer.getSortedNumeric(field), gcd); + } else if (numBitsPerValue != 0) { + writeValuesSingleBlock(valuesProducer.getSortedNumeric(field), numValues, numBitsPerValue, min, gcd, encode); + } + meta.writeLong(data.getFilePointer() - startOffset); + + return new long[] { numDocsWithValue, numValues }; + } + + private void writeValuesSingleBlock( + SortedNumericDocValues values, + long numValues, + int numBitsPerValue, + long min, + long gcd, + Map encode + ) throws IOException { + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numValues, numBitsPerValue); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + if (encode == null) { + writer.add((v - min) / gcd); + } else { + writer.add(encode.get(v)); + } + } + } + writer.finish(); + } + + private void writeValuesMultipleBlocks(SortedNumericDocValues values, long gcd) throws IOException { + final long[] buffer = new long[NUMERIC_BLOCK_SIZE]; + final ByteBuffersDataOutput encodeBuffer = ByteBuffersDataOutput.newResettableInstance(); + int upTo = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = 
values.docValueCount(); i < count; ++i) { + buffer[upTo++] = values.nextValue(); + if (upTo == NUMERIC_BLOCK_SIZE) { + writeBlock(buffer, NUMERIC_BLOCK_SIZE, gcd, encodeBuffer); + upTo = 0; + } + } + } + if (upTo > 0) { + writeBlock(buffer, upTo, gcd, encodeBuffer); + } + } + + private void writeBlock(long[] values, int length, long gcd, ByteBuffersDataOutput buffer) throws IOException { + assert length > 0; + long min = values[0]; + long max = values[0]; + for (int i = 1; i < length; ++i) { + final long v = values[i]; + assert Math.floorMod(values[i] - min, gcd) == 0; + min = Math.min(min, v); + max = Math.max(max, v); + } + if (min == max) { + data.writeByte((byte) 0); + data.writeLong(min); + } else { + final int bitsPerValue = LegacyDirectWriter.unsignedBitsRequired(max - min); + buffer.reset(); + assert buffer.size() == 0; + final LegacyDirectWriter w = LegacyDirectWriter.getInstance(buffer, length, bitsPerValue); + for (int i = 0; i < length; ++i) { + w.add((values[i] - min) / gcd); + } + w.finish(); + data.writeByte((byte) bitsPerValue); + data.writeLong(min); + data.writeInt(Math.toIntExact(buffer.size())); + buffer.copyTo(data); + } + } + + @Override + public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.BINARY); + + BinaryDocValues values = valuesProducer.getBinary(field); + long start = data.getFilePointer(); + meta.writeLong(start); + int numDocsWithField = 0; + int minLength = Integer.MAX_VALUE; + int maxLength = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + BytesRef v = values.binaryValue(); + int length = v.length; + data.writeBytes(v.bytes, v.offset, v.length); + minLength = Math.min(length, minLength); + maxLength = Math.max(length, maxLength); + } + assert numDocsWithField <= maxDoc; + meta.writeLong(data.getFilePointer() - start); + + if 
(numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getBinary(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + meta.writeInt(minLength); + meta.writeInt(maxLength); + if (maxLength > minLength) { + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + writer.add(addr); + values = valuesProducer.getBinary(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.binaryValue().length; + writer.add(addr); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED); + doAddSortedField(field, valuesProducer); + } + + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedDocValues values = valuesProducer.getSorted(field); + int numDocsWithField = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + } + + if (numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSorted(field); + IndexedDISI.writeBitSet(values, data); + 
meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + if (values.getValueCount() <= 1) { + meta.writeByte((byte) 0); + meta.writeLong(0L); + meta.writeLong(0L); + } else { + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numDocsWithField, numberOfBitsPerOrd); + values = valuesProducer.getSorted(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + writer.add(values.ordValue()); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + + addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); + } + + private void addTermsDict(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeVLong(size); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT); + + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + long numBlocks = (size + Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) >>> Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT; + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + BytesRefBuilder previous = new BytesRefBuilder(); + long ord = 0; + long start = data.getFilePointer(); + int maxLength = 0; + TermsEnum iterator = values.termsEnum(); + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) == 0) { + writer.add(data.getFilePointer() - start); + data.writeVInt(term.length); + data.writeBytes(term.bytes, term.offset, 
term.length); + } else { + final int prefixLength = StringHelper.bytesDifference(previous.get(), term); + final int suffixLength = term.length - prefixLength; + assert suffixLength > 0; // terms are unique + + data.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4))); + if (prefixLength >= 15) { + data.writeVInt(prefixLength - 15); + } + if (suffixLength >= 16) { + data.writeVInt(suffixLength - 16); + } + data.writeBytes(term.bytes, term.offset + prefixLength, term.length - prefixLength); + } + maxLength = Math.max(maxLength, term.length); + previous.copyBytes(term); + ++ord; + } + writer.finish(); + meta.writeInt(maxLength); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + + // Now write the reverse terms index + writeTermsIndex(values); + } + + private void writeTermsIndex(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + long start = data.getFilePointer(); + + long numBlocks = 1L + ((size + Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) + >>> Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + TermsEnum iterator = values.termsEnum(); + BytesRefBuilder previous = new BytesRefBuilder(); + long offset = 0; + long ord = 0; + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == 0) { + writer.add(offset); + final int 
sortKeyLength; + if (ord == 0) { + // no previous term: no bytes to write + sortKeyLength = 0; + } else { + sortKeyLength = StringHelper.sortKeyLength(previous.get(), term); + } + offset += sortKeyLength; + data.writeBytes(term.bytes, term.offset, sortKeyLength); + } else if ((ord + & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) { + previous.copyBytes(term); + } + ++ord; + } + writer.add(offset); + writer.finish(); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + } + + @Override + public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED_NUMERIC); + + long[] stats = writeValues(field, valuesProducer); + int numDocsWithField = Math.toIntExact(stats[0]); + long numValues = stats[1]; + assert numValues >= numDocsWithField; + + meta.writeInt(numDocsWithField); + if (numValues > numDocsWithField) { + long start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1L, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + 
meta.writeByte(Lucene70DocValuesFormat.SORTED_SET); + + SortedSetDocValues values = valuesProducer.getSortedSet(field); + int numDocsWithField = 0; + long numOrds = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + numOrds += values.docValueCount(); + } + + if (numDocsWithField == numOrds) { + meta.writeByte((byte) 0); + doAddSortedField(field, new EmptyDocValuesProducer() { + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); + } + }); + return; + } + meta.writeByte((byte) 1); + + assert numDocsWithField != 0; + if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedSet(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numOrds, numberOfBitsPerOrd); + values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0; i < values.docValueCount(); i++) { + writer.add(values.nextOrd()); + } + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + + meta.writeInt(numDocsWithField); + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + 
values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + values.nextOrd(); + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + + addTermsDict(values); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java new file mode 100644 index 0000000000000..76fce4cd15c93 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java @@ -0,0 +1,171 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
 */
package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70;

import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.SmallFloat;

import java.io.IOException;

/**
 * Lucene 7.0 DocValues format.
 *
 * <p>Documents that have a value for the field are encoded in a way that it is always possible to
 * know the ordinal of the current document in the set of documents that have a value. For instance,
 * say the set of documents that have a value for the field is <code>{1, 5, 6, 11}</code>. When the
 * iterator is on <code>6</code>, it knows that this is the 3rd item of the set. This way, values
 * can be stored densely and accessed based on their index at search time. If all documents in a
 * segment have a value for the field, the index is the same as the doc ID, so this case is encoded
 * implicitly and is very fast at query time. On the other hand if some documents are missing a
 * value for the field then the set of documents that have a value is encoded into blocks. All doc
 * IDs that share the same upper 16 bits are encoded into the same block with the following
 * strategies:
 *
 * <ul>
 *   <li>SPARSE: This strategy is used when a block contains at most 4095 documents. The lower 16
 *       bits of doc IDs are stored as {@link DataOutput#writeShort(short) shorts} while the upper
 *       16 bits are given by the block ID.
 *   <li>DENSE: This strategy is used when a block contains between 4096 and 65535 documents. The
 *       lower bits of doc IDs are stored in a bit set. Advancing is performed using {@link
 *       Long#numberOfTrailingZeros(long) ntz} operations while the index is computed by
 *       accumulating the {@link Long#bitCount(long) bit counts} of the visited longs.
 *   <li>ALL: This strategy is used when a block contains exactly 65536 documents, meaning that the
 *       block is full. In that case doc IDs do not need to be stored explicitly. This is typically
 *       faster than both SPARSE and DENSE which is a reason why it is preferable to have all
 *       documents that have a value for a field using contiguous doc IDs, for instance by using
 *       {@link IndexWriterConfig#setIndexSort(org.apache.lucene.search.Sort) index sorting}.
 * </ul>
 *
 * <p>Then the five per-document value types (Numeric, Binary, Sorted, SortedSet, SortedNumeric) are
 * encoded using the following strategies:
 *
 * <p>{@link DocValuesType#NUMERIC NUMERIC}:
 *
 * <ul>
 *   <li>Delta-compressed: per-document integers written as deltas from the minimum value,
 *       compressed with bitpacking. For more information, see {@link LegacyDirectWriter}.
 *   <li>Table-compressed: when the number of unique values is very small (&lt; 256), and when there
 *       are unused "gaps" in the range of values used (such as {@link SmallFloat}), a lookup table
 *       is written instead. Each per-document entry is instead the ordinal to this table, and those
 *       ordinals are compressed with bitpacking ({@link LegacyDirectWriter}).
 *   <li>GCD-compressed: when all numbers share a common divisor, such as dates, the greatest common
 *       denominator (GCD) is computed, and quotients are stored using Delta-compressed Numerics.
 *   <li>Monotonic-compressed: when all numbers are monotonically increasing offsets, they are
 *       written as blocks of bitpacked integers, encoding the deviation from the expected delta.
 *   <li>Const-compressed: when there is only one possible value, no per-document data is needed and
 *       this value is encoded alone.
 * </ul>
 *
 * <p>{@link DocValuesType#BINARY BINARY}:
 *
 * <ul>
 *   <li>Fixed-width Binary: one large concatenated byte[] is written, along with the fixed length.
 *       Each document's value can be addressed directly with multiplication ({@code docID *
 *       length}).
 *   <li>Variable-width Binary: one large concatenated byte[] is written, along with end addresses
 *       for each document. The addresses are written as Monotonic-compressed numerics.
 *   <li>Prefix-compressed Binary: values are written in chunks of 16, with the first value written
 *       completely and other values sharing prefixes. Chunk addresses are written as
 *       Monotonic-compressed numerics. A reverse lookup index is written from a portion of every
 *       1024th term.
 * </ul>
 *
 * <p>{@link DocValuesType#SORTED SORTED}:
 *
 * <ul>
 *   <li>Sorted: a mapping of ordinals to deduplicated terms is written as Prefix-compressed Binary,
 *       along with the per-document ordinals written using one of the numeric strategies above.
 * </ul>
 *
 * <p>{@link DocValuesType#SORTED_SET SORTED_SET}:
 *
 * <ul>
 *   <li>Single: if all documents have 0 or 1 value, then data are written like SORTED.
 *   <li>SortedSet: a mapping of ordinals to deduplicated terms is written as Binary, an ordinal
 *       list and per-document index into this list are written using the numeric strategies above.
 * </ul>
 *
 * <p>{@link DocValuesType#SORTED_NUMERIC SORTED_NUMERIC}:
 *
 * <ul>
 *   <li>Single: if all documents have 0 or 1 value, then data are written like NUMERIC.
 *   <li>SortedNumeric: a value list and per-document index into this list are written using the
 *       numeric strategies above.
 * </ul>
 *
 * <p>Files:
 *
 * <ol>
 *   <li><code>.dvd</code>: DocValues data
 *   <li><code>.dvm</code>: DocValues metadata
 * </ol>
+ */ +public final class Lucene70DocValuesFormat extends DocValuesFormat { + + /** Sole Constructor */ + public Lucene70DocValuesFormat() { + super("Lucene70"); + } + + @Override + public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { + return new Lucene70DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + @Override + public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { + return new Lucene70DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + static final String DATA_CODEC = "Lucene70DocValuesData"; + static final String DATA_EXTENSION = "dvd"; + static final String META_CODEC = "Lucene70DocValuesMetadata"; + static final String META_EXTENSION = "dvm"; + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + + // indicates docvalues type + static final byte NUMERIC = 0; + static final byte BINARY = 1; + static final byte SORTED = 2; + static final byte SORTED_SET = 3; + static final byte SORTED_NUMERIC = 4; + + static final int DIRECT_MONOTONIC_BLOCK_SHIFT = 16; + + static final int NUMERIC_BLOCK_SHIFT = 14; + static final int NUMERIC_BLOCK_SIZE = 1 << NUMERIC_BLOCK_SHIFT; + + static final int TERMS_DICT_BLOCK_SHIFT = 4; + static final int TERMS_DICT_BLOCK_SIZE = 1 << TERMS_DICT_BLOCK_SHIFT; + static final int TERMS_DICT_BLOCK_MASK = TERMS_DICT_BLOCK_SIZE - 1; + + static final int TERMS_DICT_REVERSE_INDEX_SHIFT = 10; + static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; + static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java new file mode 100644 index 
0000000000000..5164a67c428b3 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java @@ -0,0 +1,1461 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicReader; +import org.apache.lucene.backward_codecs.packed.LegacyDirectReader; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BaseTermsEnum; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.ImpactsEnum; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.index.TermsEnum.SeekStatus; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LongValues; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +/** reader for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesProducer extends DocValuesProducer { + private final Map numerics = new HashMap<>(); + private final Map binaries = new HashMap<>(); + private final Map sorted = new HashMap<>(); + private final Map sortedSets = new HashMap<>(); + private final Map sortedNumerics = new HashMap<>(); + private final IndexInput data; + private final int maxDoc; + + static 
final long NO_MORE_ORDS = -1; + + /** expert: instantiates a new reader */ + Lucene70DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + this.maxDoc = state.segmentInfo.maxDoc(); + + int version = -1; + + // read in the entries from the metadata file. + try (ChecksumIndexInput in = EndiannessReverserUtil.openChecksumInput(state.directory, metaName, state.context)) { + Throwable priorE = null; + try { + version = CodecUtil.checkIndexHeader( + in, + metaCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + readFields(in, state.fieldInfos); + } catch (Throwable exception) { + priorE = exception; + } finally { + CodecUtil.checkFooter(in, priorE); + } + } + + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + this.data = EndiannessReverserUtil.openInput(state.directory, dataName, state.context); + boolean success = false; + try { + final int version2 = CodecUtil.checkIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + if (version != version2) { + throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data); + } + + // NOTE: data file is too costly to verify checksum against all the bytes on open, + // but for now we at least verify proper structure of the checksum footer: which looks + // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption + // such as file truncation. 
+ CodecUtil.retrieveChecksum(data); + + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this.data); + } + } + } + + private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException { + for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) { + FieldInfo info = infos.fieldInfo(fieldNumber); + if (info == null) { + throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); + } + byte type = meta.readByte(); + if (type == Lucene70DocValuesFormat.NUMERIC) { + numerics.put(info.name, readNumeric(meta)); + } else if (type == Lucene70DocValuesFormat.BINARY) { + binaries.put(info.name, readBinary(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED) { + sorted.put(info.name, readSorted(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_SET) { + sortedSets.put(info.name, readSortedSet(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_NUMERIC) { + sortedNumerics.put(info.name, readSortedNumeric(meta)); + } else { + throw new CorruptIndexException("invalid type: " + type, meta); + } + } + } + + private NumericEntry readNumeric(ChecksumIndexInput meta) throws IOException { + NumericEntry entry = new NumericEntry(); + readNumeric(meta, entry); + return entry; + } + + private void readNumeric(ChecksumIndexInput meta, NumericEntry entry) throws IOException { + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numValues = meta.readLong(); + int tableSize = meta.readInt(); + if (tableSize > 256) { + throw new CorruptIndexException("invalid table size: " + tableSize, meta); + } + if (tableSize >= 0) { + entry.table = new long[tableSize]; + for (int i = 0; i < tableSize; ++i) { + entry.table[i] = meta.readLong(); + } + } + if (tableSize < -1) { + entry.blockShift = -2 - tableSize; + } else { + entry.blockShift = -1; + } + entry.bitsPerValue = meta.readByte(); + entry.minValue = 
meta.readLong(); + entry.gcd = meta.readLong(); + entry.valuesOffset = meta.readLong(); + entry.valuesLength = meta.readLong(); + } + + private BinaryEntry readBinary(ChecksumIndexInput meta) throws IOException { + BinaryEntry entry = new BinaryEntry(); + entry.dataOffset = meta.readLong(); + entry.dataLength = meta.readLong(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.minLength = meta.readInt(); + entry.maxLength = meta.readInt(); + if (entry.minLength < entry.maxLength) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1L, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + private SortedEntry readSorted(ChecksumIndexInput meta) throws IOException { + SortedEntry entry = new SortedEntry(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private SortedSetEntry readSortedSet(ChecksumIndexInput meta) throws IOException { + SortedSetEntry entry = new SortedSetEntry(); + byte multiValued = meta.readByte(); + switch (multiValued) { + case 0: // singlevalued + entry.singleValueEntry = readSorted(meta); + return entry; + case 1: // multivalued + break; + default: + throw new CorruptIndexException("Invalid multiValued flag: " + multiValued, meta); + } + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.addressesOffset = meta.readLong(); + final int 
blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private static void readTermDict(ChecksumIndexInput meta, TermsDictEntry entry) throws IOException { + entry.termsDictSize = meta.readVLong(); + entry.termsDictBlockShift = meta.readInt(); + final int blockShift = meta.readInt(); + final long addressesSize = (entry.termsDictSize + (1L << entry.termsDictBlockShift) - 1) >>> entry.termsDictBlockShift; + entry.termsAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, addressesSize, blockShift); + entry.maxTermLength = meta.readInt(); + entry.termsDataOffset = meta.readLong(); + entry.termsDataLength = meta.readLong(); + entry.termsAddressesOffset = meta.readLong(); + entry.termsAddressesLength = meta.readLong(); + entry.termsDictIndexShift = meta.readInt(); + final long indexSize = (entry.termsDictSize + (1L << entry.termsDictIndexShift) - 1) >>> entry.termsDictIndexShift; + entry.termsIndexAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, 1 + indexSize, blockShift); + entry.termsIndexOffset = meta.readLong(); + entry.termsIndexLength = meta.readLong(); + entry.termsIndexAddressesOffset = meta.readLong(); + entry.termsIndexAddressesLength = meta.readLong(); + } + + private SortedNumericEntry readSortedNumeric(ChecksumIndexInput meta) throws IOException { + SortedNumericEntry entry = new SortedNumericEntry(); + readNumeric(meta, entry); + entry.numDocsWithField = meta.readInt(); + if (entry.numDocsWithField != entry.numValues) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + @Override + public void close() throws IOException { + data.close(); + } + + private 
static class NumericEntry { + long[] table; + int blockShift; + byte bitsPerValue; + long docsWithFieldOffset; + long docsWithFieldLength; + long numValues; + long minValue; + long gcd; + long valuesOffset; + long valuesLength; + } + + private static class BinaryEntry { + long dataOffset; + long dataLength; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + int minLength; + int maxLength; + long addressesOffset; + long addressesLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + } + + private static class TermsDictEntry { + long termsDictSize; + int termsDictBlockShift; + LegacyDirectMonotonicReader.Meta termsAddressesMeta; + int maxTermLength; + long termsDataOffset; + long termsDataLength; + long termsAddressesOffset; + long termsAddressesLength; + int termsDictIndexShift; + LegacyDirectMonotonicReader.Meta termsIndexAddressesMeta; + long termsIndexOffset; + long termsIndexLength; + long termsIndexAddressesOffset; + long termsIndexAddressesLength; + } + + private static class SortedEntry extends TermsDictEntry { + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + } + + private static class SortedSetEntry extends TermsDictEntry { + SortedEntry singleValueEntry; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + private static class SortedNumericEntry extends NumericEntry { + int numDocsWithField; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + @Override + public NumericDocValues getNumeric(FieldInfo field) throws IOException { + NumericEntry entry = numerics.get(field.name); + return getNumeric(entry); + } + + private abstract static class DenseNumericDocValues extends NumericDocValues { + + 
final int maxDoc; + int doc = -1; + + DenseNumericDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public long cost() { + return maxDoc; + } + } + + private abstract static class SparseNumericDocValues extends NumericDocValues { + + final IndexedDISI disi; + + SparseNumericDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + } + + private NumericDocValues getNumeric(NumericEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + // empty + return DocValues.emptyNumeric(); + } else if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.bitsPerValue == 0) { + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // dense but split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new DenseNumericDocValues(maxDoc) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int block = doc >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(doc & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(doc)]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return mul * values.get(doc) + delta; + } + }; + } + } + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues); + if (entry.bitsPerValue == 0) { + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // sparse and split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new SparseNumericDocValues(disi) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + 
LongValues values; + + @Override + public long longValue() throws IOException { + final int index = disi.index(); + final int block = index >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(disi.index())]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return mul * values.get(disi.index()) + delta; + } + }; + } + } + } + } + } + + private LongValues getNumericValues(NumericEntry entry) throws IOException { + if (entry.bitsPerValue == 0) { + return new LongValues() { + @Override + public long get(long index) { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + final int shift = entry.blockShift; + final long mul = entry.gcd; + final long mask = (1L << shift) - 1; + return new LongValues() { + long block = -1; + long delta; + long offset; + long blockEndOffset; + LongValues values; + + @Override + public long get(long index) { + final long block = index >>> shift; + if 
(this.block != block) { + assert block > this.block : "Reading backwards is illegal: " + this.block + " < " + block; + int bitsPerValue; + do { + offset = blockEndOffset; + try { + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + } catch (IOException e) { + throw new RuntimeException(e); + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 ? LongValues.ZEROES : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new LongValues() { + @Override + public long get(long index) { + return table[(int) values.get(index)]; + } + }; + } else if (entry.gcd != 1) { + final long gcd = entry.gcd; + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) * gcd + minValue; + } + }; + } else if (entry.minValue != 0) { + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) + minValue; + } + }; + } else { + return values; + } + } + } + } + + private abstract static class DenseBinaryDocValues extends BinaryDocValues { + + final int maxDoc; + int doc = -1; + + DenseBinaryDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = 
NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + doc = target; + return true; + } + } + + private abstract static class SparseBinaryDocValues extends BinaryDocValues { + + final IndexedDISI disi; + + SparseBinaryDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + } + + @Override + public BinaryDocValues getBinary(FieldInfo field) throws IOException { + BinaryEntry entry = binaries.get(field.name); + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptyBinary(); + } + + final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength); + + if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) doc * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + long 
startOffset = addresses.get(doc); + bytes.length = (int) (addresses.get(doc + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) disi.index() * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + final int index = disi.index(); + long startOffset = addresses.get(index); + bytes.length = (int) (addresses.get(index + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } + } + + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + SortedEntry entry = sorted.get(field.name); + return getSorted(entry); + } + + private SortedDocValues getSorted(SortedEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptySorted(); + } + + final LongValues ords; + if (entry.bitsPerValue == 0) { + ords = new LongValues() { + @Override + public long get(long index) { + return 0L; + } + }; + } else { + final RandomAccessInput 
slice = data.randomAccessSlice(entry.ordsOffset, entry.ordsLength); + ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + } + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedDocValues(entry, data) { + + int doc = -1; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public int ordValue() { + return (int) ords.get(doc); + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedDocValues(entry, data) { + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int ordValue() { + return (int) ords.get(disi.index()); + } + }; + } + } + + private abstract static class BaseSortedDocValues extends SortedDocValues { + + final SortedEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedDocValues(SortedEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public int getValueCount() { + return Math.toIntExact(entry.termsDictSize); + } + + @Override + public BytesRef lookupOrd(int ord) throws 
IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public int lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return Math.toIntExact(termsEnum.ord()); + case NOT_FOUND: + case END: + default: + return Math.toIntExact(-1L - termsEnum.ord()); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private abstract static class BaseSortedSetDocValues extends SortedSetDocValues { + + final SortedSetEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedSetDocValues(SortedSetEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public long getValueCount() { + return entry.termsDictSize; + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public long lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return termsEnum.ord(); + case NOT_FOUND: + case END: + default: + return -1L - termsEnum.ord(); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private static class TermsDict extends BaseTermsEnum { + + final TermsDictEntry entry; + final LongValues blockAddresses; + final IndexInput bytes; + final long blockMask; + final LongValues indexAddresses; + final IndexInput indexBytes; + final BytesRef term; + long ord = -1; + + TermsDict(TermsDictEntry entry, IndexInput data) throws IOException { + this.entry = entry; + RandomAccessInput addressesSlice = data.randomAccessSlice(entry.termsAddressesOffset, entry.termsAddressesLength); + blockAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsAddressesMeta, addressesSlice); + 
bytes = data.slice("terms", entry.termsDataOffset, entry.termsDataLength); + blockMask = (1L << entry.termsDictBlockShift) - 1; + RandomAccessInput indexAddressesSlice = data.randomAccessSlice( + entry.termsIndexAddressesOffset, + entry.termsIndexAddressesLength + ); + indexAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); + indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + term = new BytesRef(entry.maxTermLength); + } + + @Override + public BytesRef next() throws IOException { + if (++ord >= entry.termsDictSize) { + return null; + } + if ((ord & blockMask) == 0L) { + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + } else { + final int token = Byte.toUnsignedInt(bytes.readByte()); + int prefixLength = token & 0x0F; + int suffixLength = 1 + (token >>> 4); + if (prefixLength == 15) { + prefixLength += bytes.readVInt(); + } + if (suffixLength == 16) { + suffixLength += bytes.readVInt(); + } + term.length = prefixLength + suffixLength; + bytes.readBytes(term.bytes, prefixLength, suffixLength); + } + return term; + } + + @Override + public void seekExact(long ord) throws IOException { + if (ord < 0 || ord >= entry.termsDictSize) { + throw new IndexOutOfBoundsException(); + } + final long blockIndex = ord >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(blockIndex); + bytes.seek(blockAddress); + this.ord = (blockIndex << entry.termsDictBlockShift) - 1; + do { + next(); + } while (this.ord < ord); + } + + private BytesRef getTermFromIndex(long index) throws IOException { + assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + final long start = indexAddresses.get(index); + term.length = (int) (indexAddresses.get(index + 1) - start); + indexBytes.seek(start); + indexBytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekTermsIndex(BytesRef text) throws 
IOException { + long lo = 0L; + long hi = (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + while (lo <= hi) { + final long mid = (lo + hi) >>> 1; + getTermFromIndex(mid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + lo = mid + 1; + } else { + hi = mid - 1; + } + } + + assert hi < 0 || getTermFromIndex(hi).compareTo(text) <= 0; + assert hi == ((entry.termsDictSize - 1) >>> entry.termsDictIndexShift) || getTermFromIndex(hi + 1).compareTo(text) > 0; + + return hi; + } + + private BytesRef getFirstTermFromBlock(long block) throws IOException { + assert block >= 0 && block <= (entry.termsDictSize - 1) >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(block); + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekBlock(BytesRef text) throws IOException { + long index = seekTermsIndex(text); + if (index == -1L) { + return -1L; + } + + long ordLo = index << entry.termsDictIndexShift; + long ordHi = Math.min(entry.termsDictSize, ordLo + (1L << entry.termsDictIndexShift)) - 1L; + + long blockLo = ordLo >>> entry.termsDictBlockShift; + long blockHi = ordHi >>> entry.termsDictBlockShift; + + while (blockLo <= blockHi) { + final long blockMid = (blockLo + blockHi) >>> 1; + getFirstTermFromBlock(blockMid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + blockLo = blockMid + 1; + } else { + blockHi = blockMid - 1; + } + } + + assert blockHi < 0 || getFirstTermFromBlock(blockHi).compareTo(text) <= 0; + assert blockHi == ((entry.termsDictSize - 1) >>> entry.termsDictBlockShift) + || getFirstTermFromBlock(blockHi + 1).compareTo(text) > 0; + + return blockHi; + } + + @Override + public SeekStatus seekCeil(BytesRef text) throws IOException { + final long block = seekBlock(text); + if (block == -1) { + // before the first term + seekExact(0L); + return SeekStatus.NOT_FOUND; + } + final long blockAddress = 
blockAddresses.get(block); + this.ord = block << entry.termsDictBlockShift; + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + while (true) { + int cmp = term.compareTo(text); + if (cmp == 0) { + return SeekStatus.FOUND; + } else if (cmp > 0) { + return SeekStatus.NOT_FOUND; + } + if (next() == null) { + return SeekStatus.END; + } + } + } + + @Override + public BytesRef term() throws IOException { + return term; + } + + @Override + public long ord() throws IOException { + return ord; + } + + @Override + public long totalTermFreq() throws IOException { + return -1L; + } + + @Override + public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public ImpactsEnum impacts(int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int docFreq() throws IOException { + throw new UnsupportedOperationException(); + } + } + + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + SortedNumericEntry entry = sortedNumerics.get(field.name); + if (entry.numValues == entry.numDocsWithField) { + return DocValues.singleton(getNumeric(entry)); + } + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + final LongValues values = getNumericValues(entry); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new SortedNumericDocValues() { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) 
{ + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextValue() throws IOException { + return values.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new SortedNumericDocValues() { + + boolean set; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + @Override + public long nextValue() throws IOException { + set(); + return values.get(start++); + } + + @Override + public int docValueCount() { + set(); + return count; + } + + private void set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + } + } + }; + } + } + + @Override + public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { + SortedSetEntry entry = sortedSets.get(field.name); + if (entry.singleValueEntry != null) { + return DocValues.singleton(getSorted(entry.singleValueEntry)); + } + + final RandomAccessInput slice = data.randomAccessSlice(entry.ordsOffset, 
entry.ordsLength); + final LongValues ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedSetDocValues(entry, data) { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextOrd() throws IOException { + if (start == end) { + return NO_MORE_ORDS; + } + return ords.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedSetDocValues(entry, data) { + + boolean set; + long start; + long end = 0; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public 
boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + private boolean set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + return true; + } + return false; + } + + @Override + public long nextOrd() throws IOException { + if (set()) { + return ords.get(start++); + } else if (start == end) { + return NO_MORE_ORDS; + } else { + return ords.get(start++); + } + } + + @Override + public int docValueCount() { + set(); + return count; + } + }; + } + } + + @Override + public void checkIntegrity() throws IOException { + CodecUtil.checksumEntireFile(data); + } + + @Override + public DocValuesSkipper getSkipper(FieldInfo field) { + return null; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 6e5205d664f2d..0215e9f7ca4ab 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -6,5 +6,6 @@ # org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene62.Lucene62Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60Codec diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat index 2d46b4bca3d0c..8d24d86982da8 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat +++ 
b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat @@ -14,3 +14,4 @@ # limitations under the License. org.elasticsearch.xpack.lucene.bwc.codecs.lucene54.Lucene54DocValuesFormat +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java index 42b5ba83a0828..1288ae30caa72 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java @@ -18,7 +18,7 @@ public class OldCodecsAvailableTests extends ESTestCase { * to the next major Lucene version. */ @UpdateForV9 - @AwaitsFix(bugUrl = "muted until we add bwc codecs as part of lucene 10 upgrade") + @AwaitsFix(bugUrl = "muted until we add bwc codecs to support 7.x indices in Elasticsearch 9.0") public void testLuceneBWCCodecsAvailable() { assertEquals("Add Lucene BWC codecs for Elasticsearch version 7", 8, Version.CURRENT.major); } diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java index 304f7b0c934fb..59f5e5de1eff7 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java @@ -48,7 +48,9 @@ import org.apache.lucene.tests.util.TestUtil; import 
org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.test.ESTestCase; @@ -187,7 +189,11 @@ public void assertTerms(Terms leftTerms, Terms rightTerms, boolean deep) throws int numIntersections = atLeast(3); for (int i = 0; i < numIntersections; i++) { String re = AutomatonTestUtil.randomRegexp(random()); - CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton()); + Automaton determinized = Operations.determinize( + new RegExp(re, RegExp.NONE).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); + CompiledAutomaton automaton = new CompiledAutomaton(determinized); if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) { // TODO: test start term too TermsEnum leftIntersection = leftTerms.intersect(automaton, null); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java index c819dca3ec6ff..1a2aca0d63bde 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java @@ -10,12 +10,12 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; import org.apache.lucene.tests.util.TestUtil; import 
org.elasticsearch.test.GraalVMThreadsFilter; @ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) -public class Lucene54DocValuesFormatTests extends BaseDocValuesFormatTestCase { +public class Lucene54DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene54DocValuesFormat()); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java new file mode 100644 index 0000000000000..ce645feb854d1 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.elasticsearch.test.GraalVMThreadsFilter; + +@ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) +public class Lucene70DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { + + private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene70DocValuesFormat()); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java index 48673d2002170..f447f67b4cdd2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java @@ -179,7 +179,7 @@ private void searchProfilingEvents( .setQuery(request.getQuery()) .setTrackTotalHits(true) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); EventsIndex resampledIndex = mediumDownsampled.getResampledIndex(request.getSampleSize(), sampleCount); log.debug( "User requested [{}] samples, [{}] samples matched in [{}]. 
Picking [{}]", @@ -220,7 +220,7 @@ private void searchGenericEvents( .setPreference(String.valueOf(request.hashCode())) .setQuery(request.getQuery()) .execute(ActionListener.wrap(searchResponse -> { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); int requestedSampleCount = request.getSampleSize(); // random sampler aggregation does not support sampling rates between 0.5 and 1.0 -> clamp to 1.0 if (sampleCount <= requestedSampleCount * 2L) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index 9dd46e778fb9a..dbb4cf4dc6856 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -180,7 +180,7 @@ private void execute(ClusterState state, ActionListener { - boolean hasData = searchResponse.getHits().getTotalHits().value > 0; + boolean hasData = searchResponse.getHits().getTotalHits().value() > 0; listener.onResponse( new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated, anyPre891Data, hasData) ); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java index 8eac03d36371e..e4f5810ac89d3 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import 
org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -51,8 +50,7 @@ public char escape() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java index 528872ca9b4cf..41ae97ec5e4fd 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 @@ public RLikePattern(String regexpPattern) { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java index fd6bd177e4c60..6703f1aeacbb5 100644 --- 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -39,8 +38,7 @@ public String pattern() { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java index b501967524a6b..29c471296b5d1 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java @@ -218,7 +218,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -355,7 +355,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + 
assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -483,7 +483,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java index 7269d9c3e5e7f..ed26aa50ffa62 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java @@ -217,7 +217,7 @@ public void testMultipleOnlyKnn() { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -356,7 +356,7 @@ public void testBM25AndKnnWithBucketAggregation() { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -486,7 +486,7 @@ public void testBM25AndMultipleKnnWithAggregation() { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, 
response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java index 2a17a4a1152cf..8df4e3a8dbea5 100644 --- a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java +++ b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java @@ -120,7 +120,7 @@ private void assertPinnedPromotions(PinnedQueryBuilder pqb, LinkedHashSet { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, lessThanOrEqualTo((long) numRelevantDocs + pins.size())); // Check pins are sorted by increasing score, (unlike organic, there are no duplicate scores) @@ -193,7 +193,7 @@ public void testExhaustiveScoring() throws Exception { private void assertExhaustiveScoring(PinnedQueryBuilder pqb) { assertResponse(prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH), response -> { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, equalTo(2L)); }); } diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java index d9e65c385c610..2370a3dee6d03 100644 --- 
a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java @@ -79,12 +79,10 @@ public Query rewrite(IndexSearcher searcher) throws IOException { */ protected static class CappedBulkScorer extends BulkScorer { final BulkScorer bulkScorer; - final Weight weight; final float maxScore; - public CappedBulkScorer(BulkScorer bulkScorer, Weight weight, float maxScore) { + public CappedBulkScorer(BulkScorer bulkScorer, float maxScore) { this.bulkScorer = bulkScorer; - this.weight = weight; this.maxScore = maxScore; } @@ -125,15 +123,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final Weight innerWeight = searcher.createWeight(query, scoreMode, boost); if (scoreMode.needsScores()) { return new CappedScoreWeight(this, innerWeight, maxScore) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - final BulkScorer innerScorer = innerWeight.bulkScorer(context); - if (innerScorer == null) { - return null; - } - return new CappedBulkScorer(innerScorer, this, maxScore); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); @@ -152,7 +141,13 @@ public Scorer get(long leadCost) throws IOException { return innerScorer; } } - return new CappedScorer(innerWeight, innerScorer, maxScore); + return new CappedScorer(innerScorer, maxScore); + } + + @Override + public BulkScorer bulkScorer() throws IOException { + final BulkScorer innerScorer = innerScorerSupplier.bulkScorer(); + return new CappedBulkScorer(innerScorer, maxScore); } @Override @@ -166,15 +161,6 @@ public long cost() { public Matches matches(LeafReaderContext context, int doc) throws IOException { return innerWeight.matches(context, doc); } - - @Override 
- public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } }; } else { return innerWeight; diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java index 6ad3b9ce4ef85..ccc90e8f671a6 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java @@ -11,6 +11,7 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -36,8 +37,22 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new CappedScorer(this, innerWeight.scorer(context), maxScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); + if (innerScorerSupplier == null) { + return null; + } + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new CappedScorer(innerScorerSupplier.get(leadCost), maxScore); + } + + @Override + public long cost() { + return innerScorerSupplier.cost(); + } + }; } @Override diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java 
b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java index 57b2b62b77f6d..67813588ba3be 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java @@ -9,15 +9,14 @@ import org.apache.lucene.search.FilterScorer; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; import java.io.IOException; public class CappedScorer extends FilterScorer { private final float maxScore; - public CappedScorer(Weight weight, Scorer delegate, float maxScore) { - super(delegate, weight); + public CappedScorer(Scorer delegate, float maxScore) { + super(delegate); this.maxScore = maxScore; } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index faf41e7e655a8..eab73fbe5ad04 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -305,7 +305,7 @@ public void testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQuerying assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards)); assertThat(newSearchResponse.getFailedShards(), equalTo(0)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo((long) numDocsWithinRange)); + 
assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsWithinRange)); }); // test with SearchShardsAPI @@ -655,7 +655,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() assertThat(searchResponse.getFailedShards(), equalTo(indexOutsideSearchRangeShardCount)); assertThat(searchResponse.getSkippedShards(), equalTo(searchableSnapshotShardCount)); assertThat(searchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -736,7 +736,7 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() // a shard that's available in order to construct the search response assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards - 1)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); }); @@ -850,7 +850,7 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo SearchResponse response = client().search(request).actionGet(); logger.info( "[TEST DEBUG INFO] Search hits: {} Successful shards: {}, failed shards: {}, skipped shards: {}, total shards: {}", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), response.getSuccessfulShards(), response.getFailedShards(), response.getSkippedShards(), diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java index 7615723860cff..9888afdd16499 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java @@ -79,7 +79,7 @@ public void testSearchableSnapshotRelocationDoNotUseSnapshotBasedRecoveries() th ensureGreen(restoredIndexName); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mockLog.assertAllExpectationsMatched(); } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java index a3da932398fb1..1e76477378da2 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java @@ -66,7 +66,7 @@ public void testRepositoryUsedBySearchableSnapshotCanBeUpdatedButNotUnregistered Storage storage = randomFrom(Storage.values()); String restoredIndexName = (storage == Storage.FULL_COPY ? 
"fully-mounted-" : "partially-mounted-") + indexName + '-' + i; mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, Settings.EMPTY, storage); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mountedIndices[i] = restoredIndexName; } @@ -183,7 +183,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); final String mountedAgain = randomValueOtherThan(mounted, () -> randomAlphaOfLength(10).toLowerCase(Locale.ROOT)); final SnapshotRestoreException exception = expectThrows( @@ -208,7 +208,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value()); assertAcked(indicesAdmin().prepareDelete(mountedAgain)); assertAcked(indicesAdmin().prepareDelete(mounted)); @@ -240,7 +240,7 @@ public void testDeletionOfSnapshotSettingCannotBeUpdated() throws Exception { ? 
equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose(mounted)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 40b7e08936fa3..7eaf5d8f060c6 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -179,7 +179,7 @@ public void testCleanUpAfterIndicesAreDeleted() throws Exception { ) .setSize(0), res -> { - final long remainingEntriesInCache = res.getHits().getTotalHits().value; + final long remainingEntriesInCache = res.getHits().getTotalHits().value(); if (indicesToDelete.contains(mountedIndex)) { assertThat(remainingEntriesInCache, equalTo(0L)); } else if (snapshotId.equals(SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings))) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index a21e3e6beabce..21e67212f1f51 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -549,7 +549,7 @@ public void run() { try (listeners) { executeSearch(new SearchRequest().source(getSearchSourceBuilder().trackTotalHits(true)), (searchResponse, refs) -> { assert total.get() == 0L; - total.set(searchResponse.getHits().getTotalHits().value); + total.set(searchResponse.getHits().getTotalHits().value()); handleSearchResponse(searchResponse, refs); }); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index da08c6b38819b..a7fb5571995b3 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -173,9 +174,10 @@ long getCacheSize(ShardId shardId, SnapshotId snapshotId, Predicate predic final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final String cacheFileId = getValue(document, CACHE_ID_FIELD); if (predicate.test(snapshotCacheDir.resolve(cacheFileId))) { long size = buildCacheFileRanges(document).stream().mapToLong(ByteRange::length).sum(); @@ -423,9 +425,10 @@ static Map loadDocuments(Path directoryPath) throws IOExceptio for (LeafReaderContext leafReaderContext : indexReader.leaves()) { final LeafReader leafReader = leafReaderContext.reader(); final Bits liveDocs = leafReader.getLiveDocs(); + final StoredFields storedFields = leafReader.storedFields(); for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { - final Document document = leafReader.document(i); + final Document document = storedFields.document(i); logger.trace("loading document [{}]", document); documents.put(getValue(document, CACHE_ID_FIELD), document); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java index 81cf205c13dd2..4711043fff281 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.ReadAdvice; import org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.ByteRange; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; @@ -35,7 +36,7 @@ public class CachedBlobContainerIndexInput extends MetadataCachingIndexInput { * a complete part of the {@link #fileInfo} at once in the cache and should not be * used for anything else than what the {@link #prefetchPart(int, Supplier)} method does. */ - public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(); + public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(IOContext.Context.DEFAULT, null, null, ReadAdvice.NORMAL); private static final Logger logger = LogManager.getLogger(CachedBlobContainerIndexInput.class); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index 94ba06a00cc4e..c1468f2e45df0 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -221,7 +221,6 @@ public static boolean assertCurrentThreadMayAccessBlobStore() { ThreadPool.Names.SNAPSHOT, ThreadPool.Names.GENERIC, ThreadPool.Names.SEARCH, - ThreadPool.Names.SEARCH_WORKER, ThreadPool.Names.SEARCH_THROTTLED, // Cache asynchronous fetching runs on a dedicated thread pool. 
diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java index 41121453e41a4..4ee2bf7e65633 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java @@ -348,8 +348,8 @@ public static Tuple randomChecksumBytes(byte[] bytes) throws IOE * uses a different buffer size for them. */ public static IOContext randomIOContext() { - final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READ, IOContext.READONCE); - assert ioContext.context != IOContext.Context.MERGE; + final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READONCE); + assert ioContext.context() != IOContext.Context.MERGE; return ioContext; } } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java index c97d6cb4cab08..eab6f1a629f36 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java @@ -179,7 +179,7 @@ public void testSupportsNoOpCommits() throws IOException { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, 
equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) { @@ -226,7 +226,7 @@ public void testSupportsDeletes() throws IOException { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } assertEquals(1, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size()); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index 1452847c65b4c..98df96eca7772 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -401,9 +401,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { false, // no prewarming in this test because we want to ensure that files are accessed on purpose (directory, snapshotDirectory) -> { for (String fileName : randomSubsetOf(Arrays.asList(snapshotDirectory.listAll()))) { - final long checksum; - try (IndexInput input = directory.openInput(fileName, 
Store.READONCE_CHECKSUM)) { - checksum = CodecUtil.checksumEntireFile(input); + final long expectedChecksum; + try (IndexInput input = directory.openInput(fileName, IOContext.READONCE)) { + expectedChecksum = CodecUtil.checksumEntireFile(input); } final long snapshotChecksum; @@ -418,9 +418,9 @@ public void testChecksumBlobContainerIndexInput() throws Exception { } assertThat( - "Expected checksum [" + checksum + "] but got [" + snapshotChecksum + ']', + "Expected checksum [" + expectedChecksum + "] but got [" + snapshotChecksum + ']', snapshotChecksum, - equalTo(checksum) + equalTo(expectedChecksum) ); assertThat( "File [" + fileName + "] should have been read from heap", @@ -692,7 +692,7 @@ private void testDirectories( private void testIndexInputs(final CheckedBiConsumer consumer) throws Exception { testDirectories((directory, snapshotDirectory) -> { for (String fileName : randomSubsetOf(Arrays.asList(snapshotDirectory.listAll()))) { - final IOContext context = randomIOContext(); + final IOContext context = fileName.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : randomIOContext(); try (IndexInput indexInput = directory.openInput(fileName, context)) { final List closeables = new ArrayList<>(); try { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java index 6ffa09dc1f265..6d9110b564862 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java @@ -224,7 +224,7 @@ protected void assertSearchResponseContainsEmptyResult(Response response) { assertOK(response); SearchResponse searchResponse = SearchResponseUtils.responseAsSearchResponse(response); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java index 767452e6fcae7..4b994ce82d92f 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java @@ -282,7 +282,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr searchResponse = SearchResponseUtils.parseSearchResponse(parser); } try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java index d5e77c1694640..1602a097b1b08 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java @@ -362,7 +362,7 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr assertOK(response); final SearchResponse searchResponse = SearchResponseUtils.parseSearchResponse(responseAsParser(response)); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java index 8b18359fb8310..1345e275fab17 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java @@ -119,7 +119,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 69331fa448113..4cbd1cab21af9 100644 --- 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -419,7 +419,7 @@ public void testCrossClusterSearch() throws Exception { responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java index 505b82b39b960..53c622898476a 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java @@ -230,7 +230,7 @@ public void testAnonymousUserFromQueryClusterWorks() throws Exception { Arrays.stream(searchResponse5.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toList()), containsInAnyOrder(".security-7") ); - assertThat(searchResponse5.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(searchResponse5.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); } finally { searchResponse5.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java index 3871029b3b44b..6fa3ef1b4ef63 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java @@ -134,7 +134,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly final Request scrollRequest = new Request("GET", "/_search/scroll"); final String scrollId; try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics")); documentFieldValues.add(searchResponse.getHits().getHits()[0].getSourceAsMap().get("name")); scrollId = searchResponse.getScrollId(); @@ -153,7 +153,7 @@ public void testCrossClusterScrollWithSniffModeWhenSomeRemoteNodesAreNotDirectly responseAsParser(performRequestWithRemoteMetricUser(scrollRequest)) ); try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat( Arrays.stream(scrollResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics") diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java index b1a76a4559812..9a1d653132d2d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java @@ -81,7 +81,7 @@ public void testDateMathExpressionsCanBeAuthorized() throws Exception { assertResponse( client.prepareMultiSearch().add(client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()).request()), - multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)) + multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)) ); UpdateResponse updateResponse = client.prepareUpdate(expression, response.getId()) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index b0572b265a45b..a5f827c2a4b53 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -437,7 +437,7 @@ private void assertSearchResponse(SearchRequestBuilder requestBuilder, Set assertResponse(prepareSearch("alias" + role), searchResponse2 -> { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(searchResponse2.getHits().getTotalHits().value())); for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index c0866fa7ea694..87ca7d279c709 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -474,13 +474,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -495,13 +495,13 @@ public void testMSearch() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); @@ -522,7 +522,7 @@ public void testMSearch() throws Exception { ), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(2L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -531,7 +531,7 @@ public void testMSearch() throws Exception { assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(2L)); + 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -898,7 +898,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field1") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field1")); @@ -914,7 +914,7 @@ public void testKnnSearch() throws Exception { .addFetchField("field2") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field2")); @@ -929,7 +929,7 @@ public void testKnnSearch() throws Exception { .setQuery(query) .setSize(10), response -> { - assertEquals(10, response.getHits().getTotalHits().value); + assertEquals(10, response.getHits().getTotalHits().value()); assertEquals(10, response.getHits().getHits().length); } ); @@ -1265,7 +1265,7 @@ public void testScroll() throws Exception { .get(); do { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1325,7 +1325,7 @@ public void testReaderId() throws Exception { 
.setQuery(termQuery("field1", "value1")) .get(); assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java index 34eecd57b53d5..01020a428c318 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java @@ -208,7 +208,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -231,7 +231,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), 
equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -254,7 +254,7 @@ public void testDuel() throws Exception { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index bffa53b1f4da6..66c8c0a5b1b52 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -956,10 +956,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } @@ -975,10 +975,10 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); } @@ -993,11 +993,11 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1013,9 +1013,9 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); } ); @@ -1029,12 +1029,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1051,12 +1051,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1073,12 +1073,12 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1095,11 +1095,11 @@ public void testMSearchApi() throws Exception { 
.add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1132,7 +1132,7 @@ public void testScroll() throws Exception { .get(); do { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1191,7 +1191,7 @@ public void testPointInTimeId() throws Exception { .setQuery(constantScoreQuery(termQuery("field1", "value1"))) .setFetchSource(true), response -> { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); 
assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1281,7 +1281,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); } else { user2SearchResponse.decRef(); @@ -1289,7 +1289,7 @@ public void testScrollWithQueryCache() { user2SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) ).prepareSearchScroll(user2SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); if (randomBoolean()) { // maybe reuse the scroll even if empty @@ -1309,7 +1309,7 @@ public void testScrollWithQueryCache() { .setSize(1) .setFetchSource(true) .get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1319,7 +1319,7 @@ public void testScrollWithQueryCache() { user1SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) 
).prepareSearchScroll(user1SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); if (scrolledDocsUser1 < numDocs) { assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); @@ -2042,7 +2042,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -2061,7 +2061,7 @@ private void verifyParentChild() { .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index d4375d15e6a6d..7d99d5817bdc0 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -103,20 +103,20 @@ public void testSearchAndMSearch() throws Exception { indexRandom(true, prepareIndex(index).setSource(field, "bar")); 
assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), response -> { - final long hits = response.getHits().getTotalHits().value; + final long hits = response.getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); assertResponse( client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), - response2 -> assertEquals(response2.getHits().getTotalHits().value, hits) + response2 -> assertEquals(response2.getHits().getTotalHits().value(), hits) ); final long multiHits; MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) .get(); try { - multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); } finally { multiSearchResponse.decRef(); @@ -125,7 +125,7 @@ public void testSearchAndMSearch() throws Exception { singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); try { - assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); + assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), multiHits); } finally { multiSearchResponse.decRef(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index af54f71779f08..6f8ea0f103a56 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -312,7 +312,7 @@ public void testMultiNamesWorkCorrectly() { assertResponse( userAClient.prepareSearch("alias1").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); final ElasticsearchSecurityException e1 = expectThrows( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 78146e58e91e2..e178f4bf3eb6c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -343,7 +343,7 @@ private void testAddUserAndRoleThenAuth(String username, String roleName) { String token = basicAuthHeaderValue(username, new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); assertClusterHealthOnlyAuthorizesWhenAnonymousRoleActive(token); @@ -366,7 +366,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - 
searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); preparePutUser("joe", "s3krit-password2", hasher, SecuritySettingsSource.TEST_ROLE).get(); @@ -382,7 +382,7 @@ public void testUpdatingUserAndAuthentication() throws Exception { token = basicAuthHeaderValue("joe", new SecureString("s3krit-password2")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); } @@ -403,7 +403,7 @@ public void testCreateDeleteAuthenticate() { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); DeleteUserResponse response = new DeleteUserRequestBuilder(client()).username("joe").get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index f34983f7f125c..0acc281dd8440 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -213,7 +213,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - 
assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -231,7 +231,7 @@ public void testMultiSearchUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -249,7 +249,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -267,7 +267,7 @@ public void testMultiSearchMissingUnauthorizedIndex() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, 
greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -317,7 +317,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); } @@ -336,7 +336,7 @@ public void testMultiSearchWildcard() { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java index eb7c5e5276c15..a4cadeb953e14 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java @@ -48,13 +48,13 @@ public void testScrollIsPerUser() throws Exception { indexRandom(true, docs); assertResponse(prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1), response -> { - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); if (randomBoolean()) { assertResponse( client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)), response2 -> { - assertEquals(numDocs, response2.getHits().getTotalHits().value); + assertEquals(numDocs, response2.getHits().getTotalHits().value()); assertEquals(1, response2.getHits().getHits().length); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index 4b8fbfd41acdf..437fb76351176 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -856,7 +856,7 @@ private SuggestProfilesResponse.ProfileHit[] doSuggest(Set dataKeys, Str final SuggestProfilesRequest suggestProfilesRequest = new SuggestProfilesRequest(dataKeys, name, 10, hint); final SuggestProfilesResponse suggestProfilesResponse = client().execute(SuggestProfilesAction.INSTANCE, suggestProfilesRequest) .actionGet(); - assertThat(suggestProfilesResponse.getTotalHits().relation, is(TotalHits.Relation.EQUAL_TO)); + assertThat(suggestProfilesResponse.getTotalHits().relation(), is(TotalHits.Relation.EQUAL_TO)); return suggestProfilesResponse.getProfileHits(); 
} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 90566e25b4ea5..0a8947c49c606 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -2254,7 +2254,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No api keys found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryApiKeysResult.EMPTY); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index d866bd2a9d229..74a9aa7291ba4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -179,7 +179,7 @@ public void queryUsers(SearchRequest searchRequest, ActionListener { - final long total = searchResponse.getHits().getTotalHits().value; + final long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No users found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryUserResults.EMPTY); @@ -214,7 +214,7 @@ void getUserCount(final ActionListener listener) { .setSize(0) .setTrackTotalHits(true) .request(), - listener.safeMap(response -> response.getHits().getTotalHits().value), + listener.safeMap(response -> 
response.getHits().getTotalHits().value()), client::search ) ); @@ -706,7 +706,7 @@ void getAllReservedUserInfo(ActionListener> listen @Override public void onResponse(SearchResponse searchResponse) { Map userInfos = new HashMap<>(); - assert searchResponse.getHits().getTotalHits().value <= 10 + assert searchResponse.getHits().getTotalHits().value() <= 10 : "there are more than 10 reserved users we need to change this to retrieve them all!"; for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map sourceMap = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 1c773a6e3963f..fa6187798da25 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -85,6 +85,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.NamedClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import org.elasticsearch.xpack.core.sql.SqlAsyncActionNames; import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; @@ -550,7 +551,7 @@ public void validateIndexPermissionsAreSubset( Automaton existingPermissions = permissionMap.computeIfAbsent(entry.getKey(), role::allowedActionsMatcher); for (String alias : entry.getValue()) { Automaton newNamePermissions = permissionMap.computeIfAbsent(alias, role::allowedActionsMatcher); - if (Operations.subsetOf(newNamePermissions, existingPermissions) == false) { + if (Automatons.subsetOf(newNamePermissions, existingPermissions) == 
false) { listener.onResponse(AuthorizationResult.deny()); return; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java index 40cb3ea4d9864..8ff535f3f6231 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; @@ -21,6 +20,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import java.time.ZoneOffset; @@ -195,7 +195,7 @@ private void logDeprecatedPermission(RoleDescriptor roleDescriptor) { index.getName(), i -> IndexPrivilege.get(indexPrivileges).getAutomaton() ); - if (false == Operations.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { + if (false == Automatons.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { inferiorIndexNames.add(index.getName()); } } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 9ddda193dba39..52b39e2aae694 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -278,7 +278,7 @@ public void queryRoleDescriptors(SearchSourceBuilder searchSourceBuilder, Action TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No roles found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryRoleResult.EMPTY); @@ -730,28 +730,28 @@ public void onResponse(MultiSearchResponse items) { if (responses[0].isFailure()) { usageStats.put("size", 0); } else { - usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value); + usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value()); } if (responses[1].isFailure()) { usageStats.put("fls", false); } else { - usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[2].isFailure()) { usageStats.put("dls", false); } else { - usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[3].isFailure()) { usageStats.put("remote_indices", 0); } else { - usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value()); } if (responses[4].isFailure()) { usageStats.put("remote_cluster", 0); } else { - 
usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value()); } delegate.onResponse(usageStats); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index b347ceb833f64..b347c278aae08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -414,19 +414,19 @@ public void usageStats(ActionListener> listener) { logger.debug("error on counting total profiles", items[0].getFailure()); usage.put("total", 0L); } else { - usage.put("total", items[0].getResponse().getHits().getTotalHits().value); + usage.put("total", items[0].getResponse().getHits().getTotalHits().value()); } if (items[1].isFailure()) { logger.debug("error on counting enabled profiles", items[0].getFailure()); usage.put("enabled", 0L); } else { - usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value); + usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value()); } if (items[2].isFailure()) { logger.debug("error on counting recent profiles", items[0].getFailure()); usage.put("recent", 0L); } else { - usage.put("recent", items[2].getResponse().getHits().getTotalHits().value); + usage.put("recent", items[2].getResponse().getHits().getTotalHits().value()); } listener.onResponse(usage); }, listener::onFailure) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index 5ec76a8dc3d01..5cd8cba763d3d 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -77,8 +77,8 @@ public void migrate(SecurityIndexManager indexManager, Client client, ActionList client.search(countRequest, ActionListener.wrap(response -> { // If there are no roles, skip migration - if (response.getHits().getTotalHits().value > 0) { - logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); + if (response.getHits().getTotalHits().value() > 0) { + logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value() + "] roles"); updateRolesByQuery(indexManager, client, filterQuery, listener); } else { listener.onResponse(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 0a2c40d2a257a..a4d9dacd1a63d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.automaton.MinimizationOperations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -137,8 +137,8 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("idx3")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); - 
assertTrue(Operations.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); descriptor = roles.get("role1.ab"); assertNotNull(descriptor); @@ -181,9 +181,9 @@ public void testParseFile() throws Exception { assertThat(group.indices()[0], equalTo("/.*_.*/")); assertThat(group.privilege(), notNullValue()); assertTrue( - Operations.sameLanguage( + AutomatonTestUtil.sameLanguage( group.privilege().getAutomaton(), - MinimizationOperations.minimize( + Operations.determinize( Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ) @@ -236,7 +236,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("field_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); @@ -258,7 +258,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertFalse(group.getFieldPermissions().hasFieldLevelSecurity()); 
assertThat(group.getQuery(), notNullValue()); @@ -279,7 +279,7 @@ public void testParseFile() throws Exception { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_fields_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index d42d45e430627..e5171a7c51650 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -281,7 +281,7 @@ public void testRetentionWhileSnapshotInProgress() throws Exception { completedSnapshotName, Strings.arrayToCommaDelimitedString(resp.getHits().getHits()) ); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); } ); }); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index df8dc54bb7490..405a9926e2e5f 100644 --- 
a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -1595,7 +1595,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { int docIdToMatch = randomIntBetween(0, docCount - 1); assertResponse(searchRequestBuilder.setQuery(QueryBuilders.termQuery("field", docIdToMatch)), searchResponse -> { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = searchResponse.getHits().getAt(0); Map source = searchHit.getSourceAsMap(); assertThat(source, is(notNullValue())); @@ -1613,7 +1613,7 @@ private void assertDocumentsAreEqual(String indexName, int docCount) { private void assertSearchResponseContainsAllIndexedDocs(SearchResponse searchResponse, long docCount) { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docCount)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docCount)); for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { SearchHit searchHit = searchResponse.getHits().getAt(i); Map source = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java index 3c64d140e2b56..e7b9156d5fb66 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java +++ 
b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java @@ -129,7 +129,7 @@ public void testKnownIssueWithCellLeftOfDatelineTouchingPolygonOnRightOfDateline client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), innerResponse -> assertThat( "Bucket " + bucket.getKeyAsString(), - innerResponse.getHits().getTotalHits().value, + innerResponse.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); @@ -320,7 +320,7 @@ private void assertQuery(List buckets, BiFunction assertThat( "Expected hits at precision " + precision + " for H3 cell " + bucket.getKeyAsString(), - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java index b4a3a07502abf..b4d7a472591bd 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java @@ -175,7 +175,7 @@ public void testStorePolygonDateLine() throws Exception { indexRandom(true, prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); assertNoFailuresAndResponse(client().prepareSearch("test").setFetchSource(false).addStoredField("shape"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.field("shape").getValue(), instanceOf(BytesRef.class)); BytesRef bytesRef = searchHit.field("shape").getValue(); diff --git 
a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java index 1c013aba52261..4f23b6de4c37d 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java @@ -247,7 +247,7 @@ public void testFieldAlias() { assertResponse( client().prepareSearch(INDEX).setQuery(new ShapeQueryBuilder("alias", queryGeometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertTrue(response.getHits().getTotalHits().value > 0); + assertTrue(response.getHits().getTotalHits().value() > 0); } ); } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java index 1ac6bf3b6fd31..e26066cd89c50 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java @@ -112,7 +112,7 @@ public void testIndexPointsFilterRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -123,7 +123,7 @@ 
public void testIndexPointsFilterRectangle() { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -138,7 +138,7 @@ public void testIndexPointsCircle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -154,7 +154,7 @@ public void testIndexPointsPolygon() { .setQuery(new ShapeQueryBuilder(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(searchHits.getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); } @@ -175,7 +175,7 @@ public void testIndexPointsMultiPolygon() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, mp).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); 
assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("3"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("3"))); @@ -191,7 +191,7 @@ public void testIndexPointsRectangle() { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -232,7 +232,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java index de66d0b822c94..275385dca3535 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.spatial.index.query; -import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -42,7 +42,7 @@ import static 
org.hamcrest.Matchers.containsString; @UpdateForV9 -@LuceneTestCase.AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") +@AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeWithDocValuesQueryTests extends GeoShapeQueryTestCase { @SuppressWarnings("deprecation") diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java index 66f5597be543e..2713afc149e05 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java @@ -230,8 +230,8 @@ public void testGeoShapeQueryAcrossDateline() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value(), equalTo(1L)); } } } @@ -261,8 +261,8 @@ public void testShapeQuery() throws IOException { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(centerPointQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(centerPointQuery, 1).totalHits.value(), equalTo(1L)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 9cf60ec3bb2e4..d30bc8f99cc34 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -224,7 +224,7 @@ protected static void logSearchResponse(SearchResponse response, Logger logger) } var totalHits = response.getHits().getTotalHits(); - var hits = totalHits != null ? "hits " + totalHits.relation + " " + totalHits.value + ", " : ""; + var hits = totalHits != null ? "hits " + totalHits.relation() + " " + totalHits.value() + ", " : ""; logger.trace( "Got search response [{}{} aggregations: [{}], {} failed shards, {} skipped shards, " + "{} successful shards, {} total shards, took {}, timed out [{}]]", @@ -549,7 +549,7 @@ protected List initBucketExtractors(SearchResponse response) { List exts = new ArrayList<>(refs.size()); TotalHits totalHits = response.getHits().getTotalHits(); - ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? -1L : totalHits.value); + ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? 
-1L : totalHits.value()); for (QueryContainer.FieldInfo ref : refs) { exts.add(createExtractor(ref.extraction(), totalCount)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java index 78976ea7e83c0..cf52a5f5d7126 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java @@ -76,7 +76,7 @@ public Object extract(Bucket bucket) { throw new SqlIllegalArgumentException("Cannot find an aggregation named {}", name); } - if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value == 0) { + if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value() == 0) { return null; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index 8ee23e38f9ffe..0ba29fef8e06d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -123,7 +123,7 @@ protected void masterOperation( Arrays.toString(transformCountSuccess.getShardFailures()) ); } - long totalTransforms = transformCountSuccess.getHits().getTotalHits().value; + long totalTransforms = transformCountSuccess.getHits().getTotalHits().value(); if (totalTransforms == 0) { var usage = new TransformFeatureSetUsage(transformsCountByState, Collections.emptyMap(), new TransformIndexerStats()); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java index f49d5fc96f3ab..cd06a4cadaa37 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java @@ -84,7 +84,7 @@ public void sourceHasChanged(TransformCheckpoint lastCheckpoint, ActionListener< client, TransportSearchAction.TYPE, searchRequest, - ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value > 0L), listener::onFailure) + ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value() > 0L), listener::onFailure) ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index ffc4b48f9cc30..9d5175922c892 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -509,7 +509,7 @@ public void expandTransformIds( final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, allowNoMatch); executeAsyncWithOrigin(request, foundConfigsListener.delegateFailureAndWrap((l, searchResponse) -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); // important: preserve order Set ids = Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); Set configs = 
Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); @@ -589,7 +589,7 @@ public void resetTransform(String transformId, ActionListener listener) .trackTotalHitsUpTo(1) ); executeAsyncWithOrigin(TransportSearchAction.TYPE, searchRequest, deleteListener.delegateFailureAndWrap((l, searchResponse) -> { - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { listener.onFailure( new ResourceNotFoundException(TransformMessages.getMessage(TransformMessages.REST_UNKNOWN_TRANSFORM, transformId)) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java index 23bab56de5ec9..2de810b2b902d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java @@ -207,7 +207,7 @@ private SearchRequest buildSearchRequestForValidation(String logId, SourceConfig @Override public void getInitialProgressFromResponse(SearchResponse response, ActionListener progressListener) { - progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value, 0L, 0L)); + progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value(), 0L, 0L)); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index 684e3a085405d..68b31d4f466b6 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.transforms.pivot; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.geo.GeoPoint; @@ -560,7 +560,7 @@ static class GeoTileFieldCollector implements FieldCollector { @Override public int getMaxPageSize() { // this collector is limited by indices.query.bool.max_clause_count, default 1024 - return BooleanQuery.getMaxClauseCount(); + return IndexSearcher.getMaxClauseCount(); } @Override diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java index 706337768a299..5f7c6490e51f1 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java @@ -50,7 +50,7 @@ public void testCanUseAnyConcreteIndexName() throws Exception { assertBusy(() -> { assertResponse( prepareSearch(watchResultsIndex).setTrackTotalHits(true), - searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value, greaterThan(0)) + searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value(), greaterThan(0)) ); }); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java index f1ad29607b5b8..7fa5365afa0ab 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java @@ -120,7 +120,7 @@ private void assertTotalHistoryEntries(String id, long expectedCount) throws Exc assertResponse( prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSize(0) .setSource(new SearchSourceBuilder().query(QueryBuilders.boolQuery().must(termQuery("watch_id", id)))), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(oneOf(expectedCount, expectedCount + 1))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(oneOf(expectedCount, expectedCount + 1))) ); }); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 60867ba5d4410..4068c534013b9 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -122,7 +122,7 @@ public void testActionConditionWithHardFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); 
searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -176,7 +176,7 @@ public void testActionConditionWithFailures() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -236,7 +236,7 @@ public void testActionCondition() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 5b7ea39079f28..dac87eaa6f034 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -105,7 +105,7 @@ public void testEmailFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); InternalAggregations aggs = 
response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 97347de1ea23e..ffac36846414e 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -102,7 +102,7 @@ public void testHttpFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 7dde279fb90db..8dec5287ae607 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -54,7 +54,7 @@ public void testIndexActionFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff 
--git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 567d4acfa45e5..b268caa45f471 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -72,7 +72,7 @@ public void testHttpFields() throws Exception { ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 5dc537fc259d9..5eaf27e7b2670 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -377,7 +377,7 @@ protected void assertWatchWithMinimumPerformedActionsCount( lastResponse.set(searchResponse); assertThat( "could not find executed watch record for watch " + watchName, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(minimumExpectedWatchActionsWithActionPerformed) ); if (assertConditionMet) { @@ -396,7 +396,7 @@ protected void 
assertWatchWithMinimumPerformedActionsCount( } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -452,7 +452,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long searchResponse -> { lastResponse.set(searchResponse); assertThat( - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(expectedWatchActionsWithNoActionNeeded) ); } @@ -461,7 +461,7 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -497,7 +497,7 @@ protected void assertWatchWithMinimumActionsCount(final String watchName, final searchResponse -> { assertThat( "could not find executed watch record", - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(recordCount) ); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java index 99640d1ebc3ea..03f1e6cb57eb8 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java @@ -296,8 +296,8 @@ private void assertSingleExecutionAndCompleteWatchHistory(final long numberOfWat AtomicLong successfulWatchExecutions = new AtomicLong(); refresh(); assertResponse(prepareSearch("output"), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(numberOfWatches))); - successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value); + assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(numberOfWatches))); + successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value()); }); // the watch history should contain entries for each triggered watch, which a few have been marked as not executed @@ -378,7 +378,7 @@ public void testWatchRecordSavedTwice() throws Exception { // the actual documents are in the output index refresh(); assertResponse(prepareSearch(HistoryStoreField.DATA_STREAM).setSize(numRecords), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo((long) numRecords)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo((long) numRecords)); for (int i = 0; i < numRecords; i++) { assertThat(searchResponse.getHits().getAt(i).getSourceAsMap().get("state"), is(ExecutionState.EXECUTED.id())); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java 
index e5f4091ca89eb..f3648580691cb 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -55,7 +55,7 @@ public void testHistoryOnRejection() throws Exception { assertBusy(() -> { flushAndRefresh(".watcher-history-*"); assertResponse(prepareSearch(".watcher-history-*"), searchResponse -> { - assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(2L)); + assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(2L)); assertThat( "Did not find watcher history for rejected watch", Arrays.stream(searchResponse.getHits().getHits()) diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index 7ff293ed9b150..fbb1996a4cf42 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -69,7 +69,7 @@ public void testThatLoadingWithNonExistingIndexWorks() throws Exception { assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); assertResponse( prepareSearch(".watcher-history*").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(1L))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(1L))) ); }, 30, TimeUnit.SECONDS); } diff --git 
a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 4298f641cbdd2..e12805f3ace09 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -79,7 +79,7 @@ public void testWatchMetadata() throws Exception { } assertNotNull(searchResponse); try { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 92ac91a63e097..2ec6541275d04 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -139,13 +139,13 @@ public void testScriptTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - 
assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); @@ -184,12 +184,12 @@ public void testSearchTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); } @@ -223,13 +223,13 @@ public void testChainTransform() throws Exception { refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 5389f34212270..0ea9b432d3b0f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -340,7 +340,7 @@ private Collection loadWatches(ClusterState clusterState) { throw new ElasticsearchException("Partial response while loading watches"); } - if (response.getHits().getTotalHits().value == 0) { + if (response.getHits().getTotalHits().value() == 0) { return Collections.emptyList(); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java index 9a165112c41d1..327d345af864e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java @@ -42,7 +42,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.service.ClusterService; @@ -440,7 +439,7 @@ static CharacterRunAutomaton createAutomaton(List whiteListedHosts) { } Automaton whiteListAutomaton = Regex.simpleMatchToAutomaton(whiteListedHosts.toArray(Strings.EMPTY_ARRAY)); - whiteListAutomaton = MinimizationOperations.minimize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + whiteListAutomaton = Operations.determinize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); return new 
CharacterRunAutomaton(whiteListAutomaton); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 6775dca424bf1..dfa0c47493ed7 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -156,7 +156,7 @@ public Collection findTriggeredWatches(Collection watches SearchResponse response = null; try { response = client.search(searchRequest).actionGet(defaultSearchTimeout); - logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value); + logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value()); while (response.getHits().getHits().length != 0) { for (SearchHit hit : response.getHits()) { Wid wid = new Wid(hit.getId()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 9d6186e9c1c48..e6bd1b0efb95d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -91,7 +91,7 @@ SearchInput.Result doExecute(WatchExecutionContext ctx, WatcherSearchTemplateReq try { if (logger.isDebugEnabled()) { - logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value); + logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value()); } final Payload payload; diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java index 97ae29a26e68c..358a839e60ea5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportQueryWatchesAction.java @@ -98,11 +98,11 @@ SearchRequest createSearchRequest(QueryWatchesAction.Request request) { } void transformResponse(SearchResponse searchResponse, ActionListener listener) { - assert searchResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert searchResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; List items = Arrays.stream(searchResponse.getHits().getHits()) .map(this::transformSearchHit) .toList(); - listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value, items)); + listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value(), items)); } QueryWatchesAction.Response.Item transformSearchHit(SearchHit searchHit) { diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java index 608e5f1972373..191775f46cd72 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import 
org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -69,44 +70,56 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ByteArrayStreamInput bytes = new ByteArrayStreamInput(); final BinaryDocValues values = DocValues.getBinary(context.reader(), field); - Scorer approxScorer = approxWeight.scorer(context); - if (approxScorer == null) { + ScorerSupplier approxScorerSupplier = approxWeight.scorerSupplier(context); + if (approxScorerSupplier == null) { // No matches to be had return null; } - DocIdSetIterator approxDisi = approxScorer.iterator(); - TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + + return new ScorerSupplier() { @Override - public boolean matches() throws IOException { - if (values.advanceExact(approxDisi.docID()) == false) { - // Can happen when approxQuery resolves to some form of MatchAllDocs expression - return false; - } - BytesRef arrayOfValues = values.binaryValue(); - bytes.reset(arrayOfValues.bytes); - bytes.setPosition(arrayOfValues.offset); - - int size = bytes.readVInt(); - for (int i = 0; i < size; i++) { - int valLength = bytes.readVInt(); - if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { - return true; + public Scorer get(long leadCost) throws IOException { + Scorer approxScorer = approxScorerSupplier.get(leadCost); + DocIdSetIterator approxDisi = approxScorer.iterator(); + TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + @Override + public boolean matches() throws IOException { + if (values.advanceExact(approxDisi.docID()) == false) { + // Can happen when approxQuery resolves to some form of MatchAllDocs expression + 
return false; + } + BytesRef arrayOfValues = values.binaryValue(); + bytes.reset(arrayOfValues.bytes); + bytes.setPosition(arrayOfValues.offset); + + int size = bytes.readVInt(); + for (int i = 0; i < size; i++) { + int valLength = bytes.readVInt(); + if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { + return true; + } + bytes.skipBytes(valLength); + } + return false; + } + + @Override + public float matchCost() { + // TODO: how can we compute this? + return 1000f; } - bytes.skipBytes(valLength); - } - return false; + }; + return new ConstantScoreScorer(score(), scoreMode, twoPhase); } @Override - public float matchCost() { - // TODO: how can we compute this? - return 1000f; + public long cost() { + return approxScorerSupplier.cost(); } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); } @Override diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 7784e7ffdda12..f3b01bb898126 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchParseException; @@ -349,7 +348,7 @@ public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean } Automaton automaton = caseInsensitive ? 
AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term(name(), wildcardPattern)) - : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern)); + : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (clauseCount > 0) { // We can accelerate execution with the ngram query BooleanQuery approxQuery = rewritten.build(); @@ -379,7 +378,6 @@ public Query regexpQuery( RegExp regExp = new RegExp(value, syntaxFlags, matchFlags); Automaton a = regExp.toAutomaton(); a = Operations.determinize(a, maxDeterminizedStates); - a = MinimizationOperations.minimize(a, maxDeterminizedStates); if (Operations.isTotal(a)) { // Will match all return existsQuery(context); } @@ -390,7 +388,7 @@ public Query regexpQuery( Query approxNgramQuery = rewriteBoolToNgramQuery(approxBooleanQuery); RegExp regex = new RegExp(value, syntaxFlags, matchFlags); - Automaton automaton = regex.toAutomaton(maxDeterminizedStates); + Automaton automaton = Operations.determinize(regex.toAutomaton(), maxDeterminizedStates); // We can accelerate execution with the ngram query return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); @@ -550,9 +548,9 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); int clauseCount = 0; for (BooleanClause clause : bq) { - Query q = rewriteBoolToNgramQuery(clause.getQuery()); + Query q = rewriteBoolToNgramQuery(clause.query()); if (q != null) { - if (clause.getOccur().equals(Occur.FILTER)) { + if (clause.occur().equals(Occur.FILTER)) { // Can't drop "should" clauses because it can elevate a sibling optional item // to mandatory (shoulds with 1 clause) causing false negatives // Dropping MUSTs increase false positives which are OK because are verified anyway. 
@@ -561,7 +559,7 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { break; } } - rewritten.add(q, clause.getOccur()); + rewritten.add(q, clause.occur()); } } return rewritten.build(); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 4b9ccff6f526c..a1a01ebdcc590 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -41,6 +41,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -182,7 +183,7 @@ public void testTooBigKeywordField() throws IOException { Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("*a*", null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(1L)); reader.close(); dir.close(); @@ -229,12 +230,12 @@ public void testTooBigQueryField() throws IOException { String queryString = randomABString((IndexSearcher.getMaxClauseCount() * 2) + 1); Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(queryString, null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); // Test regexp query 
wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); reader.close(); dir.close(); @@ -271,13 +272,13 @@ public void testTermAndPrefixQueryIgnoreWildcardSyntax() throws IOException { private void expectTermMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().termQuery(term, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } private void expectPrefixMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().prefixQuery(term, null, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } public void testSearchResultsVersusKeywordField() throws IOException { @@ -390,8 +391,8 @@ public void testSearchResultsVersusKeywordField() throws IOException { TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, values.size() + 1, Sort.RELEVANCE); assertThat( keywordFieldQuery + "\n" + wildcardFieldQuery, - wildcardFieldTopDocs.totalHits.value, - equalTo(kwTopDocs.totalHits.value) + wildcardFieldTopDocs.totalHits.value(), + equalTo(kwTopDocs.totalHits.value()) ); HashSet expectedDocs = new HashSet<>(); @@ -497,7 +498,7 @@ public void testRangeQueryVersusKeywordField() throws IOException { TopDocs kwTopDocs = searcher.search(keywordFieldQuery, 10, Sort.RELEVANCE); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.RELEVANCE); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(kwTopDocs.totalHits.value)); + 
assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(kwTopDocs.totalHits.value())); HashSet expectedDocs = new HashSet<>(); for (ScoreDoc topDoc : kwTopDocs.scoreDocs) { @@ -642,7 +643,7 @@ public void testWildcardAcceleration() throws IOException, ParseException { public void testQueryCachingEquality() throws IOException, ParseException { String pattern = "A*b*B?a"; // Case sensitivity matters when it comes to caching - Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); Automaton caseInSensitiveAutomaton = AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term("field", pattern)); BinaryDvConfirmedAutomatonQuery csQ = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), @@ -660,7 +661,10 @@ public void testQueryCachingEquality() throws IOException, ParseException { assertNotEquals(csQ.hashCode(), ciQ.hashCode()); // Same query should be equal - Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton( + new Term("field", pattern), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); BinaryDvConfirmedAutomatonQuery csQ2 = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), "field", @@ -880,11 +884,11 @@ private Query rewriteFiltersToMustsForComparisonPurposes(Query q) { if (q instanceof BooleanQuery bq) { BooleanQuery.Builder result = new BooleanQuery.Builder(); for (BooleanClause cq : bq.clauses()) { - Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.getQuery()); - if (cq.getOccur() == Occur.FILTER) { + Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.query()); + if (cq.occur() == Occur.FILTER) { result.add(rewritten, Occur.MUST); } else { - result.add(rewritten, cq.getOccur()); + result.add(rewritten, cq.occur()); } } return result.build(); 
@@ -1013,8 +1017,9 @@ protected String convertToRandomRegex(String randomValue) { } // Assert our randomly generated regex actually matches the provided raw input. - RegExp regex = new RegExp(result.toString()); - Automaton automaton = regex.toAutomaton(); + int includeDeprecatedComplement = RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT; + RegExp regex = new RegExp(result.toString(), includeDeprecatedComplement); + Automaton automaton = Operations.determinize(regex.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton); BytesRef br = new BytesRef(randomValue); assertTrue( diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index d61c143098fcb..f502683e42eb2 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -377,7 +377,7 @@ private void assertDocs( try { logger.info(searchResponse); // check hit count - assertEquals(numDocs, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocs, searchResponse.getHits().getTotalHits().value()); // check that _index is properly set assertTrue(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).allMatch(index::equals)); // check that all _ids are there @@ -404,7 +404,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals(id, searchResponse.getHits().getHits()[0].getId()); assertEquals(sourceForDoc(num), searchResponse.getHits().getHits()[0].getSourceAsString()); } finally { @@ -456,7 +456,7 @@ private void assertDocs( ); try { 
logger.info(searchResponse); - assertEquals(typeCount, searchResponse.getHits().getTotalHits().value); + assertEquals(typeCount, searchResponse.getHits().getTotalHits().value()); for (SearchHit hit : searchResponse.getHits().getHits()) { DocumentField typeField = hit.field("_type"); assertNotNull(typeField); @@ -482,7 +482,7 @@ private void assertDocs( ); try { logger.info(searchResponse); - assertEquals(0, searchResponse.getHits().getTotalHits().value); + assertEquals(0, searchResponse.getHits().getTotalHits().value()); assertEquals(numberOfShards, searchResponse.getSuccessfulShards()); // When all shards are skipped, at least one of them is queried in order to provide a proper search response. assertEquals(numberOfShards - 1, searchResponse.getSkippedShards()); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index dddba9b7b0fba..02dc679152bf4 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -445,7 +445,7 @@ private List getAllTokenIds() throws IOException { final SearchHits searchHits = response.getHits(); assertThat( "Search request used with size parameter that was too small to fetch all tokens.", - searchHits.getTotalHits().value, + searchHits.getTotalHits().value(), lessThanOrEqualTo(searchSize) ); final List tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> { diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index 5add595d64e3f..43d6d9463e0d1 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -29,7 +29,7 @@ subprojects { restResources { restApi { - include '_common', 'bulk', 'count', 'cluster', 'index', 'indices', 
'field_caps', 'msearch', + include 'capabilities', '_common', 'bulk', 'count', 'cluster', 'index', 'indices', 'field_caps', 'msearch', 'search', 'async_search', 'graph', '*_point_in_time', 'put_script', 'scripts_painless_execute' } restTests {