Skip to content

Commit

Permalink
iter
Browse files Browse the repository at this point in the history
  • Loading branch information
javanna committed Apr 4, 2024
1 parent 3c6a71d commit beb872a
Show file tree
Hide file tree
Showing 5 changed files with 91 additions and 108 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,22 @@
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xcontent.XContentType;
import org.hamcrest.Matchers;

import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

/**
* Test that indexes enough data to trigger the creation of Cuckoo filters.
Expand Down Expand Up @@ -64,4 +74,33 @@ private void assertNumRareTerms(int maxDocs, int rareTerms) {
}
);
}

public void testGlobalAggregationWithScore() {
createIndex("global", Settings.EMPTY, "_doc", "keyword", "type=keyword");
prepareIndex("global").setSource("keyword", "a").setRefreshPolicy(IMMEDIATE).get();
prepareIndex("global").setSource("keyword", "c").setRefreshPolicy(IMMEDIATE).get();
prepareIndex("global").setSource("keyword", "e").setRefreshPolicy(IMMEDIATE).get();
GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation(
new RareTermsAggregationBuilder("terms").field("keyword")
.subAggregation(
new RareTermsAggregationBuilder("sub_terms").field("keyword")
.subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_"))
)
);
assertNoFailuresAndResponse(client().prepareSearch("global").addAggregation(globalBuilder), response -> {
InternalGlobal result = response.getAggregations().get("global");
InternalMultiBucketAggregation<?, ?> terms = result.getAggregations().get("terms");
assertThat(terms.getBuckets().size(), equalTo(3));
for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) {
InternalMultiBucketAggregation<?, ?> subTerms = bucket.getAggregations().get("sub_terms");
assertThat(subTerms.getBuckets().size(), equalTo(1));
MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0);
InternalTopHits topHits = subBucket.getAggregations().get("top_hits");
assertThat(topHits.getHits().getHits().length, equalTo(1));
for (SearchHit hit : topHits.getHits()) {
assertThat(hit.getScore(), greaterThan(0f));
}
}
});
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,24 @@
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.metrics.Avg;
import org.elasticsearch.search.aggregations.metrics.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
import org.elasticsearch.search.aggregations.metrics.Stats;
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESIntegTestCase;
Expand Down Expand Up @@ -63,6 +71,7 @@
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.core.IsNull.notNullValue;
Expand Down Expand Up @@ -1376,4 +1385,46 @@ private void assertOrderByKeyResponse(
}
);
}

public void testGlobalAggregationWithScore() throws Exception {
assertAcked(prepareCreate("global").setMapping("keyword", "type=keyword"));
indexRandom(
true,
prepareIndex("global").setSource("keyword", "a"),
prepareIndex("global").setSource("keyword", "c"),
prepareIndex("global").setSource("keyword", "e")
);
String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString();
Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values());
GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation(
new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING)
.executionHint(executionHint)
.collectMode(collectionMode)
.field("keyword")
.order(BucketOrder.key(true))
.subAggregation(
new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING)
.executionHint(executionHint)
.collectMode(collectionMode)
.field("keyword")
.order(BucketOrder.key(true))
.subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_"))
)
);
assertNoFailuresAndResponse(prepareSearch("global").addAggregation(globalBuilder), response -> {
InternalGlobal result = response.getAggregations().get("global");
InternalMultiBucketAggregation<?, ?> terms = result.getAggregations().get("terms");
assertThat(terms.getBuckets().size(), equalTo(3));
for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) {
InternalMultiBucketAggregation<?, ?> subTerms = bucket.getAggregations().get("sub_terms");
assertThat(subTerms.getBuckets().size(), equalTo(1));
MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0);
InternalTopHits topHits = subBucket.getAggregations().get("top_hits");
assertThat(topHits.getHits().getHits().length, equalTo(1));
for (SearchHit hit : topHits.getHits()) {
assertThat(hit.getScore(), greaterThan(0f));
}
}
});
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import org.elasticsearch.common.util.LongObjectPagedHashMap.Cursor;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.NestedDocuments;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
Expand Down Expand Up @@ -219,14 +218,7 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE

private static FetchSearchResult runFetchPhase(SubSearchContext subSearchContext, int[] docIdsToLoad) {
// Fork the search execution context for each slice, because the fetch phase does not support concurrent execution yet.
SearchExecutionContext searchExecutionContext = new SearchExecutionContext(subSearchContext.getSearchExecutionContext()) {
@Override
public NestedDocuments getNestedDocuments() {
// this is a horrible hack: AggregationTestCase mocks SearchExecutionContext to override the resolution of nested documents.
// That gets lost as we fork the search execution context: overriding this method preserves that.
return subSearchContext.getSearchExecutionContext().getNestedDocuments();
}
};
SearchExecutionContext searchExecutionContext = new SearchExecutionContext(subSearchContext.getSearchExecutionContext());
SubSearchContext fetchSubSearchContext = new SubSearchContext(subSearchContext) {
@Override
public SearchExecutionContext getSearchExecutionContext() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
Expand All @@ -49,8 +48,6 @@
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.MultiBucketConsumerService;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.bucket.nested.InternalNested;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests;
Expand All @@ -72,7 +69,6 @@
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

public class RareTermsAggregatorTests extends AggregatorTestCase {

Expand Down Expand Up @@ -334,47 +330,6 @@ public void testInsideTerms() throws IOException {
}
}

public void testGlobalAggregationWithScore() throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Document document = new Document();
document.add(new SortedDocValuesField("keyword", new BytesRef("a")));
indexWriter.addDocument(document);
document = new Document();
document.add(new SortedDocValuesField("keyword", new BytesRef("c")));
indexWriter.addDocument(document);
document = new Document();
document.add(new SortedDocValuesField("keyword", new BytesRef("e")));
indexWriter.addDocument(document);
try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) {
GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation(
new RareTermsAggregationBuilder("terms").field("keyword")
.subAggregation(
new RareTermsAggregationBuilder("sub_terms").field("keyword")
.subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_"))
)
);

MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword");

InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType));
InternalMultiBucketAggregation<?, ?> terms = result.getAggregations().get("terms");
assertThat(terms.getBuckets().size(), equalTo(3));
for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) {
InternalMultiBucketAggregation<?, ?> subTerms = bucket.getAggregations().get("sub_terms");
assertThat(subTerms.getBuckets().size(), equalTo(1));
MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0);
InternalTopHits topHits = subBucket.getAggregations().get("top_hits");
assertThat(topHits.getHits().getHits().length, equalTo(1));
for (SearchHit hit : topHits.getHits()) {
assertThat(hit.getScore(), greaterThan(0f));
}
}
}
}
}
}

public void testWithNestedAggregations() throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,6 @@
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.StringFieldScript;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
Expand All @@ -91,8 +90,6 @@
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
Expand Down Expand Up @@ -1308,57 +1305,6 @@ public void testMixLongAndDouble() throws Exception {
}
}

public void testGlobalAggregationWithScore() throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Document document = new Document();
document.add(new SortedDocValuesField("keyword", new BytesRef("a")));
indexWriter.addDocument(document);
document = new Document();
document.add(new SortedDocValuesField("keyword", new BytesRef("c")));
indexWriter.addDocument(document);
document = new Document();
document.add(new SortedDocValuesField("keyword", new BytesRef("e")));
indexWriter.addDocument(document);
try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) {
String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString();
Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values());
GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation(
new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING)
.executionHint(executionHint)
.collectMode(collectionMode)
.field("keyword")
.order(BucketOrder.key(true))
.subAggregation(
new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING)
.executionHint(executionHint)
.collectMode(collectionMode)
.field("keyword")
.order(BucketOrder.key(true))
.subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_"))
)
);

MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword");

InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType));
InternalMultiBucketAggregation<?, ?> terms = result.getAggregations().get("terms");
assertThat(terms.getBuckets().size(), equalTo(3));
for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) {
InternalMultiBucketAggregation<?, ?> subTerms = bucket.getAggregations().get("sub_terms");
assertThat(subTerms.getBuckets().size(), equalTo(1));
MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0);
InternalTopHits topHits = subBucket.getAggregations().get("top_hits");
assertThat(topHits.getHits().getHits().length, equalTo(1));
for (SearchHit hit : topHits.getHits()) {
assertThat(hit.getScore(), greaterThan(0f));
}
}
}
}
}
}

public void testWithNestedAggregations() throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Expand Down

0 comments on commit beb872a

Please sign in to comment.