Revert #46598 that breaks the cachability of the sub search contexts.
jimczi committed Oct 15, 2019
1 parent fe15d9f commit b858e19
Showing 13 changed files with 107 additions and 139 deletions.
@@ -67,38 +67,29 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
}

@Override
public void doValidate(QueryShardContext queryShardContext) {
if (ParentJoinFieldMapper.getMapper(queryShardContext.getMapperService()) == null
&& innerHitBuilder.isIgnoreUnmapped() == false) {
throw new IllegalStateException("no join field has been configured");
}
}

@Override
public void build(SearchContext context, InnerHitsContext innerHitsContext) throws IOException {
protected void doBuild(SearchContext context, InnerHitsContext innerHitsContext) throws IOException {
QueryShardContext queryShardContext = context.getQueryShardContext();
ParentJoinFieldMapper joinFieldMapper = ParentJoinFieldMapper.getMapper(context.mapperService());
if (joinFieldMapper == null) {
assert innerHitBuilder.isIgnoreUnmapped() : "should be validated first";
return;
if (joinFieldMapper != null) {
String name = innerHitBuilder.getName() != null ? innerHitBuilder.getName() : typeName;
JoinFieldInnerHitSubContext joinFieldInnerHits = new JoinFieldInnerHitSubContext(name, context, typeName,
fetchChildInnerHits, joinFieldMapper);
setupInnerHitsContext(queryShardContext, joinFieldInnerHits);
innerHitsContext.addInnerHitDefinition(joinFieldInnerHits);
} else {
if (innerHitBuilder.isIgnoreUnmapped() == false) {
throw new IllegalStateException("no join field has been configured");
}
}
String name = innerHitBuilder.getName() != null ? innerHitBuilder.getName() : typeName;
JoinFieldInnerHitSubContext joinFieldInnerHits =
new JoinFieldInnerHitSubContext(name, context, typeName, fetchChildInnerHits, joinFieldMapper);
setupInnerHitsContext(queryShardContext, joinFieldInnerHits);
innerHitsContext.addInnerHitDefinition(joinFieldInnerHits);
}

static final class JoinFieldInnerHitSubContext extends InnerHitsContext.InnerHitSubContext {
private final String typeName;
private final boolean fetchChildInnerHits;
private final ParentJoinFieldMapper joinFieldMapper;

JoinFieldInnerHitSubContext(String name,
SearchContext context,
String typeName,
boolean fetchChildInnerHits,
ParentJoinFieldMapper joinFieldMapper) {
JoinFieldInnerHitSubContext(String name, SearchContext context, String typeName, boolean fetchChildInnerHits,
ParentJoinFieldMapper joinFieldMapper) {
super(name, context);
this.typeName = typeName;
this.fetchChildInnerHits = fetchChildInnerHits;
@@ -111,13 +102,13 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
TopDocsAndMaxScore[] result = new TopDocsAndMaxScore[hits.length];
for (int i = 0; i < hits.length; i++) {
SearchHit hit = hits[i];
String joinName = getSortedDocValue(joinFieldMapper.name(), this, hit.docId());
String joinName = getSortedDocValue(joinFieldMapper.name(), context, hit.docId());
if (joinName == null) {
result[i] = new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
continue;
}

QueryShardContext qsc = getQueryShardContext();
QueryShardContext qsc = context.getQueryShardContext();
ParentIdFieldMapper parentIdFieldMapper =
joinFieldMapper.getParentIdFieldMapper(typeName, fetchChildInnerHits == false);
if (parentIdFieldMapper == null) {
@@ -135,14 +126,14 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
.add(joinFieldMapper.fieldType().termQuery(typeName, qsc), BooleanClause.Occur.FILTER)
.build();
} else {
String parentId = getSortedDocValue(parentIdFieldMapper.name(), this, hit.docId());
q = mapperService().fullName(IdFieldMapper.NAME).termQuery(parentId, qsc);
String parentId = getSortedDocValue(parentIdFieldMapper.name(), context, hit.docId());
q = context.mapperService().fullName(IdFieldMapper.NAME).termQuery(parentId, qsc);
}

Weight weight = searcher().createWeight(searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f);
Weight weight = context.searcher().createWeight(context.searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f);
if (size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
for (LeafReaderContext ctx : searcher().getIndexReader().leaves()) {
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
}
result[i] = new TopDocsAndMaxScore(
@@ -151,7 +142,7 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
Lucene.EMPTY_SCORE_DOCS
), Float.NaN);
} else {
int topN = Math.min(from() + size(), searcher().getIndexReader().maxDoc());
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
MaxScoreCollector maxScoreCollector = null;
if (sort() != null) {
@@ -164,7 +155,7 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
maxScoreCollector = new MaxScoreCollector();
}
try {
for (LeafReaderContext ctx : searcher().getIndexReader().leaves()) {
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx);
}
} finally {
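
For reference, the guard restored in doBuild above is what backs the ignore_unmapped flag on join-field queries: when the target index has no join field configured, the inner hit definition is silently skipped if the flag is set, and otherwise the build fails with "no join field has been configured". A minimal usage sketch with the standard 7.x join-module query builders follows; the builder API is ordinary Elasticsearch client code used only as an illustration, not something introduced by this commit.

import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.join.query.HasChildQueryBuilder;
import org.elasticsearch.join.query.JoinQueryBuilders;

public class JoinInnerHitsExample {
    public static HasChildQueryBuilder childQueryWithInnerHits() {
        // With ignore_unmapped=true, doBuild skips the inner hit definition on an index
        // without a join field; with false it throws at build time instead.
        return JoinQueryBuilders.hasChildQuery("comment", QueryBuilders.matchAllQuery(), ScoreMode.None)
            .ignoreUnmapped(true)
            .innerHit(new InnerHitBuilder("comments").setIgnoreUnmapped(true));
    }
}
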
@@ -146,6 +146,7 @@ protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query que
// doCreateTestQueryBuilder)
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(context);

assertNotNull(context);
Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
assertTrue(innerHitBuilders.containsKey(queryBuilder.innerHit().getName()));
@@ -29,6 +29,7 @@
import org.elasticsearch.search.sort.SortBuilder;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

@@ -46,9 +47,13 @@ protected InnerHitContextBuilder(QueryBuilder query, InnerHitBuilder innerHitBui
this.query = query;
}

public final void validate(QueryShardContext queryShardContext) {
public InnerHitBuilder innerHitBuilder() {
return innerHitBuilder;
}

public final void build(SearchContext parentSearchContext, InnerHitsContext innerHitsContext) throws IOException {
long innerResultWindow = innerHitBuilder.getFrom() + innerHitBuilder.getSize();
int maxInnerResultWindow = queryShardContext.getIndexSettings().getMaxInnerResultWindow();
int maxInnerResultWindow = parentSearchContext.mapperService().getIndexSettings().getMaxInnerResultWindow();
if (innerResultWindow > maxInnerResultWindow) {
throw new IllegalArgumentException(
"Inner result window is too large, the inner hit definition's [" + innerHitBuilder.getName() +
@@ -57,16 +62,10 @@ public final void validate(QueryShardContext queryShardContext) {
"] index level setting."
);
}
doValidate(queryShardContext);
}

public InnerHitBuilder innerHitBuilder() {
return innerHitBuilder;
doBuild(parentSearchContext, innerHitsContext);
}

protected abstract void doValidate(QueryShardContext queryShardContext);

public abstract void build(SearchContext parentSearchContext, InnerHitsContext innerHitsContext) throws IOException;
protected abstract void doBuild(SearchContext parentSearchContext, InnerHitsContext innerHitsContext) throws IOException;

public static void extractInnerHits(QueryBuilder query, Map<String, InnerHitContextBuilder> innerHitBuilders) {
if (query instanceof AbstractQueryBuilder) {
@@ -101,7 +100,7 @@ protected void setupInnerHitsContext(QueryShardContext queryShardContext,
}
}
if (innerHitBuilder.getFetchSourceContext() != null) {
innerHitsContext.fetchSourceContext(innerHitBuilder.getFetchSourceContext() );
innerHitsContext.fetchSourceContext(innerHitBuilder.getFetchSourceContext());
}
if (innerHitBuilder.getSorts() != null) {
Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(innerHitBuilder.getSorts(), queryShardContext);
@@ -114,6 +113,23 @@ protected void setupInnerHitsContext(QueryShardContext queryShardContext,
}
ParsedQuery parsedQuery = new ParsedQuery(query.toQuery(queryShardContext), queryShardContext.copyNamedQueries());
innerHitsContext.parsedQuery(parsedQuery);
innerHitsContext.innerHits(children);
Map<String, InnerHitsContext.InnerHitSubContext> baseChildren =
buildChildInnerHits(innerHitsContext.parentSearchContext(), children);
innerHitsContext.setChildInnerHits(baseChildren);
}

private static Map<String, InnerHitsContext.InnerHitSubContext> buildChildInnerHits(SearchContext parentSearchContext,
Map<String, InnerHitContextBuilder> children) throws IOException {

Map<String, InnerHitsContext.InnerHitSubContext> childrenInnerHits = new HashMap<>();
for (Map.Entry<String, InnerHitContextBuilder> entry : children.entrySet()) {
InnerHitsContext childInnerHitsContext = new InnerHitsContext();
entry.getValue().build(
parentSearchContext, childInnerHitsContext);
if (childInnerHitsContext.getInnerHits() != null) {
childrenInnerHits.putAll(childInnerHitsContext.getInnerHits());
}
}
return childrenInnerHits;
}
}
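
Taken together, the restored InnerHitContextBuilder is a small template method: the final build() applies the inner-result-window check and delegates to doBuild(), and setupInnerHitsContext() builds each child definition into its own InnerHitsContext before attaching the resulting sub-contexts via setChildInnerHits(). The stand-alone sketch below only models that shape; every type in it (HitBuilder, Parent, Hits, Sub) is a simplified stand-in, not an Elasticsearch class.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Stand-ins: Parent ~ SearchContext, Hits ~ InnerHitsContext, Sub ~ InnerHitSubContext.
abstract class HitBuilder {
    protected final Map<String, HitBuilder> children = new HashMap<>();

    // Template method: shared validation first, then the subclass-specific build step.
    public final void build(Parent parent, Hits hits) throws IOException {
        if (requestedWindow() > parent.maxInnerResultWindow) {
            throw new IllegalArgumentException("inner result window is too large");
        }
        doBuild(parent, hits);
    }

    protected abstract int requestedWindow();

    protected abstract void doBuild(Parent parent, Hits hits) throws IOException;

    // Mirrors buildChildInnerHits: each child is built into its own container and the
    // resulting sub-contexts are merged so they can be attached to the parent sub-context.
    protected Map<String, Sub> buildChildren(Parent parent) throws IOException {
        Map<String, Sub> built = new HashMap<>();
        for (HitBuilder child : children.values()) {
            Hits childHits = new Hits();
            child.build(parent, childHits);
            built.putAll(childHits.subContexts);
        }
        return built;
    }
}

class Parent { int maxInnerResultWindow = 100; }
class Hits { final Map<String, Sub> subContexts = new HashMap<>(); }
class Sub { Map<String, Sub> childInnerHits; }
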
@@ -332,34 +332,28 @@ public void extractInnerHitBuilders(Map<String, InnerHitContextBuilder> innerHit
static class NestedInnerHitContextBuilder extends InnerHitContextBuilder {
private final String path;

NestedInnerHitContextBuilder(String path,
QueryBuilder query,
InnerHitBuilder innerHitBuilder,
Map<String, InnerHitContextBuilder> children) {
NestedInnerHitContextBuilder(String path, QueryBuilder query, InnerHitBuilder innerHitBuilder,
Map<String, InnerHitContextBuilder> children) {
super(query, innerHitBuilder, children);
this.path = path;
}

@Override
public void doValidate(QueryShardContext queryShardContext) {
if (queryShardContext.getObjectMapper(path) == null
&& innerHitBuilder.isIgnoreUnmapped() == false) {
throw new IllegalStateException("[" + query.getName() + "] no mapping found for type [" + path + "]");
}
}

@Override
public void build(SearchContext searchContext, InnerHitsContext innerHitsContext) throws IOException {
QueryShardContext queryShardContext = searchContext.getQueryShardContext();
protected void doBuild(SearchContext parentSearchContext,
InnerHitsContext innerHitsContext) throws IOException {
QueryShardContext queryShardContext = parentSearchContext.getQueryShardContext();
ObjectMapper nestedObjectMapper = queryShardContext.getObjectMapper(path);
if (nestedObjectMapper == null) {
assert innerHitBuilder.isIgnoreUnmapped() : "should be validated first";
return;
if (innerHitBuilder.isIgnoreUnmapped() == false) {
throw new IllegalStateException("[" + query.getName() + "] no mapping found for type [" + path + "]");
} else {
return;
}
}
String name = innerHitBuilder.getName() != null ? innerHitBuilder.getName() : nestedObjectMapper.fullPath();
ObjectMapper parentObjectMapper = queryShardContext.nestedScope().nextLevel(nestedObjectMapper);
NestedInnerHitSubContext nestedInnerHits = new NestedInnerHitSubContext(
name, searchContext, parentObjectMapper, nestedObjectMapper
name, parentSearchContext, parentObjectMapper, nestedObjectMapper
);
setupInnerHitsContext(queryShardContext, nestedInnerHits);
queryShardContext.nestedScope().previousLevel();
@@ -394,7 +388,7 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
SearchHit hit = hits[i];
Query rawParentFilter;
if (parentObjectMapper == null) {
rawParentFilter = Queries.newNonNestedFilter(indexShard().indexSettings().getIndexVersionCreated());
rawParentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated());
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
@@ -405,17 +399,17 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
LeafReaderContext ctx = searcher().getIndexReader().leaves().get(readerIndex);

Query childFilter = childObjectMapper.nestedTypeFilter();
BitSetProducer parentFilter = bitsetFilterCache().getBitSetProducer(rawParentFilter);
BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
Query q = new ParentChildrenBlockJoinQuery(parentFilter, childFilter, parentDocId);
Weight weight = searcher().createWeight(searcher().rewrite(q),
Weight weight = context.searcher().createWeight(context.searcher().rewrite(q),
org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f);
if (size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
result[i] = new TopDocsAndMaxScore(new TopDocs(new TotalHits(totalHitCountCollector.getTotalHits(),
TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN);
} else {
int topN = Math.min(from() + size(), searcher().getIndexReader().maxDoc());
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
MaxScoreCollector maxScoreCollector = null;
if (sort() != null) {
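
The nested variant mirrors the join-field builder: doBuild resolves the nested object mapper at build time and either registers the sub-context or fails with "no mapping found for type [...]" unless ignore_unmapped is set. For comparison, a usage sketch with the standard nested query builder; again this is ordinary Elasticsearch client API shown purely as an illustration.

import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.NestedQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class NestedInnerHitsExample {
    public static NestedQueryBuilder nestedWithInnerHits() {
        // ignore_unmapped=true lets doBuild skip the inner hit definition when the index
        // has no mapping for the "comments" path, instead of throwing.
        return QueryBuilders.nestedQuery("comments", QueryBuilders.matchAllQuery(), ScoreMode.Avg)
            .ignoreUnmapped(true)
            .innerHit(new InnerHitBuilder().setIgnoreUnmapped(true));
    }
}
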
@@ -45,7 +45,6 @@
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
@@ -113,7 +112,6 @@ final class DefaultSearchContext extends SearchContext {
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private DocValueFieldsContext docValueFieldsContext;
private Map<String, InnerHitContextBuilder> innerHits = Collections.emptyMap();
private int from = -1;
private int size = -1;
private SortAndFormats sort;
@@ -398,16 +396,6 @@ public void highlight(SearchContextHighlight highlight) {
this.highlight = highlight;
}

@Override
public void innerHits(Map<String, InnerHitContextBuilder> innerHits) {
this.innerHits = innerHits;
}

@Override
public Map<String, InnerHitContextBuilder> innerHits() {
return innerHits;
}

@Override
public SuggestionSearchContext suggest() {
return suggest;
@@ -740,7 +740,6 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc
context.from(source.from());
context.size(source.size());
Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
context.innerHits(innerHitBuilders);
if (source.query() != null) {
InnerHitContextBuilder.extractInnerHits(source.query(), innerHitBuilders);
context.parsedQuery(queryShardContext.toQuery(source.query()));
@@ -751,7 +750,11 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc
}
if (innerHitBuilders.size() > 0) {
for (Map.Entry<String, InnerHitContextBuilder> entry : innerHitBuilders.entrySet()) {
entry.getValue().validate(queryShardContext);
try {
entry.getValue().build(context, context.innerHits());
} catch (IOException e) {
throw new SearchException(shardTarget, "failed to build inner_hits", e);
}
}
}
if (source.sorts() != null) {
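
With the builders no longer stored on the SearchContext, parseSource builds every extracted inner-hit definition straight into context.innerHits() while the request is parsed, turning an IOException into a shard-level SearchException; the sub search contexts are therefore created exactly once up front, which appears to be the cacheability the commit title refers to. A tiny sketch of that eager loop follows, again with hypothetical stand-in types rather than the real SearchContext, InnerHitsContext and InnerHitContextBuilder.

import java.io.IOException;
import java.util.Map;

// Hypothetical stand-ins for the real search-context types.
class SearchCtx { final HitsCtx innerHits = new HitsCtx(); }
class HitsCtx { }

interface HitDefinition {
    void build(SearchCtx context, HitsCtx innerHits) throws IOException;
}

final class EagerInnerHits {
    // Build each definition while the search context is being set up; an IO failure
    // surfaces immediately as a request-level error instead of later during the fetch phase.
    static void buildAll(SearchCtx context, Map<String, HitDefinition> builders) {
        for (Map.Entry<String, HitDefinition> entry : builders.entrySet()) {
            try {
                entry.getValue().build(context, context.innerHits);
            } catch (IOException e) {
                throw new IllegalStateException("failed to build inner_hits", e);
            }
        }
    }
}
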