Skip to content

Commit

Permalink
Update elasticsearch to 8.6.0 (#453)
Browse files Browse the repository at this point in the history
* Update elasticsearch to 8.6.0

* Updates for ES 8.6

Co-authored-by: Daniel Worley <[email protected]>
  • Loading branch information
Philippus and worleydl authored Jan 11, 2023
1 parent f6f8470 commit 2338220
Show file tree
Hide file tree
Showing 6 changed files with 21 additions and 13 deletions.
2 changes: 1 addition & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
ltrVersion = 1.5.8
elasticsearchVersion = 8.5.3
elasticsearchVersion = 8.6.0
luceneVersion = 9.4.2
ow2Version = 8.0.1
antlrVersion = 4.5.1-1
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseTopLevelQuery;

public class PrecompiledTemplateFeature implements Feature, Accountable {
private static final long BASE_RAM_USED = RamUsageEstimator.shallowSizeOfInstance(StoredFeature.class);
Expand Down Expand Up @@ -93,7 +93,7 @@ public Query doToQuery(LtrQueryContext context, FeatureSet set, Map<String, Obje
try {
XContentParser parser = XContentFactory.xContent(query)
.createParser(context.getSearchExecutionContext().getParserConfig(), query);
QueryBuilder queryBuilder = parseInnerQueryBuilder(parser);
QueryBuilder queryBuilder = parseTopLevelQuery(parser);
// XXX: QueryShardContext extends QueryRewriteContext (for now)
return Rewriteable.rewrite(queryBuilder, context.getSearchExecutionContext()).toQuery(context.getSearchExecutionContext());
} catch (IOException | ParsingException | IllegalArgumentException e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ public class Caches {

static {
LTR_CACHE_MEM_SETTING = Setting.memorySizeSetting("ltr.caches.max_mem",
(s) -> new ByteSizeValue(Math.min(RamUsageEstimator.ONE_MB*10,
(s) -> ByteSizeValue.ofBytes(Math.min(RamUsageEstimator.ONE_MB*10,
JvmInfo.jvmInfo().getMem().getHeapMax().getBytes()/10)).toString(),
Setting.Property.NodeScope);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.fetch.StoredFieldsSpec;
import org.elasticsearch.search.rescore.QueryRescorer;
import org.elasticsearch.search.rescore.RescoreContext;

Expand Down Expand Up @@ -160,6 +161,11 @@ public void process(HitContext hitContext) throws IOException {
scorer.score();
}
}

// Declares this fetch sub-phase processor's stored-fields requirements
// (override required by the ES 8.6 FetchSubPhaseProcessor interface).
// NO_REQUIREMENTS signals that no stored fields need to be loaded for
// this processor — presumably the LTR logging path reads only query
// context / doc values; TODO(review): confirm against HitLogConsumer.
@Override
public StoredFieldsSpec storedFieldsSpec() {
return StoredFieldsSpec.NO_REQUIREMENTS;
}
}

static class HitLogConsumer implements LogLtrRanker.LogConsumer {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ public void testCachedModelSet() throws IOException {
}

public void testWontBlowUp() throws IOException {
Caches caches = new Caches(TimeValue.timeValueHours(1), TimeValue.timeValueHours(1), new ByteSizeValue(100000));
Caches caches = new Caches(TimeValue.timeValueHours(1), TimeValue.timeValueHours(1), ByteSizeValue.ofBytes(100000));
CachedFeatureStore store = new CachedFeatureStore(memStore, caches);
long curWeight = store.modelWeight();
long maxWeight = caches.getMaxWeight();
Expand All @@ -114,7 +114,7 @@ public void testWontBlowUp() throws IOException {

@BadApple(bugUrl = "https://github.com/o19s/elasticsearch-learning-to-rank/issues/75")
public void testExpirationOnWrite() throws IOException, InterruptedException {
Caches caches = new Caches(TimeValue.timeValueMillis(100), TimeValue.timeValueHours(1), new ByteSizeValue(1000000));
Caches caches = new Caches(TimeValue.timeValueMillis(100), TimeValue.timeValueHours(1), ByteSizeValue.ofBytes(1000000));
CachedFeatureStore store = new CachedFeatureStore(memStore, caches);
CompiledLtrModel model = LtrTestUtils.buildRandomModel();
memStore.add(model);
Expand All @@ -132,7 +132,7 @@ public void testExpirationOnWrite() throws IOException, InterruptedException {

@BadApple(bugUrl = "https://github.com/o19s/elasticsearch-learning-to-rank/issues/75")
public void testExpirationOnGet() throws IOException, InterruptedException {
Caches caches = new Caches(TimeValue.timeValueHours(1), TimeValue.timeValueMillis(100), new ByteSizeValue(1000000));
Caches caches = new Caches(TimeValue.timeValueHours(1), TimeValue.timeValueMillis(100), ByteSizeValue.ofBytes(1000000));
CachedFeatureStore store = new CachedFeatureStore(memStore, caches);
CompiledLtrModel model = LtrTestUtils.buildRandomModel();
memStore.add(model);
Expand Down Expand Up @@ -190,4 +190,4 @@ public void testCacheStatsIsolation() throws IOException {
caches.evict(two.getStoreName());
assertTrue(caches.getCachedStoreNames().isEmpty());
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,11 @@
import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.fielddata.plain.SortedDoublesIndexFieldData;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.lookup.Source;
import org.junit.AfterClass;
import org.junit.BeforeClass;

Expand Down Expand Up @@ -179,11 +181,10 @@ public void collect(int doc) throws IOException {
String id = d.get("id");
SearchHit hit = new SearchHit(
doc,
id,
random().nextBoolean() ? new HashMap<>() : null,
null
id
);
processor.process(new FetchSubPhase.HitContext(hit, context, doc));
Source source = null;
processor.process(new FetchSubPhase.HitContext(hit, context, doc, Map.of(), source));
hits.add(hit);
}
}
Expand Down Expand Up @@ -217,8 +218,9 @@ public Query buildFunctionScore() {
new SortedDoublesIndexFieldData(
"score",
FLOAT,
CoreValuesSourceType.NUMERIC,
(dv, n) -> { throw new UnsupportedOperationException(); }));
return new FunctionScoreQuery(new MatchAllDocsQuery(),
fieldValueFactorFunction, CombineFunction.MULTIPLY, 0F, Float.MAX_VALUE);
}
}
}

0 comments on commit 2338220

Please sign in to comment.