IndexScopedSettings.java
@@ -224,6 +224,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
IndexMetadata.INDEX_ROUTING_PATH,
IndexSettings.TIME_SERIES_START_TIME,
IndexSettings.TIME_SERIES_END_TIME,
IndexSettings.SEQ_NO_INDEX_OPTIONS_SETTING,

// Legacy index settings we must keep around for BWC from 7.x
EngineConfig.INDEX_OPTIMIZE_AUTO_GENERATED_IDS,
server/src/main/java/org/elasticsearch/index/IndexSettings.java (24 additions, 0 deletions)
@@ -28,6 +28,7 @@
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
import org.elasticsearch.index.translog.Translog;
@@ -829,6 +830,23 @@ private static String getIgnoreAboveDefaultValue(final Settings settings) {
}
}

public static final Setting<SeqNoFieldMapper.SeqNoIndexOptions> SEQ_NO_INDEX_OPTIONS_SETTING = Setting.enumSetting(
SeqNoFieldMapper.SeqNoIndexOptions.class,
settings -> {
final IndexMode indexMode = IndexSettings.MODE.get(settings);
if ((indexMode == IndexMode.LOGSDB || indexMode == IndexMode.TIME_SERIES)
&& IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).onOrAfter(IndexVersions.SEQ_NO_WITHOUT_POINTS)) {
return SeqNoFieldMapper.SeqNoIndexOptions.DOC_VALUES_ONLY.toString();
} else {
return SeqNoFieldMapper.SeqNoIndexOptions.POINTS_AND_DOC_VALUES.toString();
}
},
"index.seq_no.index_options",
value -> {},
Property.IndexScope,
Property.Final
);

private final Index index;
private final IndexVersion version;
private final Logger logger;
@@ -933,6 +951,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) {
private volatile int maxRegexLength;

private final IndexRouting indexRouting;
private final SeqNoFieldMapper.SeqNoIndexOptions seqNoIndexOptions;

/**
* The default mode for storing source, for all mappers not overriding this setting.
@@ -1099,6 +1118,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
recoverySourceSyntheticEnabled = DiscoveryNode.isStateless(nodeSettings) == false
&& scopedSettings.get(RECOVERY_USE_SYNTHETIC_SOURCE_SETTING);
useDocValuesSkipper = DOC_VALUES_SKIPPER && scopedSettings.get(USE_DOC_VALUES_SKIPPER);
seqNoIndexOptions = scopedSettings.get(SEQ_NO_INDEX_OPTIONS_SETTING);
if (recoverySourceSyntheticEnabled) {
if (DiscoveryNode.isStateless(settings)) {
throw new IllegalArgumentException("synthetic recovery source is only allowed in stateful");
@@ -1837,4 +1857,8 @@ public DenseVectorFieldMapper.FilterHeuristic getHnswFilterHeuristic() {
private void setHnswFilterHeuristic(DenseVectorFieldMapper.FilterHeuristic heuristic) {
this.hnswFilterHeuristic = heuristic;
}

public SeqNoFieldMapper.SeqNoIndexOptions seqNoIndexOptions() {
return seqNoIndexOptions;
}
}
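
Note on the defaulting logic added above: the new index.seq_no.index_options setting is mode- and version-dependent. Logsdb and time_series indices created on or after SEQ_NO_WITHOUT_POINTS default to doc-values-only sequence numbers, while every other index keeps points plus doc values. A standalone sketch of that rule (hypothetical names, not Elasticsearch code):

// Standalone illustration of the defaulting rule; enum, class and method names are made up.
enum SeqNoIndexOptionsSketch { POINTS_AND_DOC_VALUES, DOC_VALUES_ONLY }

final class SeqNoDefaultSketch {
    static SeqNoIndexOptionsSketch defaultFor(String indexMode, int versionCreated, int seqNoWithoutPoints) {
        boolean denseSeqNoMode = indexMode.equals("logsdb") || indexMode.equals("time_series");
        if (denseSeqNoMode && versionCreated >= seqNoWithoutPoints) {
            return SeqNoIndexOptionsSketch.DOC_VALUES_ONLY;      // new logsdb/time_series indices drop the _seq_no points
        }
        return SeqNoIndexOptionsSketch.POINTS_AND_DOC_VALUES;    // all other indices keep the existing layout
    }

    public static void main(String[] args) {
        System.out.println(defaultFor("logsdb", 9_027_0_00, 9_027_0_00));   // DOC_VALUES_ONLY
        System.out.println(defaultFor("standard", 9_027_0_00, 9_027_0_00)); // POINTS_AND_DOC_VALUES
    }
}
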
IndexVersions.java
@@ -168,6 +168,7 @@ private static Version parseUnchecked(String version) {
public static final IndexVersion DEFAULT_OVERSAMPLE_VALUE_FOR_BBQ = def(9_024_0_00, Version.LUCENE_10_2_1);
public static final IndexVersion SEMANTIC_TEXT_DEFAULTS_TO_BBQ = def(9_025_0_00, Version.LUCENE_10_2_1);
public static final IndexVersion DEFAULT_TO_ACORN_HNSW_FILTER_HEURISTIC = def(9_026_0_00, Version.LUCENE_10_2_1);
public static final IndexVersion SEQ_NO_WITHOUT_POINTS = def(9_027_0_00, Version.LUCENE_10_2_1);
/*
* STOP! READ THIS FIRST! No, really,
* ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _
InternalEngine.java
@@ -10,7 +10,6 @@
package org.elasticsearch.index.engine;

import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
@@ -1811,7 +1810,10 @@ private DeletionStrategy planDeletionAsPrimary(Delete delete) throws IOException
private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws IOException {
assert assertMaxSeqNoOfUpdatesIsAdvanced(delete.uid(), delete.seqNo(), false, false);
try {
final ParsedDocument tombstone = ParsedDocument.deleteTombstone(delete.id());
final ParsedDocument tombstone = ParsedDocument.deleteTombstone(
engineConfig.getIndexSettings().seqNoIndexOptions(),
delete.id()
);
assert tombstone.docs().size() == 1 : "Tombstone doc should have single doc [" + tombstone + "]";
tombstone.updateSeqID(delete.seqNo(), delete.primaryTerm());
tombstone.version().setLongValue(plan.versionOfDeletion);
@@ -1970,7 +1972,10 @@ private NoOpResult innerNoOp(final NoOp noOp) throws IOException {
markSeqNoAsSeen(noOp.seqNo());
if (hasBeenProcessedBefore(noOp) == false) {
try {
final ParsedDocument tombstone = ParsedDocument.noopTombstone(noOp.reason());
final ParsedDocument tombstone = ParsedDocument.noopTombstone(
engineConfig.getIndexSettings().seqNoIndexOptions(),
noOp.reason()
);
tombstone.updateSeqID(noOp.seqNo(), noOp.primaryTerm());
// A noop tombstone does not require a _version but it's added to have a fully dense docvalues for the version
// field. 1L is selected to optimize the compression because it might probably be the most common value in
@@ -2753,10 +2758,10 @@ private IndexWriterConfig getIndexWriterConfig() {
? SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME
: SourceFieldMapper.RECOVERY_SOURCE_NAME,
engineConfig.getIndexSettings().getMode() == IndexMode.TIME_SERIES,
softDeletesPolicy::getRetentionQuery,
() -> softDeletesPolicy.getRetentionQuery(engineConfig.getIndexSettings().seqNoIndexOptions()),
new SoftDeletesRetentionMergePolicy(
Lucene.SOFT_DELETES_FIELD,
softDeletesPolicy::getRetentionQuery,
() -> softDeletesPolicy.getRetentionQuery(engineConfig.getIndexSettings().seqNoIndexOptions()),
new PrunePostingsMergePolicy(mergePolicy, IdFieldMapper.NAME)
)
);
@@ -3215,12 +3220,7 @@ public int countChanges(String source, long fromSeqNo, long toSeqNo) throws IOEx
ensureOpen();
refreshIfNeeded(source, toSeqNo);
try (Searcher searcher = acquireSearcher(source, SearcherScope.INTERNAL)) {
return LuceneChangesSnapshot.countOperations(
searcher,
fromSeqNo,
toSeqNo,
config().getIndexSettings().getIndexVersionCreated()
);
return LuceneChangesSnapshot.countOperations(searcher, engineConfig.getIndexSettings(), fromSeqNo, toSeqNo);
} catch (Exception e) {
try {
maybeFailEngine("count changes", e);
@@ -3448,7 +3448,11 @@ private void restoreVersionMapAndCheckpointTracker(DirectoryReader directoryRead
final IndexSearcher searcher = new IndexSearcher(directoryReader);
searcher.setQueryCache(null);
final Query query = new BooleanQuery.Builder().add(
LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, getPersistedLocalCheckpoint() + 1, Long.MAX_VALUE),
SeqNoFieldMapper.rangeQueryForSeqNo(
engineConfig.getIndexSettings().seqNoIndexOptions(),
getPersistedLocalCheckpoint() + 1,
Long.MAX_VALUE
),
BooleanClause.Occur.MUST
)
.add(Queries.newNonNestedFilter(indexVersionCreated), BooleanClause.Occur.MUST) // exclude non-root nested documents
LuceneChangesSnapshot.java
@@ -17,6 +17,7 @@
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.core.Assertions;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.MapperService;
@@ -187,12 +188,12 @@ private static boolean hasSequentialAccess(ScoreDoc[] scoreDocs) {
return true;
}

static int countOperations(Engine.Searcher engineSearcher, long fromSeqNo, long toSeqNo, IndexVersion indexVersionCreated)
static int countOperations(Engine.Searcher engineSearcher, IndexSettings indexSettings, long fromSeqNo, long toSeqNo)
throws IOException {
if (fromSeqNo < 0 || toSeqNo < 0 || fromSeqNo > toSeqNo) {
throw new IllegalArgumentException("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]");
}
return newIndexSearcher(engineSearcher).count(rangeQuery(fromSeqNo, toSeqNo, indexVersionCreated));
return newIndexSearcher(engineSearcher).count(rangeQuery(indexSettings, fromSeqNo, toSeqNo));
}

private Translog.Operation readDocAsOp(int docIndex) throws IOException {
SearchBasedChangesSnapshot.java
@@ -9,7 +9,6 @@

package org.elasticsearch.index.engine;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
@@ -26,6 +25,7 @@
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper;
import org.elasticsearch.index.mapper.MapperService;
@@ -47,7 +47,7 @@
public abstract class SearchBasedChangesSnapshot implements Translog.Snapshot, Closeable {
public static final int DEFAULT_BATCH_SIZE = 1024;

private final IndexVersion indexVersionCreated;
private final IndexSettings indexSettings;
private final IndexSearcher indexSearcher;
private final ValueFetcher sourceMetadataFetcher;
private final Closeable onClose;
@@ -97,7 +97,7 @@ protected SearchBasedChangesSnapshot(
}
};

this.indexVersionCreated = indexVersionCreated;
this.indexSettings = mapperService.getIndexSettings();
this.fromSeqNo = fromSeqNo;
this.toSeqNo = toSeqNo;
this.lastSeenSeqNo = fromSeqNo - 1;
@@ -109,7 +109,7 @@
this.searchBatchSize = (int) Math.min(requestingSize, searchBatchSize);

this.accessStats = accessStats;
this.totalHits = accessStats ? indexSearcher.count(rangeQuery(fromSeqNo, toSeqNo, indexVersionCreated)) : -1;
this.totalHits = accessStats ? indexSearcher.count(rangeQuery(indexSettings, fromSeqNo, toSeqNo)) : -1;
this.sourceMetadataFetcher = createSourceMetadataValueFetcher(mapperService, indexSearcher);
}

@@ -183,7 +183,7 @@ public void close() throws IOException {
* @return TopDocs instance containing the documents in the current batch.
*/
protected TopDocs nextTopDocs() throws IOException {
Query rangeQuery = rangeQuery(Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo, indexVersionCreated);
Query rangeQuery = rangeQuery(indexSettings, Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo);
SortField sortBySeqNo = new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG);

TopFieldCollectorManager collectorManager = new TopFieldCollectorManager(new Sort(sortBySeqNo), searchBatchSize, afterDoc, 0);
@@ -241,9 +241,10 @@ static IndexSearcher newIndexSearcher(Engine.Searcher engineSearcher) throws IOE
return new IndexSearcher(Lucene.wrapAllDocsLive(engineSearcher.getDirectoryReader()));
}

static Query rangeQuery(long fromSeqNo, long toSeqNo, IndexVersion indexVersionCreated) {
return new BooleanQuery.Builder().add(LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, fromSeqNo, toSeqNo), BooleanClause.Occur.MUST)
.add(Queries.newNonNestedFilter(indexVersionCreated), BooleanClause.Occur.MUST)
static Query rangeQuery(IndexSettings indexSettings, long fromSeqNo, long toSeqNo) {
Query seqNoQuery = SeqNoFieldMapper.rangeQueryForSeqNo(indexSettings.seqNoIndexOptions(), fromSeqNo, toSeqNo);
return new BooleanQuery.Builder().add(seqNoQuery, BooleanClause.Occur.MUST)
.add(Queries.newNonNestedFilter(indexSettings.getIndexVersionCreated()), BooleanClause.Occur.MUST)
.build();
}

SoftDeletesPolicy.java
@@ -9,7 +9,6 @@

package org.elasticsearch.index.engine;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.search.Query;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Releasables;
@@ -147,8 +146,8 @@ synchronized long getMinRetainedSeqNo() {
* Returns a soft-deletes retention query that will be used in {@link org.apache.lucene.index.SoftDeletesRetentionMergePolicy}
* Documents including tombstones are soft-deleted and matched this query will be retained and won't cleaned up by merges.
*/
Query getRetentionQuery() {
return LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, getMinRetainedSeqNo(), Long.MAX_VALUE);
Query getRetentionQuery(SeqNoFieldMapper.SeqNoIndexOptions seqNoIndexOptions) {
return SeqNoFieldMapper.rangeQueryForSeqNo(seqNoIndexOptions, getMinRetainedSeqNo(), Long.MAX_VALUE);
}

}
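
All three callers above (InternalEngine, SearchBasedChangesSnapshot, SoftDeletesPolicy) now build their _seq_no range queries through SeqNoFieldMapper.rangeQueryForSeqNo(...) instead of calling LongPoint.newRangeQuery directly. The helper's body is not part of this diff; a plausible sketch, assuming it simply switches on the index options, looks like this (illustration only, not the actual mapper code):

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.search.Query;

// Illustration only: the real helper lives in SeqNoFieldMapper and may differ in detail.
final class SeqNoRangeQuerySketch {
    enum SeqNoIndexOptions { POINTS_AND_DOC_VALUES, DOC_VALUES_ONLY }

    static Query rangeQueryForSeqNo(SeqNoIndexOptions options, long fromSeqNo, long toSeqNo) {
        if (options == SeqNoIndexOptions.DOC_VALUES_ONLY) {
            // no BKD points on disk, so fall back to a (slower) doc-values range query
            return NumericDocValuesField.newSlowRangeQuery("_seq_no", fromSeqNo, toSeqNo);
        }
        // previous behaviour: query the _seq_no points directly
        return LongPoint.newRangeQuery("_seq_no", fromSeqNo, toSeqNo);
    }
}

This also explains why the softDeletesPolicy::getRetentionQuery method references in getIndexWriterConfig() become lambdas: the retention query now needs the per-index seq_no options as an argument.
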
DocumentParserContext.java
@@ -264,7 +264,7 @@ protected DocumentParserContext(
new HashMap<>(),
null,
null,
SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(mappingParserContext.getIndexSettings().seqNoIndexOptions()),
RoutingFields.fromIndexSettings(mappingParserContext.getIndexSettings()),
parent,
dynamic,
ParsedDocument.java
@@ -45,9 +45,9 @@ public class ParsedDocument {
* Create a no-op tombstone document
* @param reason the reason for the no-op
*/
public static ParsedDocument noopTombstone(String reason) {
public static ParsedDocument noopTombstone(SeqNoFieldMapper.SeqNoIndexOptions seqNoIndexOptions, String reason) {
LuceneDocument document = new LuceneDocument();
SeqNoFieldMapper.SequenceIDFields seqIdFields = SeqNoFieldMapper.SequenceIDFields.tombstone();
var seqIdFields = SeqNoFieldMapper.SequenceIDFields.tombstone(seqNoIndexOptions);
seqIdFields.addFields(document);
Field versionField = VersionFieldMapper.versionField();
document.add(versionField);
@@ -72,9 +72,9 @@
* The returned document consists only _uid, _seqno, _term and _version fields; other metadata fields are excluded.
* @param id the id of the deleted document
*/
public static ParsedDocument deleteTombstone(String id) {
public static ParsedDocument deleteTombstone(SeqNoFieldMapper.SeqNoIndexOptions seqNoIndexOptions, String id) {
LuceneDocument document = new LuceneDocument();
SeqNoFieldMapper.SequenceIDFields seqIdFields = SeqNoFieldMapper.SequenceIDFields.tombstone();
SeqNoFieldMapper.SequenceIDFields seqIdFields = SeqNoFieldMapper.SequenceIDFields.tombstone(seqNoIndexOptions);
seqIdFields.addFields(document);
Field versionField = VersionFieldMapper.versionField();
document.add(versionField);
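
The tombstone factories and DocumentParserContext now take the seq_no index options so that delete/no-op tombstones and parsed documents write the _seq_no field with the same layout as the rest of the index. As a rough, assumed illustration of what the two options mean at the Lucene field level (not the actual SequenceIDFields code):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;

// Assumed illustration: doc values are always written; the point is only written when points are kept.
final class SeqNoFieldsSketch {
    static void addSeqNoFields(Document doc, long seqNo, boolean pointsAndDocValues) {
        doc.add(new NumericDocValuesField("_seq_no", seqNo)); // always present: sorting, recovery and slow range queries
        if (pointsAndDocValues) {
            doc.add(new LongPoint("_seq_no", seqNo));         // skipped entirely under doc_values_only
        }
    }
}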