Skip to content

Commit

Permalink
SOLR-16966: Add a first-class cache for OrdinalMaps (#119)
Browse files Browse the repository at this point in the history
Backport of an unmerged upstream PR, taken at commit 9eeb7e0.

* include updates to accommodate FS custom CacheConfig ctor signature
  • Loading branch information
magibney authored and Justin Sweeney committed Apr 26, 2024
1 parent 433661e commit f0ac0a9
Show file tree
Hide file tree
Showing 19 changed files with 753 additions and 65 deletions.
13 changes: 11 additions & 2 deletions solr/core/src/java/org/apache/solr/core/SolrConfig.java
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@
import org.apache.solr.schema.IndexSchemaFactory;
import org.apache.solr.search.CacheConfig;
import org.apache.solr.search.CaffeineCache;
import org.apache.solr.search.OrdMapRegenerator;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.SolrCache;
import org.apache.solr.search.ValueSourceParser;
Expand Down Expand Up @@ -306,6 +307,8 @@ private SolrConfig(
// filtOptCacheSize = getInt("query/boolTofilterOptimizer/@cacheSize",32);
// filtOptThreshold = getFloat("query/boolTofilterOptimizer/@threshold",.05f);

updateHandlerInfo = loadUpdatehandlerInfo(); // must do this before configuring ordMapCache

useFilterForSortedQuery = get("query").get("useFilterForSortedQuery").boolVal(false);
queryResultWindowSize = Math.max(1, get("query").get("queryResultWindowSize").intVal(1));
queryResultMaxDocsCached =
Expand All @@ -329,6 +332,13 @@ private SolrConfig(
conf = new CacheConfig(CaffeineCache.class, args, null);
}
fieldValueCacheConfig = conf;
conf = CacheConfig.getConfig(this, get("query").get("ordMapCache"), "query/ordMapCache");
if (conf != null) {
OrdMapRegenerator.configureRegenerator(this, conf);
ordMapCacheConfig = conf;
} else {
ordMapCacheConfig = OrdMapRegenerator.getDefaultCacheConfig(this);
}
useColdSearcher = get("query").get("useColdSearcher").boolVal(false);
dataDir = get("dataDir").txt();
if (dataDir != null && dataDir.length() == 0) dataDir = null;
Expand Down Expand Up @@ -356,8 +366,6 @@ private SolrConfig(
}
this.userCacheConfigs = Collections.unmodifiableMap(userCacheConfigs);

updateHandlerInfo = loadUpdatehandlerInfo();

final var requestParsersNode = get("requestDispatcher").get("requestParsers");

multipartUploadLimitKB =
Expand Down Expand Up @@ -675,6 +683,7 @@ public SolrRequestParsers getRequestParsers() {
// public final int filtOptCacheSize;
// public final float filtOptThreshold;
// SolrIndexSearcher - caches configurations
public final CacheConfig ordMapCacheConfig;
public final CacheConfig filterCacheConfig;
public final CacheConfig queryResultCacheConfig;
public final CacheConfig documentCacheConfig;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,6 @@

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.ByteVectorValues;
import org.apache.lucene.index.CompositeReader;
Expand Down Expand Up @@ -54,6 +51,11 @@
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.Version;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.solr.search.CaffeineCache;
import org.apache.solr.search.OrdMapRegenerator;
import org.apache.solr.search.OrdMapRegenerator.OrdinalMapValue;
import org.apache.solr.search.SolrCache;
import org.apache.solr.util.IOFunction;

/**
* This class forces a composite reader (eg a {@link MultiReader} or {@link DirectoryReader}) to
Expand All @@ -76,24 +78,34 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
// also have a cached FieldInfos instance so this is consistent. SOLR-12878
private final FieldInfos fieldInfos;

// TODO: this could really be a weak map somewhere else on the coreCacheKey,
// but do we really need to optimize slow-wrapper any more?
final Map<String, OrdinalMap> cachedOrdMaps = new ConcurrentHashMap<>();
/**
 * Sentinel pass-through "cache" that performs no caching at all: its {@link #computeIfAbsent}
 * override simply invokes the supplied mapping function and returns the freshly built
 * {@code OrdinalMapValue} without storing it. NOTE(review): presumably supplied in place of a
 * real ordinal-map cache when caching is disabled — confirm at the call sites that construct
 * {@code SlowCompositeReaderWrapper}.
 */
public static final SolrCache<String, OrdinalMapValue> NO_CACHED_ORDMAPS =
new CaffeineCache<>() {
@Override
public OrdinalMapValue computeIfAbsent(
String key, IOFunction<? super String, ? extends OrdinalMapValue> mappingFunction)
throws IOException {
// Build on every call; never cache the result.
return mappingFunction.apply(key);
}
};

final SolrCache<String, OrdinalMapValue> cachedOrdMaps;

/**
* This method is sugar for getting an {@link LeafReader} from an {@link IndexReader} of any kind.
* If the reader is already atomic, it is returned unchanged, otherwise wrapped by this class.
*/
public static LeafReader wrap(IndexReader reader) throws IOException {
public static LeafReader wrap(IndexReader reader, SolrCache<String, OrdinalMapValue> ordMapCache)
throws IOException {
if (reader instanceof CompositeReader) {
return new SlowCompositeReaderWrapper((CompositeReader) reader);
return new SlowCompositeReaderWrapper((CompositeReader) reader, ordMapCache);
} else {
assert reader instanceof LeafReader;
return (LeafReader) reader;
}
}

SlowCompositeReaderWrapper(CompositeReader reader) throws IOException {
SlowCompositeReaderWrapper(
CompositeReader reader, SolrCache<String, OrdinalMapValue> cachedOrdMaps) throws IOException {
in = reader;
in.registerParentReader(this);
if (reader.leaves().isEmpty()) {
Expand All @@ -114,6 +126,7 @@ public static LeafReader wrap(IndexReader reader) throws IOException {
metaData = new LeafMetaData(createdVersionMajor, minVersion, null);
}
fieldInfos = FieldInfos.getMergedFieldInfos(in);
this.cachedOrdMaps = cachedOrdMaps;
}

@Override
Expand Down Expand Up @@ -203,28 +216,19 @@ public SortedDocValues getSortedDocValues(String field) throws IOException {
return null;
}

// at this point in time we are able to formulate the producer
OrdinalMap map = null;
CacheHelper cacheHelper = getReaderCacheHelper();

Function<? super String, ? extends OrdinalMap> producer =
(notUsed) -> {
try {
OrdinalMap mapping =
OrdinalMap.build(
cacheHelper == null ? null : cacheHelper.getKey(), values, PackedInts.DEFAULT);
return mapping;
} catch (IOException e) {
throw new RuntimeException(e);
}
};

// either we use a cached result that gets produced eventually during caching,
// or we produce directly without caching
OrdinalMap map;
if (cacheHelper != null) {
map = cachedOrdMaps.computeIfAbsent(field + cacheHelper.getKey(), producer);
IOFunction<? super String, ? extends OrdinalMapValue> producer =
(notUsed) ->
OrdMapRegenerator.wrapValue(
OrdinalMap.build(cacheHelper.getKey(), values, PackedInts.DEFAULT));
map = cachedOrdMaps.computeIfAbsent(field, producer).get();
} else {
map = producer.apply("notUsed");
map = OrdinalMap.build(null, values, PackedInts.DEFAULT);
}

return new MultiSortedDocValues(values, starts, map, totalCost);
Expand Down Expand Up @@ -275,28 +279,19 @@ public SortedSetDocValues getSortedSetDocValues(String field) throws IOException
return null;
}

// at this point in time we are able to formulate the producer
OrdinalMap map = null;
CacheHelper cacheHelper = getReaderCacheHelper();

Function<? super String, ? extends OrdinalMap> producer =
(notUsed) -> {
try {
OrdinalMap mapping =
OrdinalMap.build(
cacheHelper == null ? null : cacheHelper.getKey(), values, PackedInts.DEFAULT);
return mapping;
} catch (IOException e) {
throw new RuntimeException(e);
}
};

// either we use a cached result that gets produced eventually during caching,
// or we produce directly without caching
OrdinalMap map;
if (cacheHelper != null) {
map = cachedOrdMaps.computeIfAbsent(field + cacheHelper.getKey(), producer);
IOFunction<? super String, ? extends OrdinalMapValue> producer =
(notUsed) ->
OrdMapRegenerator.wrapValue(
OrdinalMap.build(cacheHelper.getKey(), values, PackedInts.DEFAULT));
map = cachedOrdMaps.computeIfAbsent(field, producer).get();
} else {
map = producer.apply("notUsed");
map = OrdinalMap.build(null, values, PackedInts.DEFAULT);
}

return new MultiDocValues.MultiSortedSetDocValues(values, starts, map, totalCost);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ private Cache<K, V> buildCache(Cache<K, V> prev) {
builder.maximumWeight(maxRamBytes);
builder.weigher(
(k, v) -> (int) (RamUsageEstimator.sizeOfObject(k) + RamUsageEstimator.sizeOfObject(v)));
} else {
} else if (maxSize < Integer.MAX_VALUE) {
builder.maximumSize(maxSize);
}
Cache<K, V> newCache;
Expand Down
Loading

0 comments on commit f0ac0a9

Please sign in to comment.