From 5b8526d03341cd7a4c6bc93a35a09a969d3046f7 Mon Sep 17 00:00:00 2001 From: Christos Malliaridis Date: Wed, 13 Nov 2024 13:30:27 +0100 Subject: [PATCH 1/7] Enable ClassInitializationDeadlock checks --- gradle/validation/error-prone.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradle/validation/error-prone.gradle b/gradle/validation/error-prone.gradle index b7242b566c0..e9397529836 100644 --- a/gradle/validation/error-prone.gradle +++ b/gradle/validation/error-prone.gradle @@ -290,7 +290,7 @@ allprojects { prj -> '-Xep:ChainedAssertionLosesContext:WARN', '-Xep:CharacterGetNumericValue:WARN', '-Xep:ClassCanBeStatic:WARN', - // '-Xep:ClassInitializationDeadlock:WARN', // todo check if useful or comment why not + '-Xep:ClassInitializationDeadlock:WARN', '-Xep:ClassNewInstance:WARN', // '-Xep:CloseableProvides:OFF', // we don't use this annotation '-Xep:ClosingStandardOutputStreams:WARN', From eaf4d707a7e65ce2c483643bb3745b8e936d4770 Mon Sep 17 00:00:00 2001 From: Christos Malliaridis Date: Wed, 20 Nov 2024 22:41:22 +0100 Subject: [PATCH 2/7] Fix ClassInitializationDeadlock warning for TimeSource --- .../org/apache/solr/cloud/ActionThrottle.java | 5 +- .../org/apache/solr/schema/SchemaManager.java | 4 +- .../solr/security/AuditLoggerPlugin.java | 4 +- .../org/apache/solr/update/UpdateLog.java | 4 +- .../processor/DistributedUpdateProcessor.java | 4 +- .../apache/solr/util/ConcurrentLRUCache.java | 3 +- .../java/org/apache/solr/util/IdUtils.java | 6 +- .../apache/solr/cloud/ActionThrottleTest.java | 5 +- .../solr/cloud/AliasIntegrationTest.java | 4 +- ...nkeyNothingIsSafeWithPullReplicasTest.java | 4 +- ...sMonkeySafeLeaderWithPullReplicasTest.java | 4 +- .../solr/cloud/CollectionsAPISolrJTest.java | 8 +- .../solr/cloud/DeleteInactiveReplicaTest.java | 4 +- .../apache/solr/cloud/DeleteReplicaTest.java | 14 +- ...stribDocExpirationUpdateProcessorTest.java | 4 +- .../solr/cloud/DistributedQueueTest.java | 4 +- .../apache/solr/cloud/ForceLeaderTest.java | 4 +- .../solr/cloud/MigrateRouteKeyTest.java | 4 +- ...rseerCollectionConfigSetProcessorTest.java | 8 +- .../apache/solr/cloud/OverseerRolesTest.java | 4 +- .../org/apache/solr/cloud/OverseerTest.java | 16 +- .../solr/cloud/ReindexCollectionTest.java | 4 +- .../cloud/TestLeaderElectionZkExpiry.java | 6 +- .../apache/solr/cloud/TestPullReplica.java | 4 +- .../cloud/TestPullReplicaErrorHandling.java | 6 +- .../solr/cloud/TestRebalanceLeaders.java | 4 +- .../apache/solr/cloud/TestTlogReplica.java | 8 +- .../apache/solr/cloud/ZkShardTermsTest.java | 8 +- ...ncurrentDeleteAndCreateCollectionTest.java | 4 +- .../cloud/api/collections/ShardSplitTest.java | 6 +- .../SimpleCollectionCreateDeleteTest.java | 4 +- .../cloud/overseer/ZkStateReaderTest.java | 10 +- .../solr/core/TestSolrConfigHandler.java | 4 +- .../solr/handler/RequestLoggingTest.java | 4 +- .../solr/handler/TestReplicationHandler.java | 10 +- .../solr/handler/TestStressThreadBackup.java | 4 +- .../handler/admin/DaemonStreamApiTest.java | 8 +- .../handler/admin/IndexSizeEstimatorTest.java | 4 +- .../apache/solr/logging/TestLogWatcher.java | 4 +- .../solr/schema/TestBulkSchemaConcurrent.java | 8 +- .../org/apache/solr/search/TestRecovery.java | 6 +- .../security/BasicAuthIntegrationTest.java | 4 +- .../DirectUpdateHandlerWithUpdateLogTest.java | 4 +- .../update/TestInPlaceUpdatesDistrib.java | 4 +- .../solr/hdfs/search/TestRecoveryHdfs.java | 4 +- .../jwt/JWTAuthPluginIntegrationTest.java | 4 +- .../scripting/xslt/TransformerProvider.java | 5 +- 
.../solrj/io/stream/CloudAuthStreamTest.java | 6 +- .../solrj/cloud/DelegatingCloudManager.java | 3 +- .../solrj/impl/SolrClientCloudManager.java | 3 +- .../apache/solr/common/util/TimeSource.java | 77 +++---- .../apache/solr/common/util/TimeSources.java | 208 ++++++++++++++++++ .../client/solrj/SolrExampleTestsBase.java | 4 +- .../client/solrj/TestLBHttpSolrClient.java | 4 +- .../LBHttp2SolrClientIntegrationTest.java | 4 +- .../solr/common/util/ExecutorUtilTest.java | 2 +- .../solr/common/util/TestTimeSource.java | 6 +- .../cloud/AbstractFullDistribZkTestBase.java | 6 +- .../org/apache/solr/cloud/ChaosMonkey.java | 4 +- .../solr/cloud/MiniSolrCloudCluster.java | 6 +- ...ctCollectionsAPIDistributedZkTestBase.java | 6 +- .../apache/solr/embedded/JettySolrRunner.java | 6 +- .../solr/handler/BackupStatusChecker.java | 8 +- 63 files changed, 408 insertions(+), 203 deletions(-) create mode 100644 solr/solrj/src/java/org/apache/solr/common/util/TimeSources.java
diff --git a/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java b/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java index 2b27f6d27ff..9dd8c1e3253 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java +++ b/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java @@ -19,6 +19,7 @@ import java.lang.invoke.MethodHandles; import java.util.concurrent.TimeUnit; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,7 +34,7 @@ public class ActionThrottle { private final TimeSource timeSource; public ActionThrottle(String name, long minMsBetweenActions) { - this(name, minMsBetweenActions, TimeSource.NANO_TIME); + this(name, minMsBetweenActions, TimeSources.NANO_TIME); } public ActionThrottle(String name, long minMsBetweenActions, TimeSource timeSource) { @@ -43,7 +44,7 @@ public ActionThrottle(String name, long minMsBetweenActions, TimeSource timeSour } public ActionThrottle(String name, long minMsBetweenActions, long lastActionStartedAt) { - this(name, minMsBetweenActions, lastActionStartedAt, TimeSource.NANO_TIME); + this(name, minMsBetweenActions, lastActionStartedAt, TimeSources.NANO_TIME); } public ActionThrottle(
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java index b30293efb1a..118d8684257 100644 --- a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java +++ b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java @@ -41,7 +41,7 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.util.CommandOperation; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrResourceLoader; @@ -100,7 +100,7 @@ public List<Map<String, Object>> performOperations() throws Exception { private List<Map<String, Object>> doOperations(List<CommandOperation> operations) throws InterruptedException, IOException, KeeperException { - TimeOut timeOut = new TimeOut(updateTimeOut, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(updateTimeOut, TimeUnit.SECONDS, TimeSources.NANO_TIME); SolrCore core = req.getCore(); String errorMsg = "Unable to persist managed schema. "; List<Map<String, Object>> errors = Collections.emptyList();
diff --git a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java index a3a8a7c15ca..e8d59b80099 100644 --- a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java +++ b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java @@ -42,7 +42,7 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.security.AuditEvent.EventType; @@ -367,7 +367,7 @@ public void close() throws IOException { */ protected void waitForQueueToDrain(int timeoutSeconds) { if (async && executorService != null) { - TimeOut timeOut = new TimeOut(timeoutSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(timeoutSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); while ((!queue.isEmpty() || auditsInFlight.get() > 0) && !timeOut.hasTimedOut()) { try { if (log.isInfoEnabled()) {
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java index 8e0126073d2..b0981b53f05 100644 --- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java +++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java @@ -72,7 +72,7 @@ import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.SuppressForbidden; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.DirectoryFactory; import org.apache.solr.core.PluginInfo; import org.apache.solr.core.SolrCore; @@ -2306,7 +2306,7 @@ public void doReplay(TransactionLog translog) { private void waitForAllUpdatesGetExecuted(AtomicInteger pendingTasks) { TimeOut timeOut = - new TimeOut(Integer.MAX_VALUE, TimeUnit.MILLISECONDS, TimeSource.CURRENT_TIME); + new TimeOut(Integer.MAX_VALUE, TimeUnit.MILLISECONDS, TimeSources.CURRENT_TIME); try { timeOut.waitFor( "Timeout waiting for replay updates finish",
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java index 2edbeb6c345..870be769651 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java @@ -45,7 +45,7 @@ import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.UpdateParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.handler.component.RealTimeGetComponent; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; @@ -560,7 +560,7 @@ boolean shouldBufferUpdate( */ private long waitForDependentUpdates( AddUpdateCommand cmd, long versionOnUpdate, boolean isReplayOrPeersync) throws IOException { - TimeOut waitTimeout = new TimeOut(5, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut waitTimeout = new TimeOut(5, TimeUnit.SECONDS, TimeSources.NANO_TIME); long lastFoundVersion = getUpdateLocks() diff --git
a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java index 8b3aef7130c..22b8c49e799 100644 --- a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java +++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java @@ -42,6 +42,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.apache.solr.common.util.Cache; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; /** * A LRU cache implementation based upon ConcurrentHashMap and other techniques to reduce contention * @@ -71,7 +72,7 @@ public class ConcurrentLRUCache<K, V> implements Cache<K, V>, Accountable { private final Stats stats = new Stats(); private int acceptableWaterMark; private long oldestEntry = 0; // not volatile, only accessed in the cleaning method - private final TimeSource timeSource = TimeSource.NANO_TIME; + private final TimeSource timeSource = TimeSources.NANO_TIME; private final AtomicLong oldestEntryNs = new AtomicLong(0); private long maxIdleTimeNs; private final EvictionListener<K, V> evictionListener;
diff --git a/solr/core/src/java/org/apache/solr/util/IdUtils.java b/solr/core/src/java/org/apache/solr/util/IdUtils.java index 48e5a13e551..da3e74f8afa 100644 --- a/solr/core/src/java/org/apache/solr/util/IdUtils.java +++ b/solr/core/src/java/org/apache/solr/util/IdUtils.java @@ -18,7 +18,7 @@ import java.util.concurrent.TimeUnit; import org.apache.lucene.util.StringHelper; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; /** Helper class for generating unique ID-s. */ public class IdUtils { @@ -30,11 +30,11 @@ public static final String randomId() { /** * Generate a random id with a timestamp, in the format: <code>hex(timestamp) + 'T' + randomId - * </code>. This method uses {@link TimeSource#CURRENT_TIME} for timestamp values. + * </code>. This method uses {@link TimeSources#CURRENT_TIME} for timestamp values.
*/ public static final String timeRandomId() { return timeRandomId( - TimeUnit.MILLISECONDS.convert(TimeSource.CURRENT_TIME.getTimeNs(), TimeUnit.NANOSECONDS)); + TimeUnit.MILLISECONDS.convert(TimeSources.CURRENT_TIME.getTimeNs(), TimeUnit.NANOSECONDS)); } /** diff --git a/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java b/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java index 8c5d34505fc..fcb972e970a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java @@ -21,6 +21,7 @@ import java.util.concurrent.TimeUnit; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.junit.Test; public class ActionThrottleTest extends SolrTestCaseJ4 { @@ -52,7 +53,7 @@ public long[] getTimeAndEpochNs() { @Override public void sleep(long ms) throws InterruptedException { - TimeSource.NANO_TIME.sleep(ms); + TimeSources.NANO_TIME.sleep(ms); } @Override @@ -62,7 +63,7 @@ public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) { } // use the same time source as ActionThrottle - private static final TimeSource timeSource = TimeSource.NANO_TIME; + private static final TimeSource timeSource = TimeSources.NANO_TIME; @Test public void testBasics() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java index 3863043fc1a..9d923b83887 100644 --- a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java @@ -55,7 +55,7 @@ import org.apache.solr.common.params.CollectionAdminParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; @@ -430,7 +430,7 @@ private int waitForAliasesUpdate(int lastVersion, ClusterStateProvider stateProv } private int waitForAliasesUpdate(int lastVersion, ZkStateReader zkStateReader) throws Exception { - TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { zkStateReader.aliasesManager.update(); Aliases aliases = zkStateReader.getAliases(); diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java index 6652009774f..6f459d1b032 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java @@ -33,7 +33,7 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TestInjection; import org.apache.solr.util.TimeOut; import org.junit.AfterClass; @@ -303,7 +303,7 @@ public void test() throws Exception { waitForReplicationFromReplicas( DEFAULT_COLLECTION, zkStateReader, - new TimeOut(30, 
TimeUnit.SECONDS, TimeSource.NANO_TIME)); + new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME)); // waitForAllWarmingSearchers(); Set addFails = getAddFails(indexTreads); diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java index b097ebea968..273d6a29521 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java @@ -31,7 +31,7 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TestInjection; import org.apache.solr.util.TimeOut; import org.junit.AfterClass; @@ -234,7 +234,7 @@ public void test() throws Exception { waitForReplicationFromReplicas( DEFAULT_COLLECTION, ZkStateReader.from(cloudClient), - new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME)); // waitForAllWarmingSearchers(); checkShardConsistency(batchSize == 1, true); diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java index cf338640579..1bc90fdacbb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java @@ -64,7 +64,7 @@ import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.RetryUtil; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; @@ -188,7 +188,7 @@ public void testCreateCollWithDefaultClusterPropertiesNewFormat() throws Excepti .process(cluster.getSolrClient()); // we use a timeout so that the change made in ZK is reflected in the watched copy inside // ZkStateReader - TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSource.NanoTimeSource()); + TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSources.NanoTimeSource()); while (!timeOut.hasTimedOut()) { clusterProperty = cluster @@ -205,7 +205,7 @@ public void testCreateCollWithDefaultClusterPropertiesNewFormat() throws Excepti .build() .process(cluster.getSolrClient()); // assert that it is really gone in both old and new paths - timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSource.NanoTimeSource()); + timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSources.NanoTimeSource()); while (!timeOut.hasTimedOut()) { clusterProperty = cluster @@ -558,7 +558,7 @@ public void testCollectionProp() throws InterruptedException, IOException, SolrS private void checkCollectionProperty(String collection, String propertyName, String propertyValue) throws InterruptedException { - TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { Thread.sleep(10); if (Objects.equals( diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java 
b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java index 8d3749b7eba..aeb4bca4090 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java @@ -25,7 +25,7 @@ import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.core.SolrCore; import org.apache.solr.embedded.JettySolrRunner; @@ -101,7 +101,7 @@ public void deleteInactiveReplicaTest() throws Exception { cluster.startJettySolrRunner(jetty); log.info("restarted jetty"); - TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Expected data dir and instance dir of " + replica.getName() + " is deleted", () -> diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java index 16242bcc5eb..445366a2e31 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java @@ -42,7 +42,7 @@ import org.apache.solr.common.cloud.ZkNodeProps; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.cloud.ZkStateReaderAccessor; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.ZkContainer; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; @@ -136,7 +136,7 @@ public void deleteLiveReplicaTest() throws Exception { // the core should no longer have a watch collection state since it was removed // the core should no longer have a watch collection state since it was removed - TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Waiting for core's watcher to be removed", () -> { @@ -289,13 +289,13 @@ public void deleteReplicaFromClusterState() throws Exception { (liveNodes, collectionState) -> collectionState.getSlice("shard1").getReplicas().size() == 2); - TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Waiting for replica get unloaded", () -> replicaJetty.getCoreContainer().getCoreDescriptor(replica.getCoreName()) == null); // the core should no longer have a watch collection state since it was removed - timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); + timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Waiting for core's watcher to be removed", () -> { @@ -377,7 +377,7 @@ public void raceConditionOnDeleteAndRegisterReplica() throws Exception { } boolean replicaDeleted = false; - TimeOut timeOut = new TimeOut(20, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(20, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { try { ZkStateReader stateReader = @@ -440,7 +440,7 @@ public void raceConditionOnDeleteAndRegisterReplica() throws Exception { ZkContainer.testing_beforeRegisterInZk = null; } - TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, 
TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Timeout adding replica to shard", () -> { @@ -504,7 +504,7 @@ private void waitForNodeLeave(String lostNodeName) throws InterruptedException { */ private void waitForJettyInit(JettySolrRunner replica1Jetty, String replica1JettyNodeName) throws InterruptedException { - TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!replica1Jetty.isRunning()) { Thread.sleep(100); if (timeOut.hasTimedOut()) diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java index dfd9aa4dc14..1e75a8201ee 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java @@ -39,7 +39,7 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.handler.ReplicationHandler; import org.apache.solr.update.processor.DocExpirationUpdateProcessorFactory; import org.apache.solr.util.SecurityJson; @@ -337,7 +337,7 @@ private void waitForNoResults(int maxTimeLimitSeconds, SolrParams params) final QueryRequest req = setAuthIfNeeded(new QueryRequest(params)); final TimeOut timeout = - new TimeOut(maxTimeLimitSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + new TimeOut(maxTimeLimitSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); long numFound = req.process(cluster.getSolrClient(), COLLECTION).getResults().getNumFound(); while (0L < numFound && !timeout.hasTimedOut()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java index 152a5742bd9..7505efc1145 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java @@ -31,7 +31,7 @@ import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TimeOut; import org.junit.After; import org.junit.Before; @@ -149,7 +149,7 @@ public void testDistributedQueueBlocking() throws Exception { // After draining the queue, a watcher should be set. 
assertNull(dq.peek(100)); - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeout.waitFor( "Timeout waiting to see dirty=false", () -> { diff --git a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java index c3008437440..5e9ec9d3481 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java @@ -35,7 +35,7 @@ import org.apache.solr.common.cloud.Replica.State; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; import org.junit.BeforeClass; @@ -169,7 +169,7 @@ public void testReplicasInLowerTerms() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); if (useTlogReplicas()) { - TimeOut timeOut = new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(15, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Expected only 2 documents in the index", () -> { diff --git a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java index fcbec76bfbb..4ea7ab318a1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java @@ -31,7 +31,7 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.RoutingRule; import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TimeOut; import org.apache.zookeeper.KeeperException; import org.junit.BeforeClass; @@ -204,7 +204,7 @@ public Indexer( @Override public void run() { - TimeOut timeout = new TimeOut(seconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(seconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); for (int id = 26 * 3; id < 500 && !timeout.hasTimedOut(); id++) { String shardKey = "" + (char) ('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java index b38ad73f820..27d4ebd2a87 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java @@ -74,7 +74,7 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.ObjectCache; import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.core.CoreContainer; import org.apache.solr.handler.component.HttpShardHandler; @@ -260,7 +260,7 @@ public void setUp() throws Exception { reset(cloudDataProviderMock); objectCache.clear(); when(cloudDataProviderMock.getObjectCache()).thenReturn(objectCache); - when(cloudDataProviderMock.getTimeSource()).thenReturn(TimeSource.NANO_TIME); + 
when(cloudDataProviderMock.getTimeSource()).thenReturn(TimeSources.NANO_TIME); reset(clusterStateProviderMock); reset(stateManagerMock); reset(cloudManagerMock); @@ -529,7 +529,7 @@ public Void answer(InvocationOnMock invocation) { }); when(cloudManagerMock.getClusterStateProvider()).thenReturn(clusterStateProviderMock); - when(cloudManagerMock.getTimeSource()).thenReturn(new TimeSource.NanoTimeSource()); + when(cloudManagerMock.getTimeSource()).thenReturn(new TimeSources.NanoTimeSource()); when(cloudManagerMock.getDistribStateManager()).thenReturn(distribStateManagerMock); when(overseerMock.getSolrCloudManager()).thenReturn(cloudManagerMock); @@ -910,7 +910,7 @@ protected void verifySubmitCaptures( } protected void waitForEmptyQueue() throws Exception { - final TimeOut timeout = new TimeOut(MAX_WAIT_MS, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(MAX_WAIT_MS, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); while (queue.peek() != null) { if (timeout.hasTimedOut()) fail("Queue not empty within " + MAX_WAIT_MS + " ms"); Thread.sleep(100); diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java index 3da6970e1f6..892d492bcfe 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java @@ -27,7 +27,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; import org.apache.zookeeper.KeeperException; @@ -47,7 +47,7 @@ public static void setupCluster() throws Exception { public static void waitForNewOverseer( int seconds, Predicate state, boolean failOnIntermediateTransition) throws Exception { - TimeOut timeout = new TimeOut(seconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(seconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); String current = null; while (timeout.hasTimedOut() == false) { String prev = current; diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java index bad8d58d021..3c2c40eec1b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java @@ -71,7 +71,7 @@ import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CloudConfig; import org.apache.solr.core.ClusterSingletons; import org.apache.solr.core.CoreContainer; @@ -1146,7 +1146,7 @@ public void testExceptionWhenFlushClusterState() throws Exception { ZkDistributedQueue q = getOpenOverseer().getStateUpdateQueue(); q.offer(badMessage); - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { if (q.peek() == null) { break; @@ -1210,7 +1210,7 @@ public void testShardLeaderChange() throws Exception { mockController = new MockZKController(server.getZkAddress(), "node1:8983_", overseers); - TimeOut timeout = new TimeOut(10, 
TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { try { // We must only retry the enqueue to Overseer, not the collection znode creation (that @@ -1235,7 +1235,7 @@ public void testShardLeaderChange() throws Exception { } } - timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { try { mockController.publishState( @@ -1260,7 +1260,7 @@ public void testShardLeaderChange() throws Exception { Thread.sleep(100); - timeout = new TimeOut(1, TimeUnit.SECONDS, TimeSource.NANO_TIME); + timeout = new TimeOut(1, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { try { mockController.publishState( @@ -1280,7 +1280,7 @@ public void testShardLeaderChange() throws Exception { mockController2 = new MockZKController(server.getZkAddress(), "node2:8984_", overseers); - timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { try { mockController.publishState( @@ -1300,7 +1300,7 @@ public void testShardLeaderChange() throws Exception { verifyShardLeader(reader, COLLECTION, "shard1", "core1"); - timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { try { mockController2.publishState( @@ -1324,7 +1324,7 @@ public void testShardLeaderChange() throws Exception { ZkController zkController = createMockZkController(server.getZkAddress(), null, reader); zkControllers.add(zkController); - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Timed out waiting to see core4 as leader", () -> { diff --git a/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java index 0b79aa1c336..82f58957e7f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java @@ -41,7 +41,7 @@ import org.apache.solr.common.cloud.ImplicitDocRouter; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.LogLevel; import org.apache.solr.util.TestInjection; import org.apache.solr.util.TimeOut; @@ -86,7 +86,7 @@ private void waitForReindexingState(String collection, ReindexCollectionCmd.Stat ReindexCollectionCmd.State lastSeen = null; - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { try { lastSeen = diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java index a871fc7176c..857f891938f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java @@ -26,7 +26,7 @@ import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.util.ExecutorUtil; import 
org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CloudConfig; import org.apache.solr.core.CoreContainer; import org.apache.solr.util.TimeOut; @@ -67,7 +67,7 @@ public void testLeaderElectionWithZkExpiry() throws Exception { cc, server.getZkAddress(), 15000, cloudConfig, Collections::emptyList)) { threadExecutor.execute( () -> { - TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { long sessionId = zkController.getZkClient().getZkSessionId(); server.expire(sessionId); @@ -85,7 +85,7 @@ public void testLeaderElectionWithZkExpiry() throws Exception { .withTimeout(LeaderElectionTest.TIMEOUT, TimeUnit.MILLISECONDS) .build()) { boolean found = false; - TimeOut timeout = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(60, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { try { String leaderNode = OverseerCollectionConfigSetProcessor.getLeaderNode(zc); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java index ec0564c249d..d9def1afd16 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java @@ -52,7 +52,7 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.core.SolrCore; import org.apache.solr.embedded.JettySolrRunner; @@ -714,7 +714,7 @@ private void waitForNumDocsInAllReplicas(int numDocs, Collection replic static void waitForNumDocsInAllReplicas( int numDocs, Collection replicas, String query, String user, String pass) throws IOException, SolrServerException, InterruptedException { - TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSources.NANO_TIME); for (Replica r : replicas) { String replicaUrl = r.getCoreUrl(); try (SolrClient replicaClient = getHttpSolrClient(r)) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java index 0d68f2f1993..f5109d49d56 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java @@ -39,7 +39,7 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.SolrCore; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TestInjection; @@ -158,7 +158,7 @@ public void testCantConnectToPullReplica() throws Exception { assertNumberOfReplicas(numShards, 0, numShards, true, true); { long numFound = 0; - TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, 
TimeSources.NANO_TIME); while (numFound < 20 && !t.hasTimedOut()) { Thread.sleep(200); numFound = @@ -266,7 +266,7 @@ public void testCloseHooksDeletedOnReconnect() throws Exception { private void assertNumDocs(int numDocs, SolrClient client, int timeoutSecs) throws InterruptedException, SolrServerException, IOException { - TimeOut t = new TimeOut(timeoutSecs, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut t = new TimeOut(timeoutSecs, TimeUnit.SECONDS, TimeSources.NANO_TIME); long numFound = -1; while (!t.hasTimedOut()) { Thread.sleep(200); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java index 20343487523..8261412bf2e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java @@ -40,7 +40,7 @@ import org.apache.solr.common.params.CollectionParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.ExecutorUtil; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; import org.apache.zookeeper.KeeperException; @@ -203,7 +203,7 @@ private void doTestSetArbitraryPropertySliceUnique(String propIn) // Fail if we have replicas with the preferredLeader property are _not_ also the leaders. private void checkPreferredsAreLeaders() throws InterruptedException, KeeperException { // Make sure that the shard unique are where you expect. - TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); while (timeout.hasTimedOut() == false) { if (checkPreferredsAreLeaders(false)) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java index d559de6d333..59e4282f6c8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java @@ -66,7 +66,7 @@ import org.apache.solr.common.params.CollectionParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.SolrCore; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.update.SolrIndexWriter; @@ -268,7 +268,7 @@ public void testAddDocs() throws Exception { assertEquals(1, leaderClient.query(new SolrQuery("*:*")).getResults().getNumFound()); } - TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSources.NANO_TIME); for (Replica r : s.getReplicas(EnumSet.of(Replica.Type.TLOG))) { // TODO: assert replication < REPLICATION_TIMEOUT_SECS try (SolrClient tlogReplicaClient = getHttpSolrClient(r)) { @@ -608,7 +608,7 @@ public void testOnlyLeaderIndexes() throws Exception { waitForNumDocsInAllActiveReplicas(2); // There are a small delay between new searcher and copy over old updates operation - TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { if (assertCopyOverOldUpdates(1, timeCopyOverPerCores)) 
{ break; @@ -1001,7 +1001,7 @@ private void waitForNumDocsInAllReplicas(int numDocs, Collection replic private void waitForNumDocsInAllReplicas( int numDocs, Collection replicas, String query, int timeout) throws IOException, SolrServerException, InterruptedException { - TimeOut t = new TimeOut(timeout, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut t = new TimeOut(timeout, TimeUnit.SECONDS, TimeSources.NANO_TIME); for (Replica r : replicas) { if (!r.isActive(cluster.getSolrClient().getClusterState().getLiveNodes())) { continue; diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java index ead1a49e6ea..47fcd506572 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java @@ -32,7 +32,7 @@ import java.util.function.Supplier; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TimeOut; import org.junit.BeforeClass; import org.junit.Test; @@ -186,7 +186,7 @@ public void testRegisterTerm() throws InterruptedException { expectedTerms.put("rep1", 1L); expectedTerms.put("rep2", 1L); - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSource.CurrentTimeSource()); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSources.CurrentTimeSource()); while (!timeOut.hasTimedOut()) { if (Objects.equals(expectedTerms, rep1Terms.getTerms()) && Objects.equals(expectedTerms, rep2Terms.getTerms())) break; @@ -239,7 +239,7 @@ public void testRaceConditionOnUpdates() throws InterruptedException { long maxTerm = 0; try (ZkShardTerms shardTerms = new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { shardTerms.registerTerm("leader"); - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSource.CurrentTimeSource()); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSources.CurrentTimeSource()); while (!timeOut.hasTimedOut()) { maxTerm++; assertEquals( @@ -330,7 +330,7 @@ public void testSetTermEqualsToLeader() throws InterruptedException { } private void waitFor(T expected, Supplier supplier) throws InterruptedException { - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSource.CurrentTimeSource()); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSources.CurrentTimeSource()); while (!timeOut.hasTimedOut()) { if (expected == supplier.get()) return; Thread.sleep(100); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java index b87d9af578d..55060b406b9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java @@ -28,7 +28,7 @@ import org.apache.solr.client.solrj.response.CollectionAdminResponse; import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.common.util.IOUtils; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettyConfig; import org.apache.solr.util.TimeOut; import org.junit.After; @@ -148,7 +148,7 @@ public CreateDeleteCollectionThread( 
@Override public void run() { - final TimeOut timeout = new TimeOut(timeToRunSec, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(timeToRunSec, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut() && failure.get() == null) { doWork(); } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java index 7e610b2c2da..1a2b0dd096c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java @@ -66,7 +66,7 @@ import org.apache.solr.common.params.CollectionParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.update.SolrIndexSplitter; @@ -705,7 +705,7 @@ public void testSplitLocking() throws Exception { Thread t = new Thread(r); t.start(); // wait for the split to start executing - TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { timeOut.sleep(500); if (ZkStateReader.from(cloudClient).getZkClient().exists(path, true)) { @@ -729,7 +729,7 @@ public void testSplitLocking() throws Exception { ZkStateReader.from(cloudClient).getZkClient().exists(path, true)); // let the first split proceed TestInjection.splitLatch.countDown(); - timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { timeOut.sleep(500); if (!ZkStateReader.from(cloudClient).getZkClient().exists(path, true)) { diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java index 626c7a4dc0f..c83b9ea9a6a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java @@ -32,7 +32,7 @@ import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.core.SolrCore; @@ -78,7 +78,7 @@ public void testCreateAndDeleteThenCreateAgain() throws Exception { getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); // currently, removing a collection does not wait for cores to be unloaded - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (true) { if (timeout.hasTimedOut()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java index 3d0d522c973..1136c64bf1b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java @@ -55,7 +55,7 @@ import org.apache.solr.common.util.CommonTestInjection; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.common.util.ZLibCompressor; import org.apache.solr.handler.admin.ConfigSetsHandler; @@ -209,7 +209,7 @@ public void testCollectionStateWatcherCaching() throws Exception { writer.writePendingUpdates(); boolean found = false; - TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { DocCollection c1 = reader.getClusterState().getCollection("c1"); if ("y".equals(c1.getStr("x"))) { @@ -297,7 +297,7 @@ public void testNodeVersion() throws Exception { // inserted reader.registerCore("c1"); - TimeOut timeOut = new TimeOut(5000, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(5000, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Timeout on waiting for c1 to show up in cluster state", () -> reader.getClusterState().getCollectionOrNull("c1") != null); @@ -680,7 +680,7 @@ public void testWatchRaceCondition() throws Exception { // cluster state might not be updated right the way from the removeDocCollectionWatcher call // above as org.apache.solr.common.cloud.ZkStateReader.Notification might remove the watcher // as well and might still be in the middle of updating the cluster state. - TimeOut timeOut = new TimeOut(2000, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(2000, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "The ref is not lazily loaded after waiting", () -> reader.getClusterState().getCollectionRef("c1").isLazilyLoaded()); @@ -803,7 +803,7 @@ public void testDeletePrsCollection() throws Exception { writer.enqueueUpdate(clusterState, Collections.singletonList(wc), null); clusterState = writer.writePendingUpdates(); - TimeOut timeOut = new TimeOut(5000, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(5000, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); timeOut.waitFor( "Timeout on waiting for c1 to show up in cluster state", () -> reader.getClusterState().getCollectionOrNull(collectionName) != null); diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java index b34712c2969..bde19a3c218 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java @@ -41,7 +41,7 @@ import org.apache.solr.common.LinkedHashMapWriter; import org.apache.solr.common.MapWriter; import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.common.util.ValidatingJsonMap; import org.apache.solr.handler.DumpRequestHandler; @@ -718,7 +718,7 @@ public static LinkedHashMapWriter testForResponseElement( boolean success = false; LinkedHashMapWriter m = null; - TimeOut timeOut = new TimeOut(maxTimeoutSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(maxTimeoutSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); 
while (!timeOut.hasTimedOut()) { try { m = diff --git a/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java b/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java index 7ec8cb7385f..afccb39e440 100644 --- a/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java @@ -30,7 +30,7 @@ import org.apache.logging.log4j.core.layout.PatternLayout; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.SuppressForbidden; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.SolrCore; import org.apache.solr.util.TimeOut; import org.junit.Before; @@ -86,7 +86,7 @@ public void testLogBeforeExecute(Logger logger) throws InterruptedException { try { assertQ(req("q", "*:*")); - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); boolean found = false; Matcher matcher; String pat = "DEBUG.*q=\\*:\\*.*"; diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java index 8f424e97803..1eccd17d60c 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java @@ -65,7 +65,7 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CachingDirectoryFactory; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; @@ -713,7 +713,7 @@ public void doTestIndexFetchOnLeaderRestart() throws Exception { leaderJetty.stop(); final TimeOut waitForLeaderToShutdown = - new TimeOut(300, TimeUnit.SECONDS, TimeSource.NANO_TIME); + new TimeOut(300, TimeUnit.SECONDS, TimeSources.NANO_TIME); waitForLeaderToShutdown.waitFor( "Gave up after waiting an obscene amount of time for leader to shut down", () -> leaderJetty.isStopped()); @@ -752,7 +752,7 @@ public void doTestIndexFetchOnLeaderRestart() throws Exception { leaderJetty.start(); - final TimeOut waitForLeaderToStart = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut waitForLeaderToStart = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); waitForLeaderToStart.waitFor( "Gave up after waiting an obscene amount of time for leader to start", () -> leaderJetty.isRunning()); @@ -1617,7 +1617,7 @@ public void testEmptyBackups() throws Exception { "name", backupName)) .setRequiresCollection(true); - final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); final SimpleSolrResponse rsp = req.process(leaderClient); final String dirName = backupStatus.waitForBackupSuccess(backupName, timeout); @@ -1646,7 +1646,7 @@ public void testEmptyBackups() throws Exception { "name", backupName)) .setRequiresCollection(true); - final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); final SimpleSolrResponse rsp = req.process(leaderClient); final String dirName = backupStatus.waitForBackupSuccess(backupName, timeout); diff 
--git a/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java b/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java index 1817361d508..f64d9bb37d8 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java +++ b/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java @@ -49,7 +49,7 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.UpdateParams; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TimeOut; import org.junit.After; import org.junit.AfterClass; @@ -142,7 +142,7 @@ private GenericSolrRequest makeReplicationReq(SolrParams p) { */ @Override public void makeBackup(final String backupName, final String snapName) throws Exception { - final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); ModifiableSolrParams p = params( "command", diff --git a/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java b/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java index b7f82ac616b..a2112658a18 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java @@ -32,7 +32,7 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.common.params.SolrParams; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettyConfig; import org.apache.solr.util.TimeOut; import org.junit.After; @@ -208,7 +208,7 @@ public void testAPIs() throws IOException, SolrServerException, InterruptedExcep // There can be some delay while threads stabilize, so we need to loop; private void checkAlive(String daemonName) throws InterruptedException, IOException { - TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (timeout.hasTimedOut() == false) { Tuple tuple = getTupleOfInterest(params("qt", "/stream", "action", "list"), daemonName); @@ -228,7 +228,7 @@ private void checkAlive(String daemonName) throws InterruptedException, IOExcept // a // stopped thread should be "TERMINATED" private void checkStopped() throws InterruptedException, IOException { - TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (timeout.hasTimedOut() == false) { Tuple tuple = getTupleOfInterest(params("qt", "/stream", "action", "list"), daemonOfInterest); @@ -241,7 +241,7 @@ private void checkStopped() throws InterruptedException, IOException { } private void checkDaemonKilled(String daemon) throws IOException, InterruptedException { - TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (timeout.hasTimedOut() == false) { List tuples = getTuples(params("qt", "/stream", "action", "list")); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java index 5523ad23c15..6be62828038 100644 
--- a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java @@ -40,7 +40,7 @@ import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.SolrCore; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.search.SolrIndexSearcher; @@ -277,7 +277,7 @@ private static SolrInputDocument addDocs(String collection, int n) throws Except solrClient.request(ureq, collection); solrClient.commit(collection); // verify the number of docs - TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { QueryResponse rsp = solrClient.query(collection, params("q", "*:*", "rows", "0")); if (rsp.getResults().getNumFound() == n) { diff --git a/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java b/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java index 34cd05e8cbd..9d3be68f38d 100644 --- a/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java +++ b/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java @@ -29,7 +29,7 @@ import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.response.BinaryQueryResponseWriter; @@ -75,7 +75,7 @@ public void testLog4jWatcher() throws InterruptedException, IOException { log.warn(msg); // Loop to give the logger time to process the async message and notify the new watcher. - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); boolean foundNewMsg = false; boolean foundOldMessage = false; // In local testing this loop usually succeeds 1-2 tries, so it's not very expensive to loop. 
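The test hunks above and below all migrate the same polling idiom: a TimeOut is constructed against the new TimeSources holder instead of the deprecated TimeSource constants, and the test then either loops on hasTimedOut() or delegates to waitFor(). A minimal sketch of that idiom, assuming only the TimeOut and TimeSources APIs shown in this patch (the timeout budget and the condition are illustrative placeholders, not code from any one test):

    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;
    import org.apache.solr.common.util.TimeSources;
    import org.apache.solr.util.TimeOut;

    class TimeSourcesPollingSketch {

      // Busy-poll variant: check the condition until it holds or the budget runs out.
      static boolean pollUntilTrue(BooleanSupplier condition) throws InterruptedException {
        // was: new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME)
        TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME);
        while (!timeOut.hasTimedOut()) {
          if (condition.getAsBoolean()) {
            return true;
          }
          Thread.sleep(250);
        }
        return false;
      }

      // waitFor variant: TimeOut does the looping and fails with the message on timeout.
      static void waitUntilTrue(BooleanSupplier condition) throws Exception {
        new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME)
            .waitFor("Timed out waiting for condition", () -> condition.getAsBoolean());
      }
    }

Tests that need accelerated time can instead pass TimeSources.get("simTime:2.5") as the third constructor argument; that factory is part of the new TimeSources class introduced later in this patch.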
diff --git a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java index ccff799d889..904bac6c8cd 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java +++ b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java @@ -35,7 +35,7 @@ import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.util.RestTestHarness; import org.apache.solr.util.TimeOut; @@ -172,7 +172,7 @@ private void invokeBulkAddCall(int seed, List errs) throws Exception { Set errmessages = new HashSet<>(); // don't close harness - gets closed at teardown RestTestHarness harness = randomRestTestHarness(r); - TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { errmessages.clear(); Map m = getObj(harness, aField, "fields"); @@ -252,7 +252,7 @@ private void invokeBulkReplaceCall(int seed, List errs) throws Exception Set errmessages = new HashSet<>(); // don't close harness - gets closed at teardown RestTestHarness harness = randomRestTestHarness(r); - TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { errmessages.clear(); Map m = getObj(harness, aField, "fields"); @@ -325,7 +325,7 @@ private void invokeBulkDeleteCall(int seed, List errs) throws Exception Set errmessages = new HashSet<>(); // don't close harness - gets closed at teardown RestTestHarness harness = randomRestTestHarness(r); - TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(TIMEOUT, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { errmessages.clear(); Map m = getObj(harness, aField, "fields"); diff --git a/solr/core/src/test/org/apache/solr/search/TestRecovery.java b/solr/core/src/test/org/apache/solr/search/TestRecovery.java index 6cb6747c716..029dceb8810 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRecovery.java +++ b/solr/core/src/test/org/apache/solr/search/TestRecovery.java @@ -42,7 +42,7 @@ import java.util.concurrent.TimeUnit; import org.apache.lucene.tests.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.request.SolrQueryRequest; @@ -1234,7 +1234,7 @@ public void testExistOldBufferLog() throws Exception { ulog.bufferUpdates(); ulog.applyBufferedUpdates(); - TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeout.waitFor( "Timeout waiting for finish replay updates", () -> h.getCore().getUpdateHandler().getUpdateLog().getState() == UpdateLog.State.ACTIVE); @@ -1259,7 +1259,7 @@ public void testExistOldBufferLog() throws Exception { // Timeout for Q7 get replayed, because it was added on tlog, therefore it will be replayed on // restart - timeout = new TimeOut(10, 
TimeUnit.SECONDS, TimeSource.NANO_TIME); + timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeout.waitFor( "Timeout waiting for finish replay updates", () -> h.getCore().getUpdateHandler().getUpdateLog().getState() == UpdateLog.State.ACTIVE); diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java index 281c6475611..3cbf546dbc9 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java @@ -60,7 +60,7 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.LogLevel; @@ -453,7 +453,7 @@ public static void executeCommand( Utils.consumeFully(r.getEntity()); // HACK (continued)... - final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeout.waitFor( "core containers never fully updated their auth plugins", () -> { diff --git a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerWithUpdateLogTest.java b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerWithUpdateLogTest.java index 49a3be19697..f66e6dc3864 100644 --- a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerWithUpdateLogTest.java +++ b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerWithUpdateLogTest.java @@ -21,7 +21,7 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.index.IndexWriter; import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.SolrCore; import org.apache.solr.util.LogLevel; import org.apache.solr.util.TimeOut; @@ -71,7 +71,7 @@ public void testShouldCommitHook() throws Exception { assertU(adoc("id", "3")); h.close(); // Then the shouldCommit hook is called. 
- new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME) + new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME) .waitFor( "Timeout waiting for should commit hook", () -> updater.shouldCommitCallCount.get() == 4); diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java index fe75428c695..cebd1700a64 100644 --- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java +++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java @@ -57,7 +57,7 @@ import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.index.NoMergePolicyFactory; import org.apache.solr.update.processor.DistributedUpdateProcessor; @@ -1276,7 +1276,7 @@ private void delayedReorderingFetchesMissingUpdateFromLeaderTest() throws Except } for (SolrClient client : clients) { - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); try { timeout.waitFor( "Timeout", diff --git a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/search/TestRecoveryHdfs.java b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/search/TestRecoveryHdfs.java index 3a382b13fed..ef2f970a41e 100644 --- a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/search/TestRecoveryHdfs.java +++ b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/search/TestRecoveryHdfs.java @@ -44,7 +44,7 @@ import org.apache.solr.SolrIgnoredThreadsFilter; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.IOUtils; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.hdfs.cloud.HdfsTestUtil; import org.apache.solr.hdfs.update.HdfsUpdateLog; @@ -626,7 +626,7 @@ public void testExistOldBufferLog() throws Exception { assertFalse(ulog.existOldBufferLog()); // Timeout for Q7 get replayed, because it was added on tlog, therefore it will be replayed on // restart - TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeout.waitFor( "Timeout waiting for finish replay updates", () -> h.getCore().getUpdateHandler().getUpdateLog().getState() == UpdateLog.State.ACTIVE); diff --git a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java index 3bb08460218..933a0b7124f 100644 --- a/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java +++ b/solr/modules/jwt-auth/src/test/org/apache/solr/security/jwt/JWTAuthPluginIntegrationTest.java @@ -61,7 +61,7 @@ import org.apache.solr.cloud.SolrCloudAuthTestCase; import org.apache.solr.common.SolrException; import org.apache.solr.common.util.Pair; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.util.CryptoKeys; import org.apache.solr.util.RTimer; @@ -479,7 +479,7 @@ private void executeCommand(String url, HttpClient cl, String payload, JsonWebSi 
Utils.consumeFully(r.getEntity()); // HACK (continued)... - final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeout.waitFor( "core containers never fully updated their auth plugins", () -> { diff --git a/solr/modules/scripting/src/java/org/apache/solr/scripting/xslt/TransformerProvider.java b/solr/modules/scripting/src/java/org/apache/solr/scripting/xslt/TransformerProvider.java index 390fc2ceb77..f11f3db321a 100644 --- a/solr/modules/scripting/src/java/org/apache/solr/scripting/xslt/TransformerProvider.java +++ b/solr/modules/scripting/src/java/org/apache/solr/scripting/xslt/TransformerProvider.java @@ -29,7 +29,7 @@ import javax.xml.transform.stream.StreamSource; import org.apache.lucene.util.ResourceLoader; import org.apache.solr.common.util.IOUtils; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.XMLErrorLogger; import org.apache.solr.core.SolrConfig; import org.apache.solr.request.SolrQueryRequest; @@ -149,7 +149,8 @@ private Templates getTemplates(ResourceLoader loader, String filename, int cache lastFilename = filename; lastTemplates = result; - cacheExpiresTimeout = new TimeOut(cacheLifetimeSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + cacheExpiresTimeout = + new TimeOut(cacheLifetimeSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); return result; } diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java index 799330fbd0e..ce635d44c78 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java @@ -37,7 +37,7 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import org.apache.solr.security.BasicAuthPlugin; import org.apache.solr.security.RuleBasedAuthorizationPlugin; @@ -526,7 +526,7 @@ public void testDaemonUpdateStream() throws Exception { try { // We have to poll the daemon 'list' to know once it's run... long iterations = 0; - final TimeOut timeout = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(60, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { final SolrStream daemonCheck = new SolrStream( @@ -590,7 +590,7 @@ public void testDaemonUpdateStreamInsufficientCredentials() throws Exception { try { // We have to poll the daemon 'list' to know once it's run / terminated... 
Object state = null; - final TimeOut timeout = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(60, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { final SolrStream daemonCheck = new SolrStream( diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/cloud/DelegatingCloudManager.java b/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/cloud/DelegatingCloudManager.java index c259ac3a29c..9eb801645c3 100644 --- a/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/cloud/DelegatingCloudManager.java +++ b/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/cloud/DelegatingCloudManager.java @@ -22,12 +22,13 @@ import org.apache.solr.client.solrj.impl.ClusterStateProvider; import org.apache.solr.common.util.ObjectCache; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; /** Base class for overriding some behavior of {@link SolrCloudManager}. */ public class DelegatingCloudManager implements SolrCloudManager { protected final SolrCloudManager delegate; private ObjectCache objectCache = new ObjectCache(); - private TimeSource timeSource = TimeSource.NANO_TIME; + private TimeSource timeSource = TimeSources.NANO_TIME; public DelegatingCloudManager(SolrCloudManager delegate) { this.delegate = delegate; diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java b/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java index 3dca5176aa1..305115e7fb3 100644 --- a/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java +++ b/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientCloudManager.java @@ -30,6 +30,7 @@ import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.ObjectCache; import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,7 +81,7 @@ public ObjectCache getObjectCache() { @Override public TimeSource getTimeSource() { - return TimeSource.NANO_TIME; + return TimeSources.NANO_TIME; } @Override diff --git a/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java b/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java index f5313288dc9..cea65ce6bea 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java @@ -16,12 +16,7 @@ */ package org.apache.solr.common.util; -import java.lang.invoke.MethodHandles; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Source of time. @@ -31,12 +26,14 @@ * always monotonically increasing. */ public abstract class TimeSource { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); /** * Implementation that uses {@link System#currentTimeMillis()}. This implementation's {@link * #getTimeNs()} returns the same values as {@link #getEpochTimeNs()}. + * + * @deprecated Use {@link TimeSources.CurrentTimeSource} instead. */ + @Deprecated(since = "9.8", forRemoval = true) public static final class CurrentTimeSource extends TimeSource { @Override @@ -71,13 +68,16 @@ public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) { * Implementation that uses {@link System#nanoTime()}. 
Epoch time is initialized using {@link * CurrentTimeSource}, and then calculated as the elapsed number of nanoseconds as measured by * this implementation. + * + * @deprecated Use {@link TimeSources.NanoTimeSource} instead. */ + @Deprecated(since = "9.8", forRemoval = true) public static final class NanoTimeSource extends TimeSource { private final long epochStart; private final long nanoStart; public NanoTimeSource() { - epochStart = CURRENT_TIME.getTimeNs(); + epochStart = TimeSources.CURRENT_TIME.getTimeNs(); nanoStart = System.nanoTime(); } @@ -108,7 +108,12 @@ public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) { } } - /** Implementation that uses {@link #NANO_TIME} accelerated by a double multiplier. */ + /** + * Implementation that uses {@link TimeSources#NANO_TIME} accelerated by a double multiplier. + * + * @deprecated Use {@link TimeSources.SimTimeSource} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) public static final class SimTimeSource extends TimeSource { final double multiplier; @@ -122,13 +127,14 @@ public static final class SimTimeSource extends TimeSource { */ public SimTimeSource(double multiplier) { this.multiplier = multiplier; - epochStart = CURRENT_TIME.getTimeNs(); - nanoStart = NANO_TIME.getTimeNs(); + epochStart = TimeSources.CURRENT_TIME.getTimeNs(); + nanoStart = TimeSources.NANO_TIME.getTimeNs(); } @Override public long getTimeNs() { - return nanoStart + Math.round((double) (NANO_TIME.getTimeNs() - nanoStart) * multiplier); + return nanoStart + + Math.round((double) (TimeSources.NANO_TIME.getTimeNs() - nanoStart) * multiplier); } @Override @@ -160,13 +166,21 @@ public String toString() { } } - /** This instance uses {@link CurrentTimeSource} for generating timestamps. */ - public static final TimeSource CURRENT_TIME = new CurrentTimeSource(); - - /** This instance uses {@link NanoTimeSource} for generating timestamps. */ - public static final TimeSource NANO_TIME = new NanoTimeSource(); + /** + * This instance uses {@link CurrentTimeSource} for generating timestamps. + * + * @deprecated Use {@link TimeSources#CURRENT_TIME} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) + public static final TimeSource CURRENT_TIME = TimeSources.CURRENT_TIME; - private static Map simTimeSources = new ConcurrentHashMap<>(); + /** + * This instance uses {@link NanoTimeSource} for generating timestamps. + * + * @deprecated Use {@link TimeSources#NANO_TIME} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) + public static final TimeSource NANO_TIME = TimeSources.NANO_TIME; /** * Obtain an instance of time source. * * @param type supported types: currentTime, nanoTime and accelerated * time with a double factor in the form of simTime:FACTOR, e.g. * simTime:2.5 * * @return one of the supported types + * @deprecated Use {@link TimeSources#get(String)} instead.
*/ + @Deprecated(since = "9.8", forRemoval = true) public static TimeSource get(String type) { - if (type == null) { - return NANO_TIME; - } else if (type.equals("currentTime") || type.equals(CurrentTimeSource.class.getSimpleName())) { - return CURRENT_TIME; - } else if (type.equals("nanoTime") || type.equals(NanoTimeSource.class.getSimpleName())) { - return NANO_TIME; - } else if (type.startsWith("simTime") || type.startsWith(SimTimeSource.class.getSimpleName())) { - return simTimeSources.computeIfAbsent( - type, - t -> { - String[] parts = t.split(":"); - double mul = 1.0; - if (parts.length != 2) { - log.warn("Invalid simTime specification, assuming multiplier==1.0: '{}'.", type); - } else { - try { - mul = Double.parseDouble(parts[1]); - } catch (Exception e) { - log.warn("Invalid simTime specification, assuming multiplier==1.0: '{}'.", type); - } - } - return new SimTimeSource(mul); - }); - } else { - throw new UnsupportedOperationException("Unsupported time source type '" + type + "'."); - } + return TimeSources.get(type); } /** diff --git a/solr/solrj/src/java/org/apache/solr/common/util/TimeSources.java b/solr/solrj/src/java/org/apache/solr/common/util/TimeSources.java new file mode 100644 index 00000000000..f8398f0c7df --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/util/TimeSources.java @@ -0,0 +1,208 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.common.util; + +import java.lang.invoke.MethodHandles; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TimeSources { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private TimeSources() {} + + /** This instance uses {@link TimeSources.CurrentTimeSource} for generating timestamps. */ + public static final TimeSource CURRENT_TIME = new TimeSources.CurrentTimeSource(); + + /** This instance uses {@link TimeSources.NanoTimeSource} for generating timestamps. */ + public static final TimeSource NANO_TIME = new TimeSources.NanoTimeSource(); + + private static final Map simTimeSources = new ConcurrentHashMap<>(); + + /** + * Obtain an instance of time source. + * + * @param type supported types: currentTime, nanoTime and accelerated + * time with a double factor in the form of simTime:FACTOR, e.g. 
+ * simTime:2.5 + * + * @return one of the supported types + */ + public static TimeSource get(String type) { + if (type == null) { + return TimeSources.NANO_TIME; + } else if (type.equals("currentTime") + || type.equals(TimeSources.CurrentTimeSource.class.getSimpleName())) { + return TimeSources.CURRENT_TIME; + } else if (type.equals("nanoTime") + || type.equals(TimeSources.NanoTimeSource.class.getSimpleName())) { + return TimeSources.NANO_TIME; + } else if (type.startsWith("simTime") + || type.startsWith(TimeSources.SimTimeSource.class.getSimpleName())) { + return simTimeSources.computeIfAbsent( + type, + t -> { + String[] parts = t.split(":"); + double mul = 1.0; + if (parts.length != 2) { + log.warn("Invalid simTime specification, assuming multiplier==1.0: '{}'.", type); + } else { + try { + mul = Double.parseDouble(parts[1]); + } catch (Exception e) { + log.warn("Invalid simTime specification, assuming multiplier==1.0: '{}'.", type); + } + } + return new TimeSources.SimTimeSource(mul); + }); + } else { + throw new UnsupportedOperationException("Unsupported time source type '" + type + "'."); + } + } + + /** + * Implementation that uses {@link System#currentTimeMillis()}. This implementation's {@link + * #getTimeNs()} returns the same values as {@link #getEpochTimeNs()}. + */ + public static final class CurrentTimeSource extends TimeSource { + + @Override + @SuppressForbidden(reason = "Needed to provide timestamps based on currentTimeMillis.") + public long getTimeNs() { + return TimeUnit.NANOSECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS); + } + + @Override + public long getEpochTimeNs() { + return getTimeNs(); + } + + @Override + public long[] getTimeAndEpochNs() { + long time = getTimeNs(); + return new long[] {time, time}; + } + + @Override + public void sleep(long ms) throws InterruptedException { + Thread.sleep(ms); + } + + @Override + public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) { + return toUnit.convert(value, fromUnit); + } + } + + /** + * Implementation that uses {@link System#nanoTime()}. Epoch time is initialized using {@link + * TimeSources.CurrentTimeSource}, and then calculated as the elapsed number of nanoseconds as + * measured by this implementation. + */ + public static final class NanoTimeSource extends TimeSource { + private final long epochStart; + private final long nanoStart; + + public NanoTimeSource() { + epochStart = TimeSources.CURRENT_TIME.getTimeNs(); + nanoStart = System.nanoTime(); + } + + @Override + public long getTimeNs() { + return System.nanoTime(); + } + + @Override + public long getEpochTimeNs() { + return epochStart + getTimeNs() - nanoStart; + } + + @Override + public long[] getTimeAndEpochNs() { + long time = getTimeNs(); + return new long[] {time, epochStart + time - nanoStart}; + } + + @Override + public void sleep(long ms) throws InterruptedException { + Thread.sleep(ms); + } + + @Override + public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) { + return toUnit.convert(value, fromUnit); + } + } + + /** Implementation that uses {@link TimeSources#NANO_TIME} accelerated by a double multiplier. */ + public static final class SimTimeSource extends TimeSource { + + final double multiplier; + final long nanoStart; + final long epochStart; + + /** + * Create a simulated time source that runs faster than real time by a multiplier. 
+ * + * @param multiplier must be greater than 0.0 + */ + public SimTimeSource(double multiplier) { + this.multiplier = multiplier; + epochStart = TimeSources.CURRENT_TIME.getTimeNs(); + nanoStart = TimeSources.NANO_TIME.getTimeNs(); + } + + @Override + public long getTimeNs() { + return nanoStart + + Math.round((double) (TimeSources.NANO_TIME.getTimeNs() - nanoStart) * multiplier); + } + + @Override + public long getEpochTimeNs() { + return epochStart + getTimeNs() - nanoStart; + } + + @Override + public long[] getTimeAndEpochNs() { + long time = getTimeNs(); + return new long[] {time, epochStart + time - nanoStart}; + } + + @Override + public void sleep(long ms) throws InterruptedException { + ms = Math.round((double) ms / multiplier); + Thread.sleep(ms); + } + + @Override + public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) { + long nano = Math.round((double) TimeUnit.NANOSECONDS.convert(value, fromUnit) / multiplier); + return toUnit.convert(nano, TimeUnit.NANOSECONDS); + } + + @Override + public String toString() { + return super.toString() + ":" + multiplier; + } + } +} diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java index 4f0fc0ed374..8322c96bb4f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java @@ -29,7 +29,7 @@ import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TimeOut; import org.junit.After; import org.junit.Test; @@ -175,7 +175,7 @@ public void testCommitWithinOnDelete() throws Exception { assertEquals(1, rsp.getResults().getNumFound()); // check if the doc has been deleted every 250 ms for 30 seconds - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); do { Thread.sleep(250); // wait 250 ms diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java b/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java index 331330e3fdb..349d801b706 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java @@ -36,7 +36,7 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.SolrResponseBase; import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettyConfig; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; @@ -242,7 +242,7 @@ public void testReliability() throws Exception { // wait maximum ms for serverName to come back up private void waitForServer( int maxSeconds, LBHttpSolrClient client, int nServers, String serverName) throws Exception { - final TimeOut timeout = new TimeOut(maxSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(maxSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { QueryResponse resp; try { diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java index a4cf3292e81..af935abd94b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java @@ -36,7 +36,7 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.SolrResponseBase; import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.embedded.JettyConfig; import org.apache.solr.embedded.JettySolrRunner; import org.apache.solr.util.TimeOut; @@ -238,7 +238,7 @@ public void testReliability() throws Exception { private void waitForServer( int maxSeconds, LBHttp2SolrClient client, int nServers, String serverName) throws Exception { - final TimeOut timeout = new TimeOut(maxSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(maxSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { QueryResponse resp; try { diff --git a/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java b/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java index f9d6026edd0..144648e0678 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java @@ -198,7 +198,7 @@ public void tellWorkerToFinish() { public Boolean call() { // absolute last resort timeout to prevent infinite while loop final TimeOut threadTimeout = - new TimeOut(MAX_SANE_WAIT_DURATION_MS, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + new TimeOut(MAX_SANE_WAIT_DURATION_MS, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); while (!threadTimeout.hasTimedOut()) { try { diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestTimeSource.java b/solr/solrj/src/test/org/apache/solr/common/util/TestTimeSource.java index 474ad66dd34..56a96aa81c2 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestTimeSource.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestTimeSource.java @@ -25,9 +25,9 @@ public class TestTimeSource extends SolrTestCaseJ4 { @Test public void testEpochTime() throws Exception { - doTestEpochTime(TimeSource.CURRENT_TIME); - doTestEpochTime(TimeSource.NANO_TIME); - doTestEpochTime(TimeSource.get("simTime:50")); + doTestEpochTime(TimeSources.CURRENT_TIME); + doTestEpochTime(TimeSources.NANO_TIME); + doTestEpochTime(TimeSources.get("simTime:50")); } private void doTestEpochTime(TimeSource ts) throws Exception { diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java index edbf20dc519..0ffafdd7a40 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java @@ -85,7 +85,7 @@ import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.StrUtils; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.common.util.Utils; import 
org.apache.solr.core.CoreContainer; import org.apache.solr.core.Diagnostics; @@ -2347,7 +2347,7 @@ public static String getBaseUrlFromZk(ClusterState clusterState, String collecti public static void waitForNon403or404or503(SolrClient collectionClient, String baseUrl) throws Exception { SolrException exp = null; - final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { boolean missing = false; @@ -2805,7 +2805,7 @@ public static RequestStatusState getRequestStateAfterCompletion( String requestId, int waitForSeconds, SolrClient client) throws IOException, SolrServerException { RequestStatusState state = null; - final TimeOut timeout = new TimeOut(waitForSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + final TimeOut timeout = new TimeOut(waitForSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeout.hasTimedOut()) { state = getRequestState(requestId, client); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java index 805866dc855..1ecb2b27a73 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java @@ -40,7 +40,7 @@ import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; import org.apache.solr.embedded.JettySolrRunner; @@ -680,7 +680,7 @@ public static void start(List jettys) throws Exception { */ public static void wait(long runLength, String collectionName, ZkStateReader zkStateReader) throws InterruptedException { - TimeOut t = new TimeOut(runLength, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + TimeOut t = new TimeOut(runLength, TimeUnit.MILLISECONDS, TimeSources.NANO_TIME); while (!t.hasTimedOut()) { Thread.sleep(Math.min(1000, t.timeLeft(TimeUnit.MILLISECONDS))); logCollectionStateSummary(collectionName, zkStateReader); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java index c600c7e4ffa..d7d076dbfb4 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java @@ -76,7 +76,7 @@ import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.TracerConfigurator; import org.apache.solr.embedded.JettyConfig; @@ -354,7 +354,7 @@ private void waitForAllNodes(int numServers, int timeoutSeconds) if (timeoutSeconds == 0) { timeoutSeconds = DEFAULT_TIMEOUT; } - TimeOut timeout = new TimeOut(timeoutSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(timeoutSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME); synchronized (startupWait) { while (numServers != (numRunning = numRunningJetty(getJettySolrRunners()))) { @@ -602,7 +602,7 @@ public void deleteAllCollections() 
throws Exception { // may be deleted, but may not be gone yet - we only wait to not see it in ZK, not for core // unloads - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (true) { if (timeout.hasTimedOut()) { diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractCollectionsAPIDistributedZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractCollectionsAPIDistributedZkTestBase.java index d0e250e59c7..79f96c7d587 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractCollectionsAPIDistributedZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractCollectionsAPIDistributedZkTestBase.java @@ -61,7 +61,7 @@ import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrInfoBean.Category; @@ -447,7 +447,7 @@ class Coll { .add("id", "8") .commit(cluster.getSolrClient(), collectionName); long numFound = 0; - TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSources.NANO_TIME); while (!timeOut.hasTimedOut()) { numFound = @@ -534,7 +534,7 @@ private void checkInstanceDirs(JettySolrRunner jetty) throws IOException { private boolean waitForReloads(String collectionName, Map urlToTimeBefore) throws SolrServerException, IOException { - TimeOut timeout = new TimeOut(45, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(45, TimeUnit.SECONDS, TimeSources.NANO_TIME); boolean allTimesAreCorrect = false; while (!timeout.hasTimedOut()) { diff --git a/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java b/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java index 9c033827241..0010e32d2c8 100644 --- a/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java +++ b/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java @@ -63,7 +63,7 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; import org.apache.solr.handler.admin.CoreAdminOperation; @@ -574,7 +574,7 @@ private void setProtocolAndHost() { } private void retryOnPortBindFailure(int portRetryTime, int port) throws Exception { - TimeOut timeout = new TimeOut(portRetryTime, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(portRetryTime, TimeUnit.SECONDS, TimeSources.NANO_TIME); int tryCnt = 1; while (true) { try { @@ -652,7 +652,7 @@ public synchronized void stop() throws Exception { rte.stop(); - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSources.NANO_TIME); timeout.waitFor("Timeout waiting for reserved executor to stop.", rte::isStopped); } diff --git a/solr/test-framework/src/java/org/apache/solr/handler/BackupStatusChecker.java 
b/solr/test-framework/src/java/org/apache/solr/handler/BackupStatusChecker.java index a3e8571f8c1..a8756964897 100644 --- a/solr/test-framework/src/java/org/apache/solr/handler/BackupStatusChecker.java +++ b/solr/test-framework/src/java/org/apache/solr/handler/BackupStatusChecker.java @@ -27,7 +27,7 @@ import org.apache.solr.client.solrj.request.GenericSolrRequest; import org.apache.solr.client.solrj.response.SimpleSolrResponse; import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.TimeSource; +import org.apache.solr.common.util.TimeSources; import org.apache.solr.util.TimeOut; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +66,7 @@ public BackupStatusChecker(final SolrClient client) { public String waitForBackupSuccess(final String backupName, final int timeLimitInSeconds) throws Exception { return waitForBackupSuccess( - backupName, new TimeOut(timeLimitInSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + backupName, new TimeOut(timeLimitInSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME)); } /** @@ -111,7 +111,7 @@ public String waitForDifferentBackupDir(final String directoryName, final int ti throws Exception { return waitForDifferentBackupDir( - directoryName, new TimeOut(timeLimitInSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + directoryName, new TimeOut(timeLimitInSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME)); } /** @@ -221,7 +221,7 @@ private String _checkBackupSuccess(final String backupName) throws Exception { public void waitForBackupDeletionSuccess(final String backupName, final int timeLimitInSeconds) throws Exception { waitForBackupDeletionSuccess( - backupName, new TimeOut(timeLimitInSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + backupName, new TimeOut(timeLimitInSeconds, TimeUnit.SECONDS, TimeSources.NANO_TIME)); } /** From 84c5ac4755f76fcc498139322172b9b55794694d Mon Sep 17 00:00:00 2001 From: Christos Malliaridis Date: Wed, 20 Nov 2024 22:44:09 +0100 Subject: [PATCH 3/7] Fix ClassInitializationDeadlock warning for DocRouter --- .../collections/CollectionHandlingUtils.java | 4 +- .../api/collections/CreateCollectionCmd.java | 4 +- .../api/collections/ReindexCollectionCmd.java | 3 +- .../cloud/api/collections/SplitShardCmd.java | 7 ++- .../cloud/overseer/ClusterStateMutator.java | 7 ++- .../apache/solr/handler/admin/SplitOp.java | 7 ++- .../solr/cloud/ClusterStateMockUtil.java | 4 +- .../solr/cloud/ClusterStateMockUtilTest.java | 8 +-- .../apache/solr/cloud/ClusterStateTest.java | 6 +- ...rseerCollectionConfigSetProcessorTest.java | 4 +- .../org/apache/solr/cloud/SliceStateTest.java | 4 +- .../solr/cloud/TestHashPartitioner.java | 13 +++-- .../cloud/overseer/ZkStateReaderTest.java | 34 ++++++------ .../cloud/overseer/ZkStateWriterTest.java | 10 ++-- .../org/apache/solr/core/CoreSorterTest.java | 4 +- .../solr/common/cloud/ClusterState.java | 6 +- .../apache/solr/common/cloud/DocRouter.java | 41 ++++++-------- .../apache/solr/common/cloud/DocRouters.java | 55 +++++++++++++++++++ .../apache/solr/common/cloud/RoutingRule.java | 2 +- .../org/apache/solr/common/cloud/Slice.java | 2 +- 20 files changed, 138 insertions(+), 87 deletions(-) create mode 100644 solr/solrj/src/java/org/apache/solr/common/cloud/DocRouters.java diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CollectionHandlingUtils.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CollectionHandlingUtils.java index 1816d96f5d7..9cb6ffbabb1 100644 --- 
a/solr/core/src/java/org/apache/solr/cloud/api/collections/CollectionHandlingUtils.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CollectionHandlingUtils.java @@ -51,7 +51,7 @@ import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocCollection.CollectionStateProps; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; @@ -113,7 +113,7 @@ private static Map makeCollectionPropsAndDefaults() { Map propsAndDefaults = Utils.makeMap( CollectionStateProps.DOC_ROUTER, - (Object) DocRouter.DEFAULT_NAME, + (Object) DocRouters.DEFAULT_NAME, CollectionStateProps.REPLICATION_FACTOR, "1", CollectionStateProps.PER_REPLICA_STATE, diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java index 5592303580a..b8f38e75510 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java @@ -64,7 +64,7 @@ import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocCollection.CollectionStateProps; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.ImplicitDocRouter; import org.apache.solr.common.cloud.PerReplicaStates; import org.apache.solr.common.cloud.PerReplicaStatesOps; @@ -144,7 +144,7 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList shardNames = populateShardNames(message, router); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java index 15324ec61aa..cb0eba2d26f 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java @@ -48,6 +48,7 @@ import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.ReplicaCount; import org.apache.solr.common.cloud.ZkNodeProps; @@ -255,7 +256,7 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList slice, String splitKey) { DocCollection collection = clusterState.getCollection(collectionName); - DocRouter router = collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT; + DocRouter router = collection.getRouter() != null ? collection.getRouter() : DocRouters.DEFAULT; Slice parentSlice; @@ -1076,7 +1077,7 @@ public static String fillRanges( if (range == null) { range = new PlainIdRouter().fullRange(); } - DocRouter router = collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT; + DocRouter router = collection.getRouter() != null ? 
collection.getRouter() : DocRouters.DEFAULT; if (rangesStr != null) { String[] ranges = rangesStr.split(","); if (ranges.length == 0 || ranges.length == 1) { @@ -1087,7 +1088,7 @@ public static String fillRanges( for (int i = 0; i < ranges.length; i++) { String r = ranges[i]; try { - subRanges.add(DocRouter.DEFAULT.fromString(r)); + subRanges.add(DocRouters.DEFAULT.fromString(r)); } catch (Exception e) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java index a5edff69aff..7672c212648 100644 --- a/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java +++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ClusterStateMutator.java @@ -34,6 +34,7 @@ import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocCollection.CollectionStateProps; import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.ImplicitDocRouter; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; @@ -67,8 +68,8 @@ public ZkWriteCommand createCollection(ClusterState clusterState, ZkNodeProps me Map routerSpec = DocRouter.getRouterSpec(message); String routerName = - routerSpec.get(NAME) == null ? DocRouter.DEFAULT_NAME : (String) routerSpec.get(NAME); - DocRouter router = DocRouter.getDocRouter(routerName); + routerSpec.get(NAME) == null ? DocRouters.DEFAULT_NAME : (String) routerSpec.get(NAME); + DocRouter router = DocRouters.getDocRouter(routerName); Object messageShardsObj = message.get("shards"); @@ -80,7 +81,7 @@ public ZkWriteCommand createCollection(ClusterState clusterState, ZkNodeProps me List shardNames = new ArrayList<>(); if (router instanceof ImplicitDocRouter) { - getShardNames(shardNames, message.getStr("shards", DocRouter.DEFAULT_NAME)); + getShardNames(shardNames, message.getStr("shards", DocRouters.DEFAULT_NAME)); } else { int numShards = message.getInt(ZkStateReader.NUM_SHARDS_PROP, -1); if (numShards < 1) diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java b/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java index e244f3e2543..38814e3d546 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/SplitOp.java @@ -43,6 +43,7 @@ import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocCollection.CollectionStateProps; import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.params.CommonAdminParams; import org.apache.solr.common.params.CoreAdminParams; @@ -104,7 +105,7 @@ public void execute(CoreAdminHandler.CallInfo it) throws Exception { ranges = new ArrayList<>(rangesArr.length); for (String r : rangesArr) { try { - ranges.add(DocRouter.DEFAULT.fromString(r)); + ranges.add(DocRouters.DEFAULT.fromString(r)); } catch (Exception e) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, @@ -151,7 +152,7 @@ public void execute(CoreAdminHandler.CallInfo it) throws Exception { DocCollection collection = clusterState.getCollection(collectionName); String sliceName = parentCore.getCoreDescriptor().getCloudDescriptor().getShardId(); Slice slice = collection.getSlice(sliceName); - router = collection.getRouter() != null ? 
collection.getRouter() : DocRouter.DEFAULT; + router = collection.getRouter() != null ? collection.getRouter() : DocRouters.DEFAULT; if (ranges == null) { DocRouter.Range currentRange = slice.getRange(); ranges = currentRange != null ? router.partitionRange(partitions, currentRange) : null; @@ -269,7 +270,7 @@ private void handleGetRanges(CoreAdminHandler.CallInfo it, String coreName) thro Slice slice = collection.getSlice(sliceName); CompositeIdRouter router = (CompositeIdRouter) - (collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT); + (collection.getRouter() != null ? collection.getRouter() : DocRouters.DEFAULT); DocRouter.Range currentRange = slice.getRange(); Object routerObj = diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java index 51a3c3263f5..2a500dba126 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java @@ -27,7 +27,7 @@ import java.util.regex.Pattern; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Replica.ReplicaStateProps; import org.apache.solr.common.cloud.Slice; @@ -133,7 +133,7 @@ public static ZkStateReader buildClusterState( collName = "collection" + (collectionStates.size() + 1), slices, collectionProps, - DocRouter.DEFAULT); + DocRouters.DEFAULT); collectionStates.put(docCollection.getName(), docCollection); break; case "s": diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java index 72707e70f84..a723502ad2b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java @@ -20,7 +20,7 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; @@ -39,7 +39,7 @@ public void testBuildClusterState_Simple() { assertEquals(1, clusterState.size()); DocCollection collection1 = clusterState.getCollectionOrNull("collection1"); assertNotNull(collection1); - assertEquals(DocRouter.DEFAULT, collection1.getRouter()); + assertEquals(DocRouters.DEFAULT, collection1.getRouter()); assertEquals(1, collection1.getActiveSlices().size()); assertEquals(1, collection1.getSlices().size()); Slice slice1 = collection1.getSlice("slice1"); @@ -65,7 +65,7 @@ public void testBuildClusterState_ReplicaTypes() { assertEquals(1, clusterState.size()); DocCollection collection1 = clusterState.getCollectionOrNull("collection1"); assertNotNull(collection1); - assertEquals(DocRouter.DEFAULT, collection1.getRouter()); + assertEquals(DocRouters.DEFAULT, collection1.getRouter()); assertEquals(1, collection1.getActiveSlices().size()); assertEquals(1, collection1.getSlices().size()); Slice slice1 = collection1.getSlice("slice1"); @@ -86,7 +86,7 @@ public void testBuildClusterState_ReplicaStateAndType() { assertEquals(1, clusterState.size()); DocCollection 
collection1 = clusterState.getCollectionOrNull("collection1"); assertNotNull(collection1); - assertEquals(DocRouter.DEFAULT, collection1.getRouter()); + assertEquals(DocRouters.DEFAULT, collection1.getRouter()); assertEquals(1, collection1.getActiveSlices().size()); assertEquals(1, collection1.getSlices().size()); Slice slice1 = collection1.getSlice("slice1"); diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java index e88193b4a3c..45785e9ada8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java @@ -24,7 +24,7 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; @@ -62,11 +62,11 @@ public void testStoreAndRead() { collectionStates.put( "collection1", DocCollection.create( - "collection1", slices, props, DocRouter.DEFAULT, 0, Instant.EPOCH, null)); + "collection1", slices, props, DocRouters.DEFAULT, 0, Instant.EPOCH, null)); collectionStates.put( "collection2", DocCollection.create( - "collection2", slices, props, DocRouter.DEFAULT, 0, Instant.EPOCH, null)); + "collection2", slices, props, DocRouters.DEFAULT, 0, Instant.EPOCH, null)); ClusterState clusterState = new ClusterState(liveNodes, collectionStates); assertFalse(clusterState.getCollection("collection1").getProperties().containsKey("shards")); diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java index 27d4ebd2a87..f01f046c012 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java @@ -61,7 +61,7 @@ import org.apache.solr.common.cloud.Aliases; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; @@ -687,7 +687,7 @@ private void handleCreateCollMessageProps(ZkNodeProps props) { collName, new HashMap<>(), props.getProperties(), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.EPOCH, distribStateManagerMock.getPrsSupplier(collName)))); diff --git a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java index 64ec9f3c035..6e30f459a53 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java @@ -24,7 +24,7 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; @@ -56,7 +56,7 @@ public void testDefaultSliceState() { 
assertSame("Default state not set to active", Slice.State.ACTIVE, slice.getState()); slices.put("shard1", slice); collectionStates.put( - "collection1", new DocCollection("collection1", slices, props, DocRouter.DEFAULT)); + "collection1", new DocCollection("collection1", slices, props, DocRouters.DEFAULT)); ClusterState clusterState = new ClusterState(liveNodes, collectionStates); byte[] bytes = Utils.toJSON(clusterState); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java b/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java index af09e728a00..af28ad97930 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java @@ -28,6 +28,7 @@ import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocRouter; import org.apache.solr.common.cloud.DocRouter.Range; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.PlainIdRouter; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; @@ -38,7 +39,7 @@ public class TestHashPartitioner extends SolrTestCaseJ4 { public void testMapHashes() { - DocRouter hp = DocRouter.DEFAULT; + DocRouter hp = DocRouters.DEFAULT; List ranges; // make sure the partitioner uses the "natural" boundaries and doesn't suffer from an off-by-one @@ -118,7 +119,7 @@ public int hash(String id) { } public void testHashCodes() { - DocRouter router = DocRouter.getDocRouter(PlainIdRouter.NAME); + DocRouter router = DocRouters.getDocRouter(PlainIdRouter.NAME); assertTrue(router instanceof PlainIdRouter); DocCollection coll = createCollection(4, router); doNormalIdHashing(coll); @@ -161,9 +162,9 @@ public void doQuery(DocCollection coll, String id, String expectedShards) { } public void testCompositeHashCodes() { - DocRouter router = DocRouter.getDocRouter(CompositeIdRouter.NAME); + DocRouter router = DocRouters.getDocRouter(CompositeIdRouter.NAME); assertTrue(router instanceof CompositeIdRouter); - router = DocRouter.DEFAULT; + router = DocRouters.DEFAULT; assertTrue(router instanceof CompositeIdRouter); DocCollection coll = createCollection(4, router); @@ -203,7 +204,7 @@ public void testCompositeHashCodes() { /** Make sure CompositeIdRouter doesn't throw exceptions for non-conforming IDs */ public void testNonConformingCompositeIds() throws Exception { - DocRouter router = DocRouter.getDocRouter(CompositeIdRouter.NAME); + DocRouter router = DocRouters.getDocRouter(CompositeIdRouter.NAME); DocCollection coll = createCollection(4, router); String[] ids = { "A!B!C!D", @@ -229,7 +230,7 @@ public void testNonConformingCompositeIds() throws Exception { /** Make sure CompositeIdRouter can route random IDs without throwing exceptions */ public void testRandomCompositeIds() throws Exception { - DocRouter router = DocRouter.getDocRouter(CompositeIdRouter.NAME); + DocRouter router = DocRouters.getDocRouter(CompositeIdRouter.NAME); DocCollection coll = createCollection(TestUtil.nextInt(random(), 1, 10), router); StringBuilder idBuilder = new StringBuilder(); for (int i = 0; i < 10000; ++i) { diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java index 1136c64bf1b..a32fd0ade06 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java @@ -45,7 +45,7 @@ import 
org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocCollectionWatcher; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.PerReplicaStates; import org.apache.solr.common.cloud.PerReplicaStatesOps; import org.apache.solr.common.cloud.Replica; @@ -151,7 +151,7 @@ public void testExternalCollectionWatchedNotWatched() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -179,7 +179,7 @@ public void testCollectionStateWatcherCaching() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -199,7 +199,7 @@ public void testCollectionStateWatcherCaching() throws Exception { "c1", new HashMap<>(), props, - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -241,7 +241,7 @@ public void testWatchedCollectionCreation() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -284,7 +284,7 @@ public void testNodeVersion() throws Exception { ConfigSetsHandler.DEFAULT_CONFIGSET_NAME, DocCollection.CollectionStateProps.PER_REPLICA_STATE, "true"), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -405,7 +405,7 @@ public void testForciblyRefreshAllClusterState() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -428,7 +428,7 @@ public void testForciblyRefreshAllClusterState() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, ref.get().getZNodeVersion(), Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -452,7 +452,7 @@ public void testForciblyRefreshAllClusterState() throws Exception { "c2", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -493,7 +493,7 @@ public void testForciblyRefreshAllClusterStateCompressed() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), null); @@ -515,7 +515,7 @@ public void testForciblyRefreshAllClusterStateCompressed() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, ref.get().getZNodeVersion(), Instant.now(), null); @@ -538,7 +538,7 @@ public void testForciblyRefreshAllClusterStateCompressed() throws Exception { "c2", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), null); @@ 
-575,7 +575,7 @@ public void testGetCurrentCollections() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -586,7 +586,7 @@ public void testGetCurrentCollections() throws Exception { "c2", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -642,7 +642,7 @@ public void testWatchRaceCondition() throws Exception { Map.of( ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, currentVersion, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -751,7 +751,7 @@ public void testStateWatcherRaceCondition() throws Exception { "c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT, + DocRouters.DEFAULT, dataVersion, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -794,7 +794,7 @@ public void testDeletePrsCollection() throws Exception { collectionName, Map.of(sliceName, slice), Collections.singletonMap(DocCollection.CollectionStateProps.PER_REPLICA_STATE, true), - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java index 4008f694359..5e1c8abbf0f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java @@ -33,7 +33,7 @@ import org.apache.solr.cloud.ZkTestServer; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.PerReplicaStatesOps; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; @@ -169,7 +169,7 @@ public void testZkStateWriterPendingAndNonBatchedTimeExceeded() throws Exception "prs1", new HashMap<>(), prsProps, - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -244,7 +244,7 @@ public void testZkStateWriterPendingAndNonBatchedBatchSizeExceeded() throws Exce "prs1", new HashMap<>(), prsProps, - DocRouter.DEFAULT, + DocRouters.DEFAULT, 0, Instant.now(), PerReplicaStatesOps.getZkClientPrsSupplier( @@ -502,7 +502,7 @@ public void testSingleExternalCollectionCompressedState() throws Exception { new ZkWriteCommand( "c2", DocCollection.create( - "c2", slices, new HashMap<>(), DocRouter.DEFAULT, 0, Instant.now(), null)); + "c2", slices, new HashMap<>(), DocRouters.DEFAULT, 0, Instant.now(), null)); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); writer.writePendingUpdates(); @@ -525,6 +525,6 @@ public void testSingleExternalCollectionCompressedState() throws Exception { private DocCollection createDocCollection(String name, Map props) { return DocCollection.create( - name, new HashMap<>(), props, DocRouter.DEFAULT, 0, Instant.now(), null); + name, new HashMap<>(), props, DocRouters.DEFAULT, 0, Instant.now(), null); } } diff --git a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java 
b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java index bbb55825445..c087733249f 100644 --- a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java +++ b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java @@ -33,7 +33,7 @@ import org.apache.solr.cloud.ZkController; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.DocRouter; +import org.apache.solr.common.cloud.DocRouters; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; @@ -146,7 +146,7 @@ public void integrationTest() { sliceMap, Collections.singletonMap( ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), - DocRouter.DEFAULT); + DocRouters.DEFAULT); collToState.put(collection, col); } // reverse map diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java index 19bfc2565d6..b7f710cd5d5 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java @@ -345,14 +345,14 @@ public static DocCollection collectionFromObjects( Object routerObj = props.get(CollectionStateProps.DOC_ROUTER); DocRouter router; if (routerObj == null) { - router = DocRouter.DEFAULT; + router = DocRouters.DEFAULT; } else if (routerObj instanceof String) { // back compat with Solr4.4 - router = DocRouter.getDocRouter((String) routerObj); + router = DocRouters.getDocRouter((String) routerObj); } else { @SuppressWarnings({"rawtypes"}) Map routerProps = (Map) routerObj; - router = DocRouter.getDocRouter((String) routerProps.get("name")); + router = DocRouters.getDocRouter((String) routerProps.get("name")); } return DocCollection.create(name, slices, props, router, version, creationTime, prsSupplier); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java index 2ac50b6b1f5..4b82856b5af 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java @@ -19,13 +19,11 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.apache.solr.cluster.api.HashRange; -import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.DocCollection.CollectionStateProps; import org.apache.solr.common.params.SolrParams; @@ -38,14 +36,24 @@ * @lucene.experimental */ public abstract class DocRouter { - public static final String DEFAULT_NAME = CompositeIdRouter.NAME; - public static final DocRouter DEFAULT; + /** + * @deprecated Use {@link DocRouters#DEFAULT_NAME} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) + public static final String DEFAULT_NAME = DocRouters.DEFAULT_NAME; + + /** + * @deprecated Use {@link DocRouters#DEFAULT} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) + public static final DocRouter DEFAULT = DocRouters.DEFAULT; + /** + * @deprecated Use {@link DocRouters#getDocRouter(String)} instead. 
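+ *
+ *     <p>Illustrative usage (example only, grounded in the registry shown below): {@code
+ *     DocRouter router = DocRouters.getDocRouter(CompositeIdRouter.NAME);} resolves the same
+ *     {@code CompositeIdRouter} instance that {@code DocRouters.DEFAULT} exposes.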
+ */ + @Deprecated(since = "9.8", forRemoval = true) public static DocRouter getDocRouter(String routerName) { - DocRouter router = routerMap.get(routerName); - if (router != null) return router; - throw new SolrException( - SolrException.ErrorCode.SERVER_ERROR, "Unknown document router '" + routerName + "'"); + return DocRouters.getDocRouter(routerName); } public String getRouteField(DocCollection coll) { @@ -69,23 +77,6 @@ public static Map getRouterSpec(ZkNodeProps props) { return map; } - // currently just an implementation detail... - private static final Map routerMap; - - static { - routerMap = new HashMap<>(); - PlainIdRouter plain = new PlainIdRouter(); - // instead of doing back compat this way, we could always convert the clusterstate on first read - // to "plain" if it doesn't have any properties. - routerMap.put(null, plain); // back compat with 4.0 - routerMap.put(PlainIdRouter.NAME, plain); - routerMap.put(CompositeIdRouter.NAME, new CompositeIdRouter()); - routerMap.put(ImplicitDocRouter.NAME, new ImplicitDocRouter()); - // NOTE: careful that the map keys (the static .NAME members) are filled in by making them final - - DEFAULT = routerMap.get(DEFAULT_NAME); - } - // Hash ranges can't currently "wrap" - i.e. max must be greater or equal to min. // TODO: ranges may not be all contiguous in the future (either that or we will // need an extra class to model a collection of ranges) diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouters.java b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouters.java new file mode 100644 index 00000000000..b32c4ddb927 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouters.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.common.cloud; + +import java.util.HashMap; +import java.util.Map; +import org.apache.solr.common.SolrException; + +public class DocRouters { + + private DocRouters() {} + + public static final String DEFAULT_NAME = CompositeIdRouter.NAME; + + public static final DocRouter DEFAULT; + + // currently just an implementation detail... + private static final Map routerMap; + + static { + routerMap = new HashMap<>(); + PlainIdRouter plain = new PlainIdRouter(); + // instead of doing back compat this way, we could always convert the clusterstate on first read + // to "plain" if it doesn't have any properties. 
+ routerMap.put(null, plain); // back compat with 4.0 + routerMap.put(PlainIdRouter.NAME, plain); + routerMap.put(CompositeIdRouter.NAME, new CompositeIdRouter()); + routerMap.put(ImplicitDocRouter.NAME, new ImplicitDocRouter()); + // NOTE: careful that the map keys (the static .NAME members) are filled in by making them final + + DEFAULT = routerMap.get(DEFAULT_NAME); + } + + public static DocRouter getDocRouter(String routerName) { + DocRouter router = routerMap.get(routerName); + if (router != null) return router; + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, "Unknown document router '" + routerName + "'"); + } +} diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/RoutingRule.java b/solr/solrj/src/java/org/apache/solr/common/cloud/RoutingRule.java index d4f863a125d..0fef92790d1 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/RoutingRule.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/RoutingRule.java @@ -36,7 +36,7 @@ public RoutingRule(String routeKey, Map propMap) { if (rangesArr != null && rangesArr.length > 0) { this.routeRanges = new ArrayList<>(); for (String r : rangesArr) { - routeRanges.add(DocRouter.DEFAULT.fromString(r)); + routeRanges.add(DocRouters.DEFAULT.fromString(r)); } } else { this.routeRanges = null; diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java b/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java index 99374dcb7af..223703e21ca 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/Slice.java @@ -176,7 +176,7 @@ public Slice( tmpRange = (DocRouter.Range) rangeObj; } else if (rangeObj != null) { // Doesn't support custom implementations of Range, but currently not needed. 
- tmpRange = DocRouter.DEFAULT.fromString(rangeObj.toString()); + tmpRange = DocRouters.DEFAULT.fromString(rangeObj.toString()); } range = tmpRange; From 5dd13bb7c58f730c402cd979d5bbc289ecf3ed0f Mon Sep 17 00:00:00 2001 From: Christos Malliaridis Date: Wed, 20 Nov 2024 22:45:24 +0100 Subject: [PATCH 4/7] Fix ClassInitializationDeadlock warning for MapWriter --- .../common/cloud/NodesSysPropsCacher.java | 4 +-- .../org/apache/solr/common/MapWriter.java | 7 +++-- .../org/apache/solr/common/MapWriters.java | 27 +++++++++++++++++++ 3 files changed, 34 insertions(+), 4 deletions(-) create mode 100644 solr/solrj/src/java/org/apache/solr/common/MapWriters.java diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java index 37699901323..0a312537585 100644 --- a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java +++ b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java @@ -24,7 +24,7 @@ import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.impl.Http2SolrClient; import org.apache.solr.client.solrj.request.GenericSolrRequest; -import org.apache.solr.common.MapWriter; +import org.apache.solr.common.MapWriters; import org.apache.solr.common.NavigableObject; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; @@ -92,7 +92,7 @@ private Map fetchProps(String nodeName, Collection tags) solrClient .requestWithBaseUrl(zkStateReader.getBaseUrlForNodeName(nodeName), null, req) .getResponse(); - NavigableObject metrics = (NavigableObject) response._get("metrics", MapWriter.EMPTY); + NavigableObject metrics = (NavigableObject) response._get("metrics", MapWriters.EMPTY); keys.forEach((tag, key) -> result.put(tag, metrics._get(key, null))); return result; } catch (Exception e) { diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java index 0bbe2dc50a0..2bfbe421b90 100644 --- a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java @@ -18,7 +18,6 @@ package org.apache.solr.common; import java.io.IOException; -import java.util.Collections; import java.util.Map; import java.util.function.BiConsumer; import java.util.function.BiPredicate; @@ -158,5 +157,9 @@ default BiConsumer getBiConsumer() { } } - MapWriter EMPTY = new MapWriterMap(Collections.emptyMap()); + /** + * @deprecated Use {@link MapWriters#EMPTY} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) + MapWriter EMPTY = MapWriters.EMPTY; } diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriters.java b/solr/solrj/src/java/org/apache/solr/common/MapWriters.java new file mode 100644 index 00000000000..fe9e0f9448c --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/MapWriters.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.common; + +import java.util.Collections; + +public class MapWriters { + + private MapWriters() {} + + public static MapWriter EMPTY = new MapWriterMap(Collections.emptyMap()); +} From cc7249ad0850bf07b42c186a45ef1db1c19b6e51 Mon Sep 17 00:00:00 2001 From: Christos Malliaridis Date: Wed, 20 Nov 2024 22:46:55 +0100 Subject: [PATCH 5/7] Fix ClassInitializationDeadlock warning for QParserPlugin --- .../java/org/apache/solr/core/SolrCore.java | 3 +- .../org/apache/solr/search/QParserPlugin.java | 65 +------------ .../apache/solr/search/QParserPlugins.java | 93 +++++++++++++++++++ .../apache/solr/search/QueryEqualityTest.java | 8 +- .../SignificantTermsQParserPluginTest.java | 6 +- .../solr/search/TestStandardQParsers.java | 16 ++-- 6 files changed, 114 insertions(+), 77 deletions(-) create mode 100644 solr/core/src/java/org/apache/solr/search/QParserPlugins.java diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index 55325568abe..a3c97d50edb 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -144,6 +144,7 @@ import org.apache.solr.schema.ManagedIndexSchema; import org.apache.solr.schema.SimilarityFactory; import org.apache.solr.search.QParserPlugin; +import org.apache.solr.search.QParserPlugins; import org.apache.solr.search.SolrFieldCacheBean; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.ValueSourceParser; @@ -1128,7 +1129,7 @@ protected SolrCore( initIndex(prev != null, reload); initWriters(); - qParserPlugins.init(QParserPlugin.standardPlugins, this); + qParserPlugins.init(QParserPlugins.standardPlugins, this); valueSourceParsers.init(ValueSourceParser.standardValueSourceParsers, this); transformerFactories.init(TransformerFactory.defaultFactories, this); loadSearchComponents(); diff --git a/solr/core/src/java/org/apache/solr/search/QParserPlugin.java b/solr/core/src/java/org/apache/solr/search/QParserPlugin.java index 8146c585cf0..be5d6814b32 100644 --- a/solr/core/src/java/org/apache/solr/search/QParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/QParserPlugin.java @@ -16,22 +16,11 @@ */ package org.apache.solr.search; -import java.util.Collections; -import java.util.HashMap; import java.util.Map; import org.apache.solr.common.params.SolrParams; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.search.join.BlockJoinChildQParserPlugin; -import org.apache.solr.search.join.BlockJoinParentQParserPlugin; -import org.apache.solr.search.join.FiltersQParserPlugin; -import org.apache.solr.search.join.GraphQParserPlugin; -import org.apache.solr.search.join.HashRangeQParserPlugin; -import org.apache.solr.search.mlt.MLTContentQParserPlugin; -import org.apache.solr.search.mlt.MLTQParserPlugin; -import org.apache.solr.search.neural.KnnQParserPlugin; -import org.apache.solr.search.neural.VectorSimilarityQParserPlugin; import 
org.apache.solr.util.plugin.NamedListInitializedPlugin; public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrInfoBean { @@ -43,57 +32,11 @@ public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrI * {@link QParserPlugin} has own instance of standardPlugins. This leads to cyclic dependencies of * static fields and to case when NAME field is not yet initialized. This result to NPE during * initialization. For every plugin, listed here, NAME field has to be final and static. + * + * @deprecated Use {@link QParserPlugins#standardPlugins} instead. */ - public static final Map standardPlugins; - - static { - HashMap map = new HashMap<>(30, 1); - map.put(LuceneQParserPlugin.NAME, new LuceneQParserPlugin()); - map.put(FunctionQParserPlugin.NAME, new FunctionQParserPlugin()); - map.put(PrefixQParserPlugin.NAME, new PrefixQParserPlugin()); - map.put(BoostQParserPlugin.NAME, new BoostQParserPlugin()); - map.put(DisMaxQParserPlugin.NAME, new DisMaxQParserPlugin()); - map.put(ExtendedDismaxQParserPlugin.NAME, new ExtendedDismaxQParserPlugin()); - map.put(FieldQParserPlugin.NAME, new FieldQParserPlugin()); - map.put(RawQParserPlugin.NAME, new RawQParserPlugin()); - map.put(TermQParserPlugin.NAME, new TermQParserPlugin()); - map.put(TermsQParserPlugin.NAME, new TermsQParserPlugin()); - map.put(NestedQParserPlugin.NAME, new NestedQParserPlugin()); - map.put(FunctionRangeQParserPlugin.NAME, new FunctionRangeQParserPlugin()); - map.put(SpatialFilterQParserPlugin.NAME, new SpatialFilterQParserPlugin()); - map.put(SpatialBoxQParserPlugin.NAME, new SpatialBoxQParserPlugin()); - map.put(JoinQParserPlugin.NAME, new JoinQParserPlugin()); - map.put(SurroundQParserPlugin.NAME, new SurroundQParserPlugin()); - map.put(SwitchQParserPlugin.NAME, new SwitchQParserPlugin()); - map.put(MaxScoreQParserPlugin.NAME, new MaxScoreQParserPlugin()); - map.put(BlockJoinParentQParserPlugin.NAME, new BlockJoinParentQParserPlugin()); - map.put(BlockJoinChildQParserPlugin.NAME, new BlockJoinChildQParserPlugin()); - map.put(FiltersQParserPlugin.NAME, new FiltersQParserPlugin()); - map.put(CollapsingQParserPlugin.NAME, new CollapsingQParserPlugin()); - map.put(SimpleQParserPlugin.NAME, new SimpleQParserPlugin()); - map.put(ComplexPhraseQParserPlugin.NAME, new ComplexPhraseQParserPlugin()); - map.put(ReRankQParserPlugin.NAME, new ReRankQParserPlugin()); - map.put(ExportQParserPlugin.NAME, new ExportQParserPlugin()); - map.put(MLTQParserPlugin.NAME, new MLTQParserPlugin()); - map.put(MLTContentQParserPlugin.NAME, new MLTContentQParserPlugin()); - map.put(HashQParserPlugin.NAME, new HashQParserPlugin()); - map.put(GraphQParserPlugin.NAME, new GraphQParserPlugin()); - map.put(XmlQParserPlugin.NAME, new XmlQParserPlugin()); - map.put(GraphTermsQParserPlugin.NAME, new GraphTermsQParserPlugin()); - map.put(IGainTermsQParserPlugin.NAME, new IGainTermsQParserPlugin()); - map.put(TextLogisticRegressionQParserPlugin.NAME, new TextLogisticRegressionQParserPlugin()); - map.put(SignificantTermsQParserPlugin.NAME, new SignificantTermsQParserPlugin()); - map.put(PayloadScoreQParserPlugin.NAME, new PayloadScoreQParserPlugin()); - map.put(PayloadCheckQParserPlugin.NAME, new PayloadCheckQParserPlugin()); - map.put(BoolQParserPlugin.NAME, new BoolQParserPlugin()); - map.put(MinHashQParserPlugin.NAME, new MinHashQParserPlugin()); - map.put(HashRangeQParserPlugin.NAME, new HashRangeQParserPlugin()); - map.put(RankQParserPlugin.NAME, new RankQParserPlugin()); - map.put(KnnQParserPlugin.NAME, new 
KnnQParserPlugin()); - map.put(VectorSimilarityQParserPlugin.NAME, new VectorSimilarityQParserPlugin()); - - standardPlugins = Collections.unmodifiableMap(map); - } + @Deprecated(since = "9.8", forRemoval = true) + public static final Map standardPlugins = QParserPlugins.standardPlugins; /** return a {@link QParser} */ public abstract QParser createParser( diff --git a/solr/core/src/java/org/apache/solr/search/QParserPlugins.java b/solr/core/src/java/org/apache/solr/search/QParserPlugins.java new file mode 100644 index 00000000000..2a52f1cee0c --- /dev/null +++ b/solr/core/src/java/org/apache/solr/search/QParserPlugins.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.search; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.apache.solr.search.join.BlockJoinChildQParserPlugin; +import org.apache.solr.search.join.BlockJoinParentQParserPlugin; +import org.apache.solr.search.join.FiltersQParserPlugin; +import org.apache.solr.search.join.GraphQParserPlugin; +import org.apache.solr.search.join.HashRangeQParserPlugin; +import org.apache.solr.search.mlt.MLTContentQParserPlugin; +import org.apache.solr.search.mlt.MLTQParserPlugin; +import org.apache.solr.search.neural.KnnQParserPlugin; +import org.apache.solr.search.neural.VectorSimilarityQParserPlugin; + +public class QParserPlugins { + + private QParserPlugins() {} + + /** + * Internal use - name to parser for the builtin parsers. Each query parser plugin extending + * {@link QParserPlugin} has own instance of standardPlugins. This leads to cyclic dependencies of + * static fields and to case when NAME field is not yet initialized. This result to NPE during + * initialization. For every plugin, listed here, NAME field has to be final and static. 
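+   *
+   * <p>A minimal sketch (hypothetical {@code Base}/{@code Impl} classes, not Solr code) of the
+   * initialization cycle that the ClassInitializationDeadlock check flags when such a registry
+   * lives in the plugins' shared superclass:
+   *
+   * <pre>{@code
+   * abstract class Base {
+   *   // Thread A: initializing Base locks Base, then must also initialize the subclass Impl.
+   *   static final Base DEFAULT = new Impl();
+   * }
+   * class Impl extends Base {
+   *   // Thread B: touching Impl first locks Impl, then must initialize its superclass Base.
+   *   static final Object MARKER = new Object();
+   * }
+   * }</pre>
+   *
+   * <p>Hosting the registry in this separate holder keeps the plugins' superclass out of that cycle.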
+ */ + public static final Map standardPlugins; + + static { + HashMap map = new HashMap<>(30, 1); + map.put(LuceneQParserPlugin.NAME, new LuceneQParserPlugin()); + map.put(FunctionQParserPlugin.NAME, new FunctionQParserPlugin()); + map.put(PrefixQParserPlugin.NAME, new PrefixQParserPlugin()); + map.put(BoostQParserPlugin.NAME, new BoostQParserPlugin()); + map.put(DisMaxQParserPlugin.NAME, new DisMaxQParserPlugin()); + map.put(ExtendedDismaxQParserPlugin.NAME, new ExtendedDismaxQParserPlugin()); + map.put(FieldQParserPlugin.NAME, new FieldQParserPlugin()); + map.put(RawQParserPlugin.NAME, new RawQParserPlugin()); + map.put(TermQParserPlugin.NAME, new TermQParserPlugin()); + map.put(TermsQParserPlugin.NAME, new TermsQParserPlugin()); + map.put(NestedQParserPlugin.NAME, new NestedQParserPlugin()); + map.put(FunctionRangeQParserPlugin.NAME, new FunctionRangeQParserPlugin()); + map.put(SpatialFilterQParserPlugin.NAME, new SpatialFilterQParserPlugin()); + map.put(SpatialBoxQParserPlugin.NAME, new SpatialBoxQParserPlugin()); + map.put(JoinQParserPlugin.NAME, new JoinQParserPlugin()); + map.put(SurroundQParserPlugin.NAME, new SurroundQParserPlugin()); + map.put(SwitchQParserPlugin.NAME, new SwitchQParserPlugin()); + map.put(MaxScoreQParserPlugin.NAME, new MaxScoreQParserPlugin()); + map.put(BlockJoinParentQParserPlugin.NAME, new BlockJoinParentQParserPlugin()); + map.put(BlockJoinChildQParserPlugin.NAME, new BlockJoinChildQParserPlugin()); + map.put(FiltersQParserPlugin.NAME, new FiltersQParserPlugin()); + map.put(CollapsingQParserPlugin.NAME, new CollapsingQParserPlugin()); + map.put(SimpleQParserPlugin.NAME, new SimpleQParserPlugin()); + map.put(ComplexPhraseQParserPlugin.NAME, new ComplexPhraseQParserPlugin()); + map.put(ReRankQParserPlugin.NAME, new ReRankQParserPlugin()); + map.put(ExportQParserPlugin.NAME, new ExportQParserPlugin()); + map.put(MLTQParserPlugin.NAME, new MLTQParserPlugin()); + map.put(MLTContentQParserPlugin.NAME, new MLTContentQParserPlugin()); + map.put(HashQParserPlugin.NAME, new HashQParserPlugin()); + map.put(GraphQParserPlugin.NAME, new GraphQParserPlugin()); + map.put(XmlQParserPlugin.NAME, new XmlQParserPlugin()); + map.put(GraphTermsQParserPlugin.NAME, new GraphTermsQParserPlugin()); + map.put(IGainTermsQParserPlugin.NAME, new IGainTermsQParserPlugin()); + map.put(TextLogisticRegressionQParserPlugin.NAME, new TextLogisticRegressionQParserPlugin()); + map.put(SignificantTermsQParserPlugin.NAME, new SignificantTermsQParserPlugin()); + map.put(PayloadScoreQParserPlugin.NAME, new PayloadScoreQParserPlugin()); + map.put(PayloadCheckQParserPlugin.NAME, new PayloadCheckQParserPlugin()); + map.put(BoolQParserPlugin.NAME, new BoolQParserPlugin()); + map.put(MinHashQParserPlugin.NAME, new MinHashQParserPlugin()); + map.put(HashRangeQParserPlugin.NAME, new HashRangeQParserPlugin()); + map.put(RankQParserPlugin.NAME, new RankQParserPlugin()); + map.put(KnnQParserPlugin.NAME, new KnnQParserPlugin()); + map.put(VectorSimilarityQParserPlugin.NAME, new VectorSimilarityQParserPlugin()); + + standardPlugins = Collections.unmodifiableMap(map); + } +} diff --git a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java index 130cf3f9469..f738a153df4 100644 --- a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java +++ b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java @@ -36,8 +36,8 @@ * new default QParser or ValueSourceParser, you will most likely get a failure from {@link * 
#testParserCoverage} until you add a new test method to this class. * - * @see ValueSourceParser#standardValueSourceParsers - * @see QParserPlugin#standardPlugins + * @see ValueSourceParsers#standardValueSourceParsers + * @see QParserPlugins#standardPlugins * @see QueryUtils */ public class QueryEqualityTest extends SolrTestCaseJ4 { @@ -54,13 +54,13 @@ public static void beforeClass() throws Exception { public static void afterClassParserCoverageTest() { if (!doAssertParserCoverage) return; - for (String name : QParserPlugin.standardPlugins.keySet()) { + for (String name : QParserPlugins.standardPlugins.keySet()) { assertTrue( "testParserCoverage was run w/o any other method explicitly testing qparser: " + name, qParsersTested.contains(name)); } - for (final String name : ValueSourceParser.standardValueSourceParsers.keySet()) { + for (final String name : ValueSourceParsers.standardValueSourceParsers.keySet()) { assertTrue( "testParserCoverage was run w/o any other method explicitly testing val parser: " + name, valParsersTested.contains(name)); diff --git a/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java b/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java index 12c8ec9dfa0..8a37fa6aed3 100644 --- a/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java +++ b/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java @@ -59,14 +59,14 @@ public void testQParserBackwardsCompatibility() { assertEquals("significantTerms", SignificantTermsQParserPlugin.NAME); assertEquals( SignificantTermsQParserPlugin.class, - QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME).getClass()); + QParserPlugins.standardPlugins.get(SignificantTermsQParserPlugin.NAME).getClass()); } @Test public void testEmptyCollectionDoesNotThrow() throws Exception { SolrCore emptyCore = h.getCore(); QParserPlugin qParserPlugin = - QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME); + QParserPlugins.standardPlugins.get(SignificantTermsQParserPlugin.NAME); Map params = new HashMap<>(); params.put("field", "cat"); QParser parser = @@ -100,7 +100,7 @@ public void testCollectionWithDocuments() throws Exception { addTestDocs(dataCore); QParserPlugin qParserPlugin = - QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME); + QParserPlugins.standardPlugins.get(SignificantTermsQParserPlugin.NAME); Map params = new HashMap<>(); params.put("field", "cat"); QParser parser = diff --git a/solr/core/src/test/org/apache/solr/search/TestStandardQParsers.java b/solr/core/src/test/org/apache/solr/search/TestStandardQParsers.java index b7517e1c620..3cc84d4108e 100644 --- a/solr/core/src/test/org/apache/solr/search/TestStandardQParsers.java +++ b/solr/core/src/test/org/apache/solr/search/TestStandardQParsers.java @@ -27,12 +27,12 @@ /** * Check standard query parsers for class loading problems during initialization (NAME field is * final and static). Because every query plugin extend {@link org.apache.solr.search.QParserPlugin} - * and contains own instance of {@link org.apache.solr.search.QParserPlugin#standardPlugins}, There + * and contains own instance of {@link org.apache.solr.search.QParserPlugins#standardPlugins}, There * are a cyclic dependencies of static fields between plugins and order of initialization can be * wrong if NAME field is not final. This leads to NPEs during Solr startup. 
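 * (A plugin's {@code NAME} must be a {@code static final} compile-time constant because such
 * constants are inlined at the use site; reading them to key the registry does not trigger the
 * plugin class's initialization and therefore cannot join an initialization cycle.)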
* * @see SOLR-5526 - * @see org.apache.solr.search.QParserPlugin#standardPlugins + * @see org.apache.solr.search.QParserPlugins#standardPlugins */ public class TestStandardQParsers extends SolrTestCase { /** Field name of constant mandatory for query parser plugin. */ @@ -40,16 +40,16 @@ public class TestStandardQParsers extends SolrTestCase { /** * Test standard query parsers registered in {@link - * org.apache.solr.search.QParserPlugin#standardPlugins} have NAME field which is final, static, + * org.apache.solr.search.QParserPlugins#standardPlugins} have NAME field which is final, static, * and matches the registered name. */ @Test public void testRegisteredName() throws Exception { - List notStatic = new ArrayList<>(QParserPlugin.standardPlugins.size()); - List notFinal = new ArrayList<>(QParserPlugin.standardPlugins.size()); - List mismatch = new ArrayList<>(QParserPlugin.standardPlugins.size()); + List notStatic = new ArrayList<>(QParserPlugins.standardPlugins.size()); + List notFinal = new ArrayList<>(QParserPlugins.standardPlugins.size()); + List mismatch = new ArrayList<>(QParserPlugins.standardPlugins.size()); - for (Map.Entry pair : QParserPlugin.standardPlugins.entrySet()) { + for (Map.Entry pair : QParserPlugins.standardPlugins.entrySet()) { String regName = pair.getKey(); Class clazz = pair.getValue().getClass(); ; @@ -77,7 +77,7 @@ public void testRegisteredName() throws Exception { assertTrue( "DEFAULT_QTYPE is not in the standard set of registered names: " + QParserPlugin.DEFAULT_QTYPE, - QParserPlugin.standardPlugins.keySet().contains(QParserPlugin.DEFAULT_QTYPE)); + QParserPlugins.standardPlugins.keySet().contains(QParserPlugin.DEFAULT_QTYPE)); } /** Test that "lucene" is the default query parser. */ From 137352751e1e6e42f2da3830a1a7c1dc5efe0f23 Mon Sep 17 00:00:00 2001 From: Christos Malliaridis Date: Wed, 20 Nov 2024 22:51:09 +0100 Subject: [PATCH 6/7] Fix ClassInitializationDeadlock warning for ValueSourceParser --- .../java/org/apache/solr/core/SolrCore.java | 3 +- .../apache/solr/search/FunctionQParser.java | 6 +- .../apache/solr/search/ValueSourceParser.java | 1434 +------------ .../solr/search/ValueSourceParsers.java | 1875 +++++++++++++++++ 4 files changed, 1912 insertions(+), 1406 deletions(-) create mode 100644 solr/core/src/java/org/apache/solr/search/ValueSourceParsers.java diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index a3c97d50edb..3f2b70f1f05 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -148,6 +148,7 @@ import org.apache.solr.search.SolrFieldCacheBean; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.ValueSourceParser; +import org.apache.solr.search.ValueSourceParsers; import org.apache.solr.search.stats.LocalStatsCache; import org.apache.solr.search.stats.StatsCache; import org.apache.solr.update.DefaultSolrCoreState; @@ -1130,7 +1131,7 @@ protected SolrCore( initWriters(); qParserPlugins.init(QParserPlugins.standardPlugins, this); - valueSourceParsers.init(ValueSourceParser.standardValueSourceParsers, this); + valueSourceParsers.init(ValueSourceParsers.standardValueSourceParsers, this); transformerFactories.init(TransformerFactory.defaultFactories, this); loadSearchComponents(); updateProcessors.init(Collections.emptyMap(), this); diff --git a/solr/core/src/java/org/apache/solr/search/FunctionQParser.java 
b/solr/core/src/java/org/apache/solr/search/FunctionQParser.java index 97b1fb35d22..6d51397ba09 100644 --- a/solr/core/src/java/org/apache/solr/search/FunctionQParser.java +++ b/solr/core/src/java/org/apache/solr/search/FunctionQParser.java @@ -398,7 +398,7 @@ protected ValueSource parseValueSource(int flags) throws SyntaxError { if ((ch >= '0' && ch <= '9') || ch == '.' || ch == '+' || ch == '-') { Number num = sp.getNumber(); if (num instanceof Long) { - valueSource = new ValueSourceParser.LongConstValueSource(num.longValue()); + valueSource = new ValueSourceParsers.LongConstValueSource(num.longValue()); } else if (num instanceof Double) { valueSource = new DoubleConstValueSource(num.doubleValue()); } else { @@ -482,9 +482,9 @@ protected ValueSource parseValueSource(int flags) throws SyntaxError { sp.expect(")"); } else { if ("true".equals(id)) { - valueSource = ValueSourceParser.BoolConstValueSource.TRUE; + valueSource = ValueSourceParsers.BoolConstValueSource.TRUE; } else if ("false".equals(id)) { - valueSource = ValueSourceParser.BoolConstValueSource.FALSE; + valueSource = ValueSourceParsers.BoolConstValueSource.FALSE; } else { if ((flags & FLAG_USE_FIELDNAME_SOURCE) != 0) { // Don't try to create a ValueSource for the field, just use a placeholder. diff --git a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java index 2cd071a8938..2cac0bcb7d5 100644 --- a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java +++ b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java @@ -17,108 +17,24 @@ package org.apache.solr.search; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.Date; -import java.util.HashMap; -import java.util.List; import java.util.Map; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; -import org.apache.lucene.queries.function.FunctionScoreQuery; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.docvalues.BoolDocValues; import org.apache.lucene.queries.function.docvalues.DoubleDocValues; import org.apache.lucene.queries.function.docvalues.LongDocValues; import org.apache.lucene.queries.function.valuesource.ConstNumberSource; -import org.apache.lucene.queries.function.valuesource.ConstValueSource; -import org.apache.lucene.queries.function.valuesource.DefFunction; -import org.apache.lucene.queries.function.valuesource.DivFloatFunction; -import org.apache.lucene.queries.function.valuesource.DocFreqValueSource; -import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource; import org.apache.lucene.queries.function.valuesource.DualFloatFunction; -import org.apache.lucene.queries.function.valuesource.IDFValueSource; -import org.apache.lucene.queries.function.valuesource.IfFunction; -import org.apache.lucene.queries.function.valuesource.JoinDocFreqValueSource; -import org.apache.lucene.queries.function.valuesource.LinearFloatFunction; -import org.apache.lucene.queries.function.valuesource.LiteralValueSource; -import org.apache.lucene.queries.function.valuesource.MaxDocValueSource; -import org.apache.lucene.queries.function.valuesource.MaxFloatFunction; -import org.apache.lucene.queries.function.valuesource.MinFloatFunction; -import org.apache.lucene.queries.function.valuesource.MultiBoolFunction; -import 
org.apache.lucene.queries.function.valuesource.MultiValueSource; -import org.apache.lucene.queries.function.valuesource.NormValueSource; -import org.apache.lucene.queries.function.valuesource.NumDocsValueSource; -import org.apache.lucene.queries.function.valuesource.ProductFloatFunction; -import org.apache.lucene.queries.function.valuesource.QueryValueSource; -import org.apache.lucene.queries.function.valuesource.RangeMapFloatFunction; -import org.apache.lucene.queries.function.valuesource.ReciprocalFloatFunction; -import org.apache.lucene.queries.function.valuesource.ScaleFloatFunction; -import org.apache.lucene.queries.function.valuesource.SimpleBoolFunction; -import org.apache.lucene.queries.function.valuesource.SimpleFloatFunction; import org.apache.lucene.queries.function.valuesource.SingleFunction; -import org.apache.lucene.queries.function.valuesource.SumFloatFunction; -import org.apache.lucene.queries.function.valuesource.SumTotalTermFreqValueSource; -import org.apache.lucene.queries.function.valuesource.TFValueSource; -import org.apache.lucene.queries.function.valuesource.TermFreqValueSource; -import org.apache.lucene.queries.function.valuesource.TotalTermFreqValueSource; -import org.apache.lucene.queries.function.valuesource.VectorValueSource; -import org.apache.lucene.queries.payloads.PayloadDecoder; -import org.apache.lucene.queries.payloads.PayloadFunction; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spell.JaroWinklerDistance; -import org.apache.lucene.search.spell.LevenshteinDistance; -import org.apache.lucene.search.spell.NGramDistance; -import org.apache.lucene.search.spell.StringDistance; -import org.apache.lucene.util.BytesRefBuilder; import org.apache.solr.common.SolrException; import org.apache.solr.request.SolrRequestInfo; -import org.apache.solr.schema.CurrencyFieldType; -import org.apache.solr.schema.FieldType; -import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; -import org.apache.solr.schema.StrField; -import org.apache.solr.schema.TextField; -import org.apache.solr.search.facet.AggValueSource; -import org.apache.solr.search.facet.AvgAgg; -import org.apache.solr.search.facet.CountAgg; -import org.apache.solr.search.facet.CountValsAgg; -import org.apache.solr.search.facet.HLLAgg; -import org.apache.solr.search.facet.MinMaxAgg; -import org.apache.solr.search.facet.MissingAgg; -import org.apache.solr.search.facet.PercentileAgg; -import org.apache.solr.search.facet.RelatednessAgg; -import org.apache.solr.search.facet.StddevAgg; -import org.apache.solr.search.facet.SumAgg; -import org.apache.solr.search.facet.SumsqAgg; -import org.apache.solr.search.facet.UniqueAgg; -import org.apache.solr.search.facet.UniqueBlockFieldAgg; -import org.apache.solr.search.facet.UniqueBlockQueryAgg; -import org.apache.solr.search.facet.VarianceAgg; -import org.apache.solr.search.function.CollapseScoreFunction; -import org.apache.solr.search.function.ConcatStringFunction; -import org.apache.solr.search.function.DualDoubleFunction; -import org.apache.solr.search.function.EqualFunction; -import org.apache.solr.search.function.OrdFieldSource; -import org.apache.solr.search.function.ReverseOrdFieldSource; -import org.apache.solr.search.function.SolrComparisonBoolFunction; -import org.apache.solr.search.function.distance.GeoDistValueSourceParser; -import 
org.apache.solr.search.function.distance.GeohashFunction; -import org.apache.solr.search.function.distance.GeohashHaversineFunction; -import org.apache.solr.search.function.distance.HaversineFunction; -import org.apache.solr.search.function.distance.SquaredEuclideanFunction; -import org.apache.solr.search.function.distance.StringDistanceFunction; -import org.apache.solr.search.function.distance.VectorDistanceFunction; -import org.apache.solr.search.join.ChildFieldValueSourceParser; import org.apache.solr.util.DateMathParser; -import org.apache.solr.util.PayloadUtils; import org.apache.solr.util.plugin.NamedListInitializedPlugin; -import org.locationtech.spatial4j.distance.DistanceUtils; /** * A factory that parses user queries to generate ValueSource instances. Intended usage is to create @@ -128,1329 +44,19 @@ public abstract class ValueSourceParser implements NamedListInitializedPlugin { /** Parse the user input into a ValueSource. */ public abstract ValueSource parse(FunctionQParser fp) throws SyntaxError; - /** standard functions supported by default, filled in static class initialization */ - private static final Map standardVSParsers = new HashMap<>(); - - /** standard functions supported by default */ - public static final Map standardValueSourceParsers = - Collections.unmodifiableMap(standardVSParsers); - /** - * Adds a new parser for the name and returns any existing one that was overridden. This is not - * thread safe. + * standard functions supported by default + * + * @deprecated Use {@link ValueSourceParsers#standardValueSourceParsers} instead. */ - private static ValueSourceParser addParser(String name, ValueSourceParser p) { - return standardVSParsers.put(name, p); - } + @Deprecated(since = "9.8", forRemoval = true) + public static final Map standardValueSourceParsers = + ValueSourceParsers.standardValueSourceParsers; /** - * Adds a new parser for the name and returns any existing one that was overridden. This is not - * thread safe. + * @deprecated Use {@link ValueSourceParsers.DateValueSourceParser} instead. 
*/ - private static ValueSourceParser addParser(NamedParser p) { - return standardVSParsers.put(p.name(), p); - } - - private static void alias(String source, String dest) { - standardVSParsers.put(dest, standardVSParsers.get(source)); - } - - static { - addParser( - "testfunc", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - final ValueSource source = fp.parseValueSource(); - return new TestValueSource(source); - } - }); - addParser( - "ord", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - String field = fp.parseId(); - return new OrdFieldSource(field); - } - }); - addParser( - "literal", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new LiteralValueSource(fp.parseArg()); - } - }); - addParser( - "threadid", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new LongConstValueSource(Thread.currentThread().threadId()); - } - }); - addParser( - "sleep", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - int ms = fp.parseInt(); - ValueSource source = fp.parseValueSource(); - try { - Thread.sleep(ms); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - return source; - } - }); - addParser( - "rord", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - String field = fp.parseId(); - return new ReverseOrdFieldSource(field); - } - }); - addParser( - "top", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - // top(vs) is now a no-op - ValueSource source = fp.parseValueSource(); - return source; - } - }); - addParser( - "linear", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource source = fp.parseValueSource(); - float slope = fp.parseFloat(); - float intercept = fp.parseFloat(); - return new LinearFloatFunction(source, slope, intercept); - } - }); - addParser( - "recip", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource source = fp.parseValueSource(); - float m = fp.parseFloat(); - float a = fp.parseFloat(); - float b = fp.parseFloat(); - return new ReciprocalFloatFunction(source, m, a, b); - } - }); - addParser( - "scale", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource source = fp.parseValueSource(); - float min = fp.parseFloat(); - float max = fp.parseFloat(); - return new ScaleFloatFunction(source, min, max); - } - }); - addParser( - "div", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource a = fp.parseValueSource(); - ValueSource b = fp.parseValueSource(); - return new DivFloatFunction(a, b); - } - }); - addParser( - "mod", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource a = fp.parseValueSource(); - ValueSource b = fp.parseValueSource(); - return new DualDoubleFunction(a, b) { - @Override - protected String name() { - return "mod"; - } - - @Override - protected double func(int doc, FunctionValues aVals, FunctionValues bVals) - throws IOException { - return aVals.doubleVal(doc) % 
bVals.doubleVal(doc); - } - }; - } - }); - addParser( - "map", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource source = fp.parseValueSource(); - float min = fp.parseFloat(); - float max = fp.parseFloat(); - ValueSource target = fp.parseValueSource(); - ValueSource def = fp.hasMoreArguments() ? fp.parseValueSource() : null; - return new RangeMapFloatFunction(source, min, max, target, def); - } - }); - - addParser( - "abs", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource source = fp.parseValueSource(); - return new SimpleFloatFunction(source) { - @Override - protected String name() { - return "abs"; - } - - @Override - protected float func(int doc, FunctionValues vals) throws IOException { - return Math.abs(vals.floatVal(doc)); - } - }; - } - }); - addParser( - "cscore", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new CollapseScoreFunction(); - } - }); - addParser( - "sum", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new SumFloatFunction(sources.toArray(new ValueSource[0])); - } - }); - alias("sum", "add"); - addParser("vectorSimilarity", new VectorSimilaritySourceParser()); - addParser( - "product", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new ProductFloatFunction(sources.toArray(new ValueSource[0])); - } - }); - alias("product", "mul"); - - addParser( - "sub", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource a = fp.parseValueSource(); - ValueSource b = fp.parseValueSource(); - return new DualFloatFunction(a, b) { - @Override - protected String name() { - return "sub"; - } - - @Override - protected float func(int doc, FunctionValues aVals, FunctionValues bVals) - throws IOException { - return aVals.floatVal(doc) - bVals.floatVal(doc); - } - }; - } - }); - addParser( - "vector", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new VectorValueSource(fp.parseValueSourceList()); - } - }); - addParser( - "query", - new ValueSourceParser() { - // boost(query($q),rating) - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - Query q = fp.parseNestedQuery(); - float defVal = 0.0f; - if (fp.hasMoreArguments()) { - defVal = fp.parseFloat(); - } - return new QueryValueSource(q, defVal); - } - }); - addParser( - "boost", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - Query q = fp.parseNestedQuery(); - ValueSource vs = fp.parseValueSource(); - return new QueryValueSource( - FunctionScoreQuery.boostByValue(q, vs.asDoubleValuesSource()), 0.0f); - } - }); - addParser( - "joindf", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - String f0 = fp.parseArg(); - String qf = fp.parseArg(); - return new JoinDocFreqValueSource(f0, qf); - } - }); - - addParser("geodist", new GeoDistValueSourceParser()); - - addParser( - "hsin", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - - double radius = fp.parseDouble(); - // 
SOLR-2114, make the convert flag required, since the parser doesn't support much in - // the way of lookahead or the ability to convert a String into a ValueSource - boolean convert = Boolean.parseBoolean(fp.parseArg()); - - MultiValueSource pv1; - MultiValueSource pv2; - - ValueSource one = fp.parseValueSource(); - ValueSource two = fp.parseValueSource(); - if (fp.hasMoreArguments()) { - pv1 = new VectorValueSource(Arrays.asList(one, two)); // x1, y1 - pv2 = - new VectorValueSource( - Arrays.asList(fp.parseValueSource(), fp.parseValueSource())); // x2, y2 - } else { - // check to see if we have multiValue source - if (one instanceof MultiValueSource && two instanceof MultiValueSource) { - pv1 = (MultiValueSource) one; - pv2 = (MultiValueSource) two; - } else { - throw new SolrException( - SolrException.ErrorCode.BAD_REQUEST, - "Input must either be 2 MultiValueSources, or there must be 4 ValueSources"); - } - } - - return new HaversineFunction(pv1, pv2, radius, convert); - } - }); - - addParser( - "ghhsin", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - double radius = fp.parseDouble(); - - ValueSource gh1 = fp.parseValueSource(); - ValueSource gh2 = fp.parseValueSource(); - - return new GeohashHaversineFunction(gh1, gh2, radius); - } - }); - - addParser( - "geohash", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - - ValueSource lat = fp.parseValueSource(); - ValueSource lon = fp.parseValueSource(); - - return new GeohashFunction(lat, lon); - } - }); - addParser( - "strdist", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - - ValueSource str1 = fp.parseValueSource(); - ValueSource str2 = fp.parseValueSource(); - String distClass = fp.parseArg(); - - StringDistance dist = null; - if (distClass.equalsIgnoreCase("jw")) { - dist = new JaroWinklerDistance(); - } else if (distClass.equalsIgnoreCase("edit")) { - dist = new LevenshteinDistance(); - } else if (distClass.equalsIgnoreCase("ngram")) { - int ngram = 2; - if (fp.hasMoreArguments()) { - ngram = fp.parseInt(); - } - dist = new NGramDistance(ngram); - } else { - dist = - fp.req.getCore().getResourceLoader().newInstance(distClass, StringDistance.class); - } - return new StringDistanceFunction(str1, str2, dist); - } - }); - addParser( - "field", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - - String fieldName = fp.parseArg(); - SchemaField f = fp.getReq().getSchema().getField(fieldName); - if (fp.hasMoreArguments()) { - // multivalued selector option - String s = fp.parseArg(); - FieldType.MultiValueSelector selector = FieldType.MultiValueSelector.lookup(s); - if (null == selector) { - throw new SolrException( - SolrException.ErrorCode.BAD_REQUEST, - "Multi-Valued field selector '" + s + "' not supported"); - } - return f.getType().getSingleValueSource(selector, f, fp); - } - // simple field ValueSource - return f.getType().getValueSource(f, fp); - } - }); - addParser( - "currency", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - - String fieldName = fp.parseArg(); - SchemaField f = fp.getReq().getSchema().getField(fieldName); - if (!(f.getType() instanceof CurrencyFieldType ft)) { - throw new SolrException( - SolrException.ErrorCode.BAD_REQUEST, - "Currency function input must be the name of a CurrencyFieldType: " + fieldName); - } - 
String code = fp.hasMoreArguments() ? fp.parseArg() : null; - return ft.getConvertedValueSource(code, ft.getValueSource(f, fp)); - } - }); - - addParser( - new DoubleParser("rad") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return vals.doubleVal(doc) * DistanceUtils.DEGREES_TO_RADIANS; - } - }); - addParser( - new DoubleParser("deg") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return vals.doubleVal(doc) * DistanceUtils.RADIANS_TO_DEGREES; - } - }); - addParser( - new DoubleParser("sqrt") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.sqrt(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("cbrt") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.cbrt(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("log") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.log10(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("ln") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.log(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("exp") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.exp(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("sin") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.sin(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("cos") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.cos(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("tan") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.tan(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("asin") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.asin(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("acos") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.acos(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("atan") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.atan(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("sinh") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.sinh(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("cosh") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.cosh(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("tanh") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.tanh(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("ceil") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.ceil(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("floor") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.floor(vals.doubleVal(doc)); - } - }); - addParser( - new DoubleParser("rint") { - @Override - public double func(int doc, FunctionValues vals) throws IOException { - return Math.rint(vals.doubleVal(doc)); - } - }); - addParser( 
- new Double2Parser("pow") { - @Override - public double func(int doc, FunctionValues a, FunctionValues b) throws IOException { - return Math.pow(a.doubleVal(doc), b.doubleVal(doc)); - } - }); - addParser( - new Double2Parser("hypot") { - @Override - public double func(int doc, FunctionValues a, FunctionValues b) throws IOException { - return Math.hypot(a.doubleVal(doc), b.doubleVal(doc)); - } - }); - addParser( - new Double2Parser("atan2") { - @Override - public double func(int doc, FunctionValues a, FunctionValues b) throws IOException { - return Math.atan2(a.doubleVal(doc), b.doubleVal(doc)); - } - }); - addParser( - "max", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new MaxFloatFunction(sources.toArray(new ValueSource[0])); - } - }); - addParser( - "min", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new MinFloatFunction(sources.toArray(new ValueSource[0])); - } - }); - - addParser( - "sqedist", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - MVResult mvr = getMultiValueSources(sources); - - return new SquaredEuclideanFunction(mvr.mv1, mvr.mv2); - } - }); - - addParser( - "dist", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - float power = fp.parseFloat(); - List sources = fp.parseValueSourceList(); - MVResult mvr = getMultiValueSources(sources); - return new VectorDistanceFunction(power, mvr.mv1, mvr.mv2); - } - }); - addParser("ms", new DateValueSourceParser()); - - addParser( - "pi", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) { - return new DoubleConstValueSource(Math.PI); - } - }); - addParser( - "e", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) { - return new DoubleConstValueSource(Math.E); - } - }); - - addParser( - "docfreq", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - TInfo tinfo = parseTerm(fp); - return new DocFreqValueSource( - tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); - } - }); - - addParser( - "totaltermfreq", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - TInfo tinfo = parseTerm(fp); - return new TotalTermFreqValueSource( - tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); - } - }); - alias("totaltermfreq", "ttf"); - - addParser( - "sumtotaltermfreq", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - String field = fp.parseArg(); - return new SumTotalTermFreqValueSource(field); - } - }); - alias("sumtotaltermfreq", "sttf"); - - addParser( - "idf", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - TInfo tinfo = parseTerm(fp); - return new IDFValueSource( - tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); - } - }); - - addParser( - "termfreq", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - TInfo tinfo = parseTerm(fp); - return new TermFreqValueSource( - tinfo.field, tinfo.val, tinfo.indexedField, 
tinfo.indexedBytes.get()); - } - }); - - addParser( - "tf", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - TInfo tinfo = parseTerm(fp); - return new TFValueSource( - tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); - } - }); - - addParser( - "norm", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - String field = fp.parseArg(); - return new NormValueSource(field); - } - }); - - addParser( - "maxdoc", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) { - return new MaxDocValueSource(); - } - }); - - addParser( - "numdocs", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) { - return new NumDocsValueSource(); - } - }); - - addParser( - "payload", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - // payload(field,value[,default, ['min|max|average|first']]) - // defaults to "average" and 0.0 default value - - // would have made this parser a new separate class and registered it, but - // this handy method is private :/ - TInfo tinfo = parseTerm(fp); - - ValueSource defaultValueSource; - if (fp.hasMoreArguments()) { - defaultValueSource = fp.parseValueSource(); - } else { - defaultValueSource = new ConstValueSource(0.0f); - } - - PayloadFunction payloadFunction = null; - String func = "average"; - if (fp.hasMoreArguments()) { - func = fp.parseArg(); - } - payloadFunction = PayloadUtils.getPayloadFunction(func); - - // Support func="first" by payloadFunction=null - if (payloadFunction == null && !"first".equals(func)) { - // not "first" (or average, min, or max) - throw new SolrException( - SolrException.ErrorCode.BAD_REQUEST, "Invalid payload function: " + func); - } - - IndexSchema schema = fp.getReq().getCore().getLatestSchema(); - PayloadDecoder decoder = schema.getPayloadDecoder(tinfo.field); - - if (decoder == null) { - throw new SolrException( - SolrException.ErrorCode.BAD_REQUEST, - "No payload decoder found for field: " + tinfo.field); - } - - return new FloatPayloadValueSource( - tinfo.field, - tinfo.val, - tinfo.indexedField, - tinfo.indexedBytes.get(), - decoder, - payloadFunction, - defaultValueSource); - } - }); - - addParser( - "true", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) { - return BoolConstValueSource.TRUE; - } - }); - - addParser( - "false", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) { - return BoolConstValueSource.FALSE; - } - }); - - addParser( - "exists", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource vs = fp.parseValueSource(); - return new SimpleBoolFunction(vs) { - @Override - protected String name() { - return "exists"; - } - - @Override - protected boolean func(int doc, FunctionValues vals) throws IOException { - return vals.exists(doc); - } - }; - } - }); - - addParser( - "isnan", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource vs = fp.parseValueSource(); - return new SimpleBoolFunction(vs) { - @Override - protected String name() { - return "isnan"; - } - - @Override - protected boolean func(int doc, FunctionValues vals) throws IOException { - return Float.isNaN(vals.floatVal(doc)); - } - }; - } - }); - - addParser( - "not", - new ValueSourceParser() { - 
@Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource vs = fp.parseValueSource(); - return new SimpleBoolFunction(vs) { - @Override - protected boolean func(int doc, FunctionValues vals) throws IOException { - return !vals.boolVal(doc); - } - - @Override - protected String name() { - return "not"; - } - }; - } - }); - - addParser( - "and", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new MultiBoolFunction(sources) { - @Override - protected String name() { - return "and"; - } - - @Override - protected boolean func(int doc, FunctionValues[] vals) throws IOException { - for (FunctionValues dv : vals) if (!dv.boolVal(doc)) return false; - return true; - } - }; - } - }); - - addParser( - "or", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new MultiBoolFunction(sources) { - @Override - protected String name() { - return "or"; - } - - @Override - protected boolean func(int doc, FunctionValues[] vals) throws IOException { - for (FunctionValues dv : vals) if (dv.boolVal(doc)) return true; - return false; - } - }; - } - }); - - addParser( - "xor", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new MultiBoolFunction(sources) { - @Override - protected String name() { - return "xor"; - } - - @Override - protected boolean func(int doc, FunctionValues[] vals) throws IOException { - int nTrue = 0, nFalse = 0; - for (FunctionValues dv : vals) { - if (dv.boolVal(doc)) nTrue++; - else nFalse++; - } - return nTrue != 0 && nFalse != 0; - } - }; - } - }); - - addParser( - "if", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource ifValueSource = fp.parseValueSource(); - ValueSource trueValueSource = fp.parseValueSource(); - ValueSource falseValueSource = fp.parseValueSource(); - - return new IfFunction(ifValueSource, trueValueSource, falseValueSource); - } - }); - - addParser( - "gt", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource lhsValSource = fp.parseValueSource(); - ValueSource rhsValSource = fp.parseValueSource(); - - return new SolrComparisonBoolFunction( - lhsValSource, rhsValSource, "gt", (cmp) -> cmp > 0); - } - }); - - addParser( - "lt", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource lhsValSource = fp.parseValueSource(); - ValueSource rhsValSource = fp.parseValueSource(); - - return new SolrComparisonBoolFunction( - lhsValSource, rhsValSource, "lt", (cmp) -> cmp < 0); - } - }); - - addParser( - "gte", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource lhsValSource = fp.parseValueSource(); - ValueSource rhsValSource = fp.parseValueSource(); - - return new SolrComparisonBoolFunction( - lhsValSource, rhsValSource, "gte", (cmp) -> cmp >= 0); - } - }); - - addParser( - "lte", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource lhsValSource = fp.parseValueSource(); - ValueSource rhsValSource = fp.parseValueSource(); - - return new SolrComparisonBoolFunction( - lhsValSource, 
rhsValSource, "lte", (cmp) -> cmp <= 0); - } - }); - - addParser( - "eq", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - ValueSource lhsValSource = fp.parseValueSource(); - ValueSource rhsValSource = fp.parseValueSource(); - - return new EqualFunction(lhsValSource, rhsValSource, "eq"); - } - }); - - addParser( - "def", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new DefFunction(fp.parseValueSourceList()); - } - }); - - addParser( - "concat", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List sources = fp.parseValueSourceList(); - return new ConcatStringFunction(sources.toArray(new ValueSource[0])); - } - }); - - addParser( - "agg", - new ValueSourceParser() { - @Override - public AggValueSource parse(FunctionQParser fp) throws SyntaxError { - return fp.parseAgg(FunctionQParser.FLAG_DEFAULT); - } - }); - - addParser( - "agg_count", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new CountAgg(); - } - }); - - addParser( - "agg_unique", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new UniqueAgg(fp.parseArg()); - } - }); - - addParser( - "agg_uniqueBlock", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - if (fp.sp.peek() == QueryParsing.LOCALPARAM_START.charAt(0)) { - return new UniqueBlockQueryAgg(fp.parseNestedQuery()); - } - return new UniqueBlockFieldAgg(fp.parseArg()); - } - }); - - addParser( - "agg_hll", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new HLLAgg(fp.parseArg()); - } - }); - - addParser( - "agg_sum", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new SumAgg( - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_avg", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new AvgAgg( - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_sumsq", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new SumsqAgg( - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_variance", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new VarianceAgg( - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_stddev", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new StddevAgg( - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_missing", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new MissingAgg( - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_countvals", 
- new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new CountValsAgg( - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - /* - addParser("agg_multistat", new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return null; - } - }); - */ - - addParser( - "agg_min", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new MinMaxAgg( - "min", - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_max", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - return new MinMaxAgg( - "max", - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); - } - }); - - addParser( - "agg_percentile", - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - List percentiles = new ArrayList<>(); - ValueSource vs = - fp.parseValueSource( - FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE); - while (fp.hasMoreArguments()) { - double val = fp.parseDouble(); - if (val < 0 || val > 100) { - throw new SyntaxError( - "requested percentile must be between 0 and 100. got " + val); - } - percentiles.add(val); - } - - if (percentiles.isEmpty()) { - throw new SyntaxError( - "expected percentile(valsource,percent1[,percent2]*) EXAMPLE:percentile(myfield,50)"); - } - - return new PercentileAgg(vs, percentiles); - } - }); - - addParser( - "agg_" + RelatednessAgg.NAME, - new ValueSourceParser() { - @Override - public ValueSource parse(FunctionQParser fp) throws SyntaxError { - // TODO: (fore & back)-ground should be optional -- use hasMoreArguments - // if only one arg, assume it's the foreground - // (background is the one that will most commonly just be "*:*") - // see notes in RelatednessAgg constructor about why we don't do this yet - RelatednessAgg agg = new RelatednessAgg(fp.parseNestedQuery(), fp.parseNestedQuery()); - agg.setOpts(fp); - return agg; - } - }); - - addParser("childfield", new ChildFieldValueSourceParser()); - } - - /////////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////// - - private static TInfo parseTerm(FunctionQParser fp) throws SyntaxError { - TInfo tinfo = new TInfo(); - - tinfo.indexedField = tinfo.field = fp.parseArg(); - tinfo.val = fp.parseArg(); - tinfo.indexedBytes = new BytesRefBuilder(); - - FieldType ft = fp.getReq().getSchema().getFieldTypeNoEx(tinfo.field); - if (ft == null) ft = new StrField(); - - if (ft instanceof TextField) { - // need to do analysis on the term - String indexedVal = tinfo.val; - Query q = - ft.getFieldQuery(fp, fp.getReq().getSchema().getFieldOrNull(tinfo.field), tinfo.val); - if (q instanceof TermQuery) { - Term term = ((TermQuery) q).getTerm(); - tinfo.indexedField = term.field(); - indexedVal = term.text(); - } - tinfo.indexedBytes.copyChars(indexedVal); - } else { - ft.readableToIndexed(tinfo.val, tinfo.indexedBytes); - } - - return tinfo; - } - - private static void splitSources( - int dim, List sources, List dest1, List dest2) { - // Get dim value sources for the first vector - for (int 
i = 0; i < dim; i++) { - dest1.add(sources.get(i)); - } - // Get dim value sources for the second vector - for (int i = dim; i < sources.size(); i++) { - dest2.add(sources.get(i)); - } - } - - private static MVResult getMultiValueSources(List sources) { - MVResult mvr = new MVResult(); - if (sources.size() % 2 != 0) { - throw new SolrException( - SolrException.ErrorCode.BAD_REQUEST, - "Illegal number of sources. There must be an even number of sources"); - } - if (sources.size() == 2) { - - // check to see if these are MultiValueSource - boolean s1MV = sources.get(0) instanceof MultiValueSource; - boolean s2MV = sources.get(1) instanceof MultiValueSource; - if (s1MV && s2MV) { - mvr.mv1 = (MultiValueSource) sources.get(0); - mvr.mv2 = (MultiValueSource) sources.get(1); - } else if (s1MV || s2MV) { - // if one is a MultiValueSource, than the other one needs to be too. - throw new SolrException( - SolrException.ErrorCode.BAD_REQUEST, - "Illegal number of sources. There must be an even number of sources"); - } else { - mvr.mv1 = new VectorValueSource(Collections.singletonList(sources.get(0))); - mvr.mv2 = new VectorValueSource(Collections.singletonList(sources.get(1))); - } - } else { - int dim = sources.size() / 2; - List sources1 = new ArrayList<>(dim); - List sources2 = new ArrayList<>(dim); - // Get dim value sources for the first vector - splitSources(dim, sources, sources1, sources2); - mvr.mv1 = new VectorValueSource(sources1); - mvr.mv2 = new VectorValueSource(sources2); - } - - return mvr; - } - - private static class MVResult { - MultiValueSource mv1; - MultiValueSource mv2; - } - - private static class TInfo { - String field; - String val; - String indexedField; - BytesRefBuilder indexedBytes; - } - + @Deprecated(since = "9.8", forRemoval = true) static class DateValueSourceParser extends ValueSourceParser { public Date getDate(FunctionQParser fp, String arg) { if (arg == null) return null; @@ -1550,6 +156,10 @@ protected float func(int doc, FunctionValues aVals, FunctionValues bVals) } } + /** + * @deprecated Use {@link ValueSourceParsers.LongConstValueSource} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) // Private for now - we need to revisit how to handle typing in function queries static class LongConstValueSource extends ConstNumberSource { final long constant; @@ -1640,6 +250,10 @@ public boolean getBool() { } } + /** + * @deprecated Use {@link ValueSourceParsers.NamedParser} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) abstract static class NamedParser extends ValueSourceParser { private final String name; @@ -1652,6 +266,10 @@ public String name() { } } + /** + * @deprecated Use {@link ValueSourceParsers.DoubleParser} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) abstract static class DoubleParser extends NamedParser { public DoubleParser(String name) { super(name); @@ -1693,6 +311,10 @@ public String toString(int doc) throws IOException { } } + /** + * @deprecated Use {@link ValueSourceParsers.Double2Parser} instead. + */ + @Deprecated(since = "9.8", forRemoval = true) abstract static class Double2Parser extends NamedParser { public Double2Parser(String name) { super(name); @@ -1763,6 +385,10 @@ public boolean equals(Object o) { } } + /** + * @deprecated Use {@link ValueSourceParsers.BoolConstValueSource} instead. 
+   */
+  @Deprecated(since = "9.8", forRemoval = true)
   static class BoolConstValueSource extends ConstNumberSource {
     public static final BoolConstValueSource TRUE = new BoolConstValueSource(true);
     public static final BoolConstValueSource FALSE = new BoolConstValueSource(false);
@@ -1831,6 +457,10 @@ public boolean getBool() {
     }
   }
 
+  /**
+   * @deprecated Use {@link ValueSourceParsers.TestValueSource} instead.
+   */
+  @Deprecated(since = "9.8", forRemoval = true)
   static class TestValueSource extends ValueSource {
     ValueSource source;
diff --git a/solr/core/src/java/org/apache/solr/search/ValueSourceParsers.java b/solr/core/src/java/org/apache/solr/search/ValueSourceParsers.java
new file mode 100644
index 00000000000..d1c17bcc098
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/ValueSourceParsers.java
@@ -0,0 +1,1875 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.function.FunctionScoreQuery;
+import org.apache.lucene.queries.function.FunctionValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.BoolDocValues;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.apache.lucene.queries.function.docvalues.LongDocValues;
+import org.apache.lucene.queries.function.valuesource.ConstNumberSource;
+import org.apache.lucene.queries.function.valuesource.ConstValueSource;
+import org.apache.lucene.queries.function.valuesource.DefFunction;
+import org.apache.lucene.queries.function.valuesource.DivFloatFunction;
+import org.apache.lucene.queries.function.valuesource.DocFreqValueSource;
+import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
+import org.apache.lucene.queries.function.valuesource.DualFloatFunction;
+import org.apache.lucene.queries.function.valuesource.IDFValueSource;
+import org.apache.lucene.queries.function.valuesource.IfFunction;
+import org.apache.lucene.queries.function.valuesource.JoinDocFreqValueSource;
+import org.apache.lucene.queries.function.valuesource.LinearFloatFunction;
+import org.apache.lucene.queries.function.valuesource.LiteralValueSource;
+import org.apache.lucene.queries.function.valuesource.MaxDocValueSource;
+import org.apache.lucene.queries.function.valuesource.MaxFloatFunction;
+import org.apache.lucene.queries.function.valuesource.MinFloatFunction;
+import org.apache.lucene.queries.function.valuesource.MultiBoolFunction;
+import org.apache.lucene.queries.function.valuesource.MultiValueSource; +import org.apache.lucene.queries.function.valuesource.NormValueSource; +import org.apache.lucene.queries.function.valuesource.NumDocsValueSource; +import org.apache.lucene.queries.function.valuesource.ProductFloatFunction; +import org.apache.lucene.queries.function.valuesource.QueryValueSource; +import org.apache.lucene.queries.function.valuesource.RangeMapFloatFunction; +import org.apache.lucene.queries.function.valuesource.ReciprocalFloatFunction; +import org.apache.lucene.queries.function.valuesource.ScaleFloatFunction; +import org.apache.lucene.queries.function.valuesource.SimpleBoolFunction; +import org.apache.lucene.queries.function.valuesource.SimpleFloatFunction; +import org.apache.lucene.queries.function.valuesource.SingleFunction; +import org.apache.lucene.queries.function.valuesource.SumFloatFunction; +import org.apache.lucene.queries.function.valuesource.SumTotalTermFreqValueSource; +import org.apache.lucene.queries.function.valuesource.TFValueSource; +import org.apache.lucene.queries.function.valuesource.TermFreqValueSource; +import org.apache.lucene.queries.function.valuesource.TotalTermFreqValueSource; +import org.apache.lucene.queries.function.valuesource.VectorValueSource; +import org.apache.lucene.queries.payloads.PayloadDecoder; +import org.apache.lucene.queries.payloads.PayloadFunction; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.spell.JaroWinklerDistance; +import org.apache.lucene.search.spell.LevenshteinDistance; +import org.apache.lucene.search.spell.NGramDistance; +import org.apache.lucene.search.spell.StringDistance; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.solr.common.SolrException; +import org.apache.solr.request.SolrRequestInfo; +import org.apache.solr.schema.CurrencyFieldType; +import org.apache.solr.schema.FieldType; +import org.apache.solr.schema.IndexSchema; +import org.apache.solr.schema.SchemaField; +import org.apache.solr.schema.StrField; +import org.apache.solr.schema.TextField; +import org.apache.solr.search.facet.AggValueSource; +import org.apache.solr.search.facet.AvgAgg; +import org.apache.solr.search.facet.CountAgg; +import org.apache.solr.search.facet.CountValsAgg; +import org.apache.solr.search.facet.HLLAgg; +import org.apache.solr.search.facet.MinMaxAgg; +import org.apache.solr.search.facet.MissingAgg; +import org.apache.solr.search.facet.PercentileAgg; +import org.apache.solr.search.facet.RelatednessAgg; +import org.apache.solr.search.facet.StddevAgg; +import org.apache.solr.search.facet.SumAgg; +import org.apache.solr.search.facet.SumsqAgg; +import org.apache.solr.search.facet.UniqueAgg; +import org.apache.solr.search.facet.UniqueBlockFieldAgg; +import org.apache.solr.search.facet.UniqueBlockQueryAgg; +import org.apache.solr.search.facet.VarianceAgg; +import org.apache.solr.search.function.CollapseScoreFunction; +import org.apache.solr.search.function.ConcatStringFunction; +import org.apache.solr.search.function.DualDoubleFunction; +import org.apache.solr.search.function.EqualFunction; +import org.apache.solr.search.function.OrdFieldSource; +import org.apache.solr.search.function.ReverseOrdFieldSource; +import org.apache.solr.search.function.SolrComparisonBoolFunction; +import org.apache.solr.search.function.distance.GeoDistValueSourceParser; +import 
org.apache.solr.search.function.distance.GeohashFunction; +import org.apache.solr.search.function.distance.GeohashHaversineFunction; +import org.apache.solr.search.function.distance.HaversineFunction; +import org.apache.solr.search.function.distance.SquaredEuclideanFunction; +import org.apache.solr.search.function.distance.StringDistanceFunction; +import org.apache.solr.search.function.distance.VectorDistanceFunction; +import org.apache.solr.search.join.ChildFieldValueSourceParser; +import org.apache.solr.util.DateMathParser; +import org.apache.solr.util.PayloadUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; + +public class ValueSourceParsers { + + private ValueSourceParsers() {} + + /** standard functions supported by default, filled in static class initialization */ + private static final Map standardVSParsers = new HashMap<>(); + + /** standard functions supported by default */ + public static final Map standardValueSourceParsers = + Collections.unmodifiableMap(standardVSParsers); + + static { + addParser( + "testfunc", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + final ValueSource source = fp.parseValueSource(); + return new ValueSourceParsers.TestValueSource(source); + } + }); + addParser( + "ord", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + String field = fp.parseId(); + return new OrdFieldSource(field); + } + }); + addParser( + "literal", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new LiteralValueSource(fp.parseArg()); + } + }); + addParser( + "threadid", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new ValueSourceParsers.LongConstValueSource(Thread.currentThread().threadId()); + } + }); + addParser( + "sleep", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + int ms = fp.parseInt(); + ValueSource source = fp.parseValueSource(); + try { + Thread.sleep(ms); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + return source; + } + }); + addParser( + "rord", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + String field = fp.parseId(); + return new ReverseOrdFieldSource(field); + } + }); + addParser( + "top", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + // top(vs) is now a no-op + ValueSource source = fp.parseValueSource(); + return source; + } + }); + addParser( + "linear", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource source = fp.parseValueSource(); + float slope = fp.parseFloat(); + float intercept = fp.parseFloat(); + return new LinearFloatFunction(source, slope, intercept); + } + }); + addParser( + "recip", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource source = fp.parseValueSource(); + float m = fp.parseFloat(); + float a = fp.parseFloat(); + float b = fp.parseFloat(); + return new ReciprocalFloatFunction(source, m, a, b); + } + }); + addParser( + "scale", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource source = fp.parseValueSource(); + float min = fp.parseFloat(); 
+ float max = fp.parseFloat(); + return new ScaleFloatFunction(source, min, max); + } + }); + addParser( + "div", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource a = fp.parseValueSource(); + ValueSource b = fp.parseValueSource(); + return new DivFloatFunction(a, b); + } + }); + addParser( + "mod", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource a = fp.parseValueSource(); + ValueSource b = fp.parseValueSource(); + return new DualDoubleFunction(a, b) { + @Override + protected String name() { + return "mod"; + } + + @Override + protected double func(int doc, FunctionValues aVals, FunctionValues bVals) + throws IOException { + return aVals.doubleVal(doc) % bVals.doubleVal(doc); + } + }; + } + }); + addParser( + "map", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource source = fp.parseValueSource(); + float min = fp.parseFloat(); + float max = fp.parseFloat(); + ValueSource target = fp.parseValueSource(); + ValueSource def = fp.hasMoreArguments() ? fp.parseValueSource() : null; + return new RangeMapFloatFunction(source, min, max, target, def); + } + }); + + addParser( + "abs", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource source = fp.parseValueSource(); + return new SimpleFloatFunction(source) { + @Override + protected String name() { + return "abs"; + } + + @Override + protected float func(int doc, FunctionValues vals) throws IOException { + return Math.abs(vals.floatVal(doc)); + } + }; + } + }); + addParser( + "cscore", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new CollapseScoreFunction(); + } + }); + addParser( + "sum", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new SumFloatFunction(sources.toArray(new ValueSource[0])); + } + }); + alias("sum", "add"); + addParser("vectorSimilarity", new VectorSimilaritySourceParser()); + addParser( + "product", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new ProductFloatFunction(sources.toArray(new ValueSource[0])); + } + }); + alias("product", "mul"); + + addParser( + "sub", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource a = fp.parseValueSource(); + ValueSource b = fp.parseValueSource(); + return new DualFloatFunction(a, b) { + @Override + protected String name() { + return "sub"; + } + + @Override + protected float func(int doc, FunctionValues aVals, FunctionValues bVals) + throws IOException { + return aVals.floatVal(doc) - bVals.floatVal(doc); + } + }; + } + }); + addParser( + "vector", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new VectorValueSource(fp.parseValueSourceList()); + } + }); + addParser( + "query", + new ValueSourceParser() { + // boost(query($q),rating) + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + Query q = fp.parseNestedQuery(); + float defVal = 0.0f; + if (fp.hasMoreArguments()) { + defVal = fp.parseFloat(); + } + return new QueryValueSource(q, defVal); + } + 
}); + addParser( + "boost", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + Query q = fp.parseNestedQuery(); + ValueSource vs = fp.parseValueSource(); + return new QueryValueSource( + FunctionScoreQuery.boostByValue(q, vs.asDoubleValuesSource()), 0.0f); + } + }); + addParser( + "joindf", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + String f0 = fp.parseArg(); + String qf = fp.parseArg(); + return new JoinDocFreqValueSource(f0, qf); + } + }); + + addParser("geodist", new GeoDistValueSourceParser()); + + addParser( + "hsin", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + + double radius = fp.parseDouble(); + // SOLR-2114, make the convert flag required, since the parser doesn't support much in + // the way of lookahead or the ability to convert a String into a ValueSource + boolean convert = Boolean.parseBoolean(fp.parseArg()); + + MultiValueSource pv1; + MultiValueSource pv2; + + ValueSource one = fp.parseValueSource(); + ValueSource two = fp.parseValueSource(); + if (fp.hasMoreArguments()) { + pv1 = new VectorValueSource(Arrays.asList(one, two)); // x1, y1 + pv2 = + new VectorValueSource( + Arrays.asList(fp.parseValueSource(), fp.parseValueSource())); // x2, y2 + } else { + // check to see if we have multiValue source + if (one instanceof MultiValueSource && two instanceof MultiValueSource) { + pv1 = (MultiValueSource) one; + pv2 = (MultiValueSource) two; + } else { + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, + "Input must either be 2 MultiValueSources, or there must be 4 ValueSources"); + } + } + + return new HaversineFunction(pv1, pv2, radius, convert); + } + }); + + addParser( + "ghhsin", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + double radius = fp.parseDouble(); + + ValueSource gh1 = fp.parseValueSource(); + ValueSource gh2 = fp.parseValueSource(); + + return new GeohashHaversineFunction(gh1, gh2, radius); + } + }); + + addParser( + "geohash", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + + ValueSource lat = fp.parseValueSource(); + ValueSource lon = fp.parseValueSource(); + + return new GeohashFunction(lat, lon); + } + }); + addParser( + "strdist", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + + ValueSource str1 = fp.parseValueSource(); + ValueSource str2 = fp.parseValueSource(); + String distClass = fp.parseArg(); + + StringDistance dist = null; + if (distClass.equalsIgnoreCase("jw")) { + dist = new JaroWinklerDistance(); + } else if (distClass.equalsIgnoreCase("edit")) { + dist = new LevenshteinDistance(); + } else if (distClass.equalsIgnoreCase("ngram")) { + int ngram = 2; + if (fp.hasMoreArguments()) { + ngram = fp.parseInt(); + } + dist = new NGramDistance(ngram); + } else { + dist = + fp.req.getCore().getResourceLoader().newInstance(distClass, StringDistance.class); + } + return new StringDistanceFunction(str1, str2, dist); + } + }); + addParser( + "field", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + + String fieldName = fp.parseArg(); + SchemaField f = fp.getReq().getSchema().getField(fieldName); + if (fp.hasMoreArguments()) { + // multivalued selector option + String s = fp.parseArg(); + 
FieldType.MultiValueSelector selector = FieldType.MultiValueSelector.lookup(s); + if (null == selector) { + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, + "Multi-Valued field selector '" + s + "' not supported"); + } + return f.getType().getSingleValueSource(selector, f, fp); + } + // simple field ValueSource + return f.getType().getValueSource(f, fp); + } + }); + addParser( + "currency", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + + String fieldName = fp.parseArg(); + SchemaField f = fp.getReq().getSchema().getField(fieldName); + if (!(f.getType() instanceof CurrencyFieldType)) { + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, + "Currency function input must be the name of a CurrencyFieldType: " + fieldName); + } + CurrencyFieldType ft = (CurrencyFieldType) f.getType(); + String code = fp.hasMoreArguments() ? fp.parseArg() : null; + return ft.getConvertedValueSource(code, ft.getValueSource(f, fp)); + } + }); + + addParser( + new DoubleParser("rad") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return vals.doubleVal(doc) * DistanceUtils.DEGREES_TO_RADIANS; + } + }); + addParser( + new DoubleParser("deg") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return vals.doubleVal(doc) * DistanceUtils.RADIANS_TO_DEGREES; + } + }); + addParser( + new DoubleParser("sqrt") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.sqrt(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("cbrt") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.cbrt(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("log") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.log10(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("ln") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.log(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("exp") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.exp(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("sin") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.sin(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("cos") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.cos(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("tan") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.tan(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("asin") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.asin(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("acos") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.acos(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("atan") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.atan(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("sinh") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.sinh(vals.doubleVal(doc)); + } + }); + addParser( 
+ new DoubleParser("cosh") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.cosh(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("tanh") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.tanh(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("ceil") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.ceil(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("floor") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.floor(vals.doubleVal(doc)); + } + }); + addParser( + new DoubleParser("rint") { + @Override + public double func(int doc, FunctionValues vals) throws IOException { + return Math.rint(vals.doubleVal(doc)); + } + }); + addParser( + new Double2Parser("pow") { + @Override + public double func(int doc, FunctionValues a, FunctionValues b) throws IOException { + return Math.pow(a.doubleVal(doc), b.doubleVal(doc)); + } + }); + addParser( + new Double2Parser("hypot") { + @Override + public double func(int doc, FunctionValues a, FunctionValues b) throws IOException { + return Math.hypot(a.doubleVal(doc), b.doubleVal(doc)); + } + }); + addParser( + new Double2Parser("atan2") { + @Override + public double func(int doc, FunctionValues a, FunctionValues b) throws IOException { + return Math.atan2(a.doubleVal(doc), b.doubleVal(doc)); + } + }); + addParser( + "max", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new MaxFloatFunction(sources.toArray(new ValueSource[0])); + } + }); + addParser( + "min", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new MinFloatFunction(sources.toArray(new ValueSource[0])); + } + }); + + addParser( + "sqedist", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + MVResult mvr = getMultiValueSources(sources); + + return new SquaredEuclideanFunction(mvr.mv1, mvr.mv2); + } + }); + + addParser( + "dist", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + float power = fp.parseFloat(); + List sources = fp.parseValueSourceList(); + MVResult mvr = getMultiValueSources(sources); + return new VectorDistanceFunction(power, mvr.mv1, mvr.mv2); + } + }); + addParser("ms", new DateValueSourceParser()); + + addParser( + "pi", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) { + return new DoubleConstValueSource(Math.PI); + } + }); + addParser( + "e", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) { + return new DoubleConstValueSource(Math.E); + } + }); + + addParser( + "docfreq", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + TInfo tinfo = parseTerm(fp); + return new DocFreqValueSource( + tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); + } + }); + + addParser( + "totaltermfreq", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + TInfo tinfo = parseTerm(fp); + return new TotalTermFreqValueSource( + tinfo.field, tinfo.val, tinfo.indexedField, 
tinfo.indexedBytes.get()); + } + }); + alias("totaltermfreq", "ttf"); + + addParser( + "sumtotaltermfreq", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + String field = fp.parseArg(); + return new SumTotalTermFreqValueSource(field); + } + }); + alias("sumtotaltermfreq", "sttf"); + + addParser( + "idf", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + TInfo tinfo = parseTerm(fp); + return new IDFValueSource( + tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); + } + }); + + addParser( + "termfreq", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + TInfo tinfo = parseTerm(fp); + return new TermFreqValueSource( + tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); + } + }); + + addParser( + "tf", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + TInfo tinfo = parseTerm(fp); + return new TFValueSource( + tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes.get()); + } + }); + + addParser( + "norm", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + String field = fp.parseArg(); + return new NormValueSource(field); + } + }); + + addParser( + "maxdoc", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) { + return new MaxDocValueSource(); + } + }); + + addParser( + "numdocs", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) { + return new NumDocsValueSource(); + } + }); + + addParser( + "payload", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + // payload(field,value[,default, ['min|max|average|first']]) + // defaults to "average" and 0.0 default value + + // would have made this parser a new separate class and registered it, but + // this handy method is private :/ + TInfo tinfo = parseTerm(fp); + + ValueSource defaultValueSource; + if (fp.hasMoreArguments()) { + defaultValueSource = fp.parseValueSource(); + } else { + defaultValueSource = new ConstValueSource(0.0f); + } + + PayloadFunction payloadFunction = null; + String func = "average"; + if (fp.hasMoreArguments()) { + func = fp.parseArg(); + } + payloadFunction = PayloadUtils.getPayloadFunction(func); + + // Support func="first" by payloadFunction=null + if (payloadFunction == null && !"first".equals(func)) { + // not "first" (or average, min, or max) + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, "Invalid payload function: " + func); + } + + IndexSchema schema = fp.getReq().getCore().getLatestSchema(); + PayloadDecoder decoder = schema.getPayloadDecoder(tinfo.field); + + if (decoder == null) { + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, + "No payload decoder found for field: " + tinfo.field); + } + + return new FloatPayloadValueSource( + tinfo.field, + tinfo.val, + tinfo.indexedField, + tinfo.indexedBytes.get(), + decoder, + payloadFunction, + defaultValueSource); + } + }); + + addParser( + "true", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) { + return ValueSourceParsers.BoolConstValueSource.TRUE; + } + }); + + addParser( + "false", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) { + return ValueSourceParsers.BoolConstValueSource.FALSE; + } 
+ }); + + addParser( + "exists", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource vs = fp.parseValueSource(); + return new SimpleBoolFunction(vs) { + @Override + protected String name() { + return "exists"; + } + + @Override + protected boolean func(int doc, FunctionValues vals) throws IOException { + return vals.exists(doc); + } + }; + } + }); + + addParser( + "isnan", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource vs = fp.parseValueSource(); + return new SimpleBoolFunction(vs) { + @Override + protected String name() { + return "isnan"; + } + + @Override + protected boolean func(int doc, FunctionValues vals) throws IOException { + return Float.isNaN(vals.floatVal(doc)); + } + }; + } + }); + + addParser( + "not", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource vs = fp.parseValueSource(); + return new SimpleBoolFunction(vs) { + @Override + protected boolean func(int doc, FunctionValues vals) throws IOException { + return !vals.boolVal(doc); + } + + @Override + protected String name() { + return "not"; + } + }; + } + }); + + addParser( + "and", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new MultiBoolFunction(sources) { + @Override + protected String name() { + return "and"; + } + + @Override + protected boolean func(int doc, FunctionValues[] vals) throws IOException { + for (FunctionValues dv : vals) if (!dv.boolVal(doc)) return false; + return true; + } + }; + } + }); + + addParser( + "or", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new MultiBoolFunction(sources) { + @Override + protected String name() { + return "or"; + } + + @Override + protected boolean func(int doc, FunctionValues[] vals) throws IOException { + for (FunctionValues dv : vals) if (dv.boolVal(doc)) return true; + return false; + } + }; + } + }); + + addParser( + "xor", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new MultiBoolFunction(sources) { + @Override + protected String name() { + return "xor"; + } + + @Override + protected boolean func(int doc, FunctionValues[] vals) throws IOException { + int nTrue = 0, nFalse = 0; + for (FunctionValues dv : vals) { + if (dv.boolVal(doc)) nTrue++; + else nFalse++; + } + return nTrue != 0 && nFalse != 0; + } + }; + } + }); + + addParser( + "if", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource ifValueSource = fp.parseValueSource(); + ValueSource trueValueSource = fp.parseValueSource(); + ValueSource falseValueSource = fp.parseValueSource(); + + return new IfFunction(ifValueSource, trueValueSource, falseValueSource); + } + }); + + addParser( + "gt", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource lhsValSource = fp.parseValueSource(); + ValueSource rhsValSource = fp.parseValueSource(); + + return new SolrComparisonBoolFunction( + lhsValSource, rhsValSource, "gt", (cmp) -> cmp > 0); + } + }); + + addParser( + "lt", + new ValueSourceParser() { + @Override + public ValueSource 
parse(FunctionQParser fp) throws SyntaxError { + ValueSource lhsValSource = fp.parseValueSource(); + ValueSource rhsValSource = fp.parseValueSource(); + + return new SolrComparisonBoolFunction( + lhsValSource, rhsValSource, "lt", (cmp) -> cmp < 0); + } + }); + + addParser( + "gte", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource lhsValSource = fp.parseValueSource(); + ValueSource rhsValSource = fp.parseValueSource(); + + return new SolrComparisonBoolFunction( + lhsValSource, rhsValSource, "gte", (cmp) -> cmp >= 0); + } + }); + + addParser( + "lte", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource lhsValSource = fp.parseValueSource(); + ValueSource rhsValSource = fp.parseValueSource(); + + return new SolrComparisonBoolFunction( + lhsValSource, rhsValSource, "lte", (cmp) -> cmp <= 0); + } + }); + + addParser( + "eq", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + ValueSource lhsValSource = fp.parseValueSource(); + ValueSource rhsValSource = fp.parseValueSource(); + + return new EqualFunction(lhsValSource, rhsValSource, "eq"); + } + }); + + addParser( + "def", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new DefFunction(fp.parseValueSourceList()); + } + }); + + addParser( + "concat", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List sources = fp.parseValueSourceList(); + return new ConcatStringFunction(sources.toArray(new ValueSource[0])); + } + }); + + addParser( + "agg", + new ValueSourceParser() { + @Override + public AggValueSource parse(FunctionQParser fp) throws SyntaxError { + return fp.parseAgg(FunctionQParser.FLAG_DEFAULT); + } + }); + + addParser( + "agg_count", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new CountAgg(); + } + }); + + addParser( + "agg_unique", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new UniqueAgg(fp.parseArg()); + } + }); + + addParser( + "agg_uniqueBlock", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + if (fp.sp.peek() == QueryParsing.LOCALPARAM_START.charAt(0)) { + return new UniqueBlockQueryAgg(fp.parseNestedQuery()); + } + return new UniqueBlockFieldAgg(fp.parseArg()); + } + }); + + addParser( + "agg_hll", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new HLLAgg(fp.parseArg()); + } + }); + + addParser( + "agg_sum", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new SumAgg( + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_avg", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new AvgAgg( + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_sumsq", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new SumsqAgg( + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | 
FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_variance", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new VarianceAgg( + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_stddev", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new StddevAgg( + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_missing", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new MissingAgg( + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_countvals", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new CountValsAgg( + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + /* + addParser("agg_multistat", new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return null; + } + }); + */ + + addParser( + "agg_min", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new MinMaxAgg( + "min", + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_max", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new MinMaxAgg( + "max", + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE)); + } + }); + + addParser( + "agg_percentile", + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + List percentiles = new ArrayList<>(); + ValueSource vs = + fp.parseValueSource( + FunctionQParser.FLAG_DEFAULT | FunctionQParser.FLAG_USE_FIELDNAME_SOURCE); + while (fp.hasMoreArguments()) { + double val = fp.parseDouble(); + if (val < 0 || val > 100) { + throw new SyntaxError( + "requested percentile must be between 0 and 100. got " + val); + } + percentiles.add(val); + } + + if (percentiles.isEmpty()) { + throw new SyntaxError( + "expected percentile(valsource,percent1[,percent2]*) EXAMPLE:percentile(myfield,50)"); + } + + return new PercentileAgg(vs, percentiles); + } + }); + + addParser( + "agg_" + RelatednessAgg.NAME, + new ValueSourceParser() { + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + // TODO: (fore & back)-ground should be optional -- use hasMoreArguments + // if only one arg, assume it's the foreground + // (background is the one that will most commonly just be "*:*") + // see notes in RelatednessAgg constructor about why we don't do this yet + RelatednessAgg agg = new RelatednessAgg(fp.parseNestedQuery(), fp.parseNestedQuery()); + agg.setOpts(fp); + return agg; + } + }); + + addParser("childfield", new ChildFieldValueSourceParser()); + } + + /** + * Adds a new parser for the name and returns any existing one that was overridden. This is not + * thread safe. 
+   */
+  private static ValueSourceParser addParser(String name, ValueSourceParser p) {
+    return standardVSParsers.put(name, p);
+  }
+
+  /**
+   * Adds a new parser for the name and returns any existing one that was overridden. This is not
+   * thread safe.
+   */
+  private static ValueSourceParser addParser(NamedParser p) {
+    return standardVSParsers.put(p.name(), p);
+  }
+
+  private static void alias(String source, String dest) {
+    standardVSParsers.put(dest, standardVSParsers.get(source));
+  }
+
+  private static TInfo parseTerm(FunctionQParser fp) throws SyntaxError {
+    TInfo tinfo = new TInfo();
+
+    tinfo.indexedField = tinfo.field = fp.parseArg();
+    tinfo.val = fp.parseArg();
+    tinfo.indexedBytes = new BytesRefBuilder();
+
+    FieldType ft = fp.getReq().getSchema().getFieldTypeNoEx(tinfo.field);
+    if (ft == null) ft = new StrField();
+
+    if (ft instanceof TextField) {
+      // need to do analysis on the term
+      String indexedVal = tinfo.val;
+      Query q =
+          ft.getFieldQuery(fp, fp.getReq().getSchema().getFieldOrNull(tinfo.field), tinfo.val);
+      if (q instanceof TermQuery) {
+        Term term = ((TermQuery) q).getTerm();
+        tinfo.indexedField = term.field();
+        indexedVal = term.text();
+      }
+      tinfo.indexedBytes.copyChars(indexedVal);
+    } else {
+      ft.readableToIndexed(tinfo.val, tinfo.indexedBytes);
+    }
+
+    return tinfo;
+  }
+
+  private static void splitSources(
+      int dim, List<ValueSource> sources, List<ValueSource> dest1, List<ValueSource> dest2) {
+    // Get dim value sources for the first vector
+    for (int i = 0; i < dim; i++) {
+      dest1.add(sources.get(i));
+    }
+    // Get dim value sources for the second vector
+    for (int i = dim; i < sources.size(); i++) {
+      dest2.add(sources.get(i));
+    }
+  }
+
+  private static MVResult getMultiValueSources(List<ValueSource> sources) {
+    MVResult mvr = new MVResult();
+    if (sources.size() % 2 != 0) {
+      throw new SolrException(
+          SolrException.ErrorCode.BAD_REQUEST,
+          "Illegal number of sources. There must be an even number of sources");
+    }
+    if (sources.size() == 2) {
+
+      // check to see if these are MultiValueSource
+      boolean s1MV = sources.get(0) instanceof MultiValueSource;
+      boolean s2MV = sources.get(1) instanceof MultiValueSource;
+      if (s1MV && s2MV) {
+        mvr.mv1 = (MultiValueSource) sources.get(0);
+        mvr.mv2 = (MultiValueSource) sources.get(1);
+      } else if (s1MV || s2MV) {
+        // if one is a MultiValueSource, than the other one needs to be too.
+        throw new SolrException(
+            SolrException.ErrorCode.BAD_REQUEST,
+            "Illegal number of sources. There must be an even number of sources");
+      } else {
+        mvr.mv1 = new VectorValueSource(Collections.singletonList(sources.get(0)));
+        mvr.mv2 = new VectorValueSource(Collections.singletonList(sources.get(1)));
+      }
+    } else {
+      int dim = sources.size() / 2;
+      List<ValueSource> sources1 = new ArrayList<>(dim);
+      List<ValueSource> sources2 = new ArrayList<>(dim);
+      // Get dim value sources for the first vector
+      splitSources(dim, sources, sources1, sources2);
+      mvr.mv1 = new VectorValueSource(sources1);
+      mvr.mv2 = new VectorValueSource(sources2);
+    }
+
+    return mvr;
+  }
+
+  private static class MVResult {
+    MultiValueSource mv1;
+    MultiValueSource mv2;
+  }
+
+  private static class TInfo {
+    String field;
+    String val;
+    String indexedField;
+    BytesRefBuilder indexedBytes;
+  }
+
+  static class DateValueSourceParser extends ValueSourceParser {
+    public Date getDate(FunctionQParser fp, String arg) {
+      if (arg == null) return null;
+      // check character index 1 to be a digit. Index 0 might be a +/-.
+ if (arg.startsWith("NOW") || (arg.length() > 1 && Character.isDigit(arg.charAt(1)))) { + Date now = null; // TODO pull from params? + return DateMathParser.parseMath(now, arg); + } + return null; + } + + public ValueSource getValueSource(FunctionQParser fp, String arg) { + if (arg == null) return null; + SchemaField f = fp.req.getSchema().getField(arg); + return f.getType().getValueSource(f, fp); + } + + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + String first = fp.parseArg(); + String second = fp.parseArg(); + if (first == null) first = "NOW"; + + Date d1 = getDate(fp, first); + ValueSource v1 = d1 == null ? getValueSource(fp, first) : null; + + Date d2 = getDate(fp, second); + ValueSource v2 = d2 == null ? getValueSource(fp, second) : null; + + // d constant + // v field + // dd constant + // dv subtract field from constant + // vd subtract constant from field + // vv subtract fields + + final long ms1 = (d1 == null) ? 0 : d1.getTime(); + final long ms2 = (d2 == null) ? 0 : d2.getTime(); + + // "d,dd" handle both constant cases + + if (d1 != null && v2 == null) { + return new ValueSourceParsers.LongConstValueSource(ms1 - ms2); + } + + // "v" just the date field + if (v1 != null && v2 == null && d2 == null) { + return v1; + } + + // "dv" + if (d1 != null && v2 != null) + return new DualFloatFunction(new ValueSourceParsers.LongConstValueSource(ms1), v2) { + @Override + protected String name() { + return "ms"; + } + + @Override + protected float func(int doc, FunctionValues aVals, FunctionValues bVals) + throws IOException { + return ms1 - bVals.longVal(doc); + } + }; + + // "vd" + if (v1 != null && d2 != null) + return new DualFloatFunction(v1, new ValueSourceParsers.LongConstValueSource(ms2)) { + @Override + protected String name() { + return "ms"; + } + + @Override + protected float func(int doc, FunctionValues aVals, FunctionValues bVals) + throws IOException { + return aVals.longVal(doc) - ms2; + } + }; + + // "vv" + if (v1 != null && v2 != null) + return new DualFloatFunction(v1, v2) { + @Override + protected String name() { + return "ms"; + } + + @Override + protected float func(int doc, FunctionValues aVals, FunctionValues bVals) + throws IOException { + return aVals.longVal(doc) - bVals.longVal(doc); + } + }; + + return null; // shouldn't happen + } + } + + // Private for now - we need to revisit how to handle typing in function queries + static class LongConstValueSource extends ConstNumberSource { + final long constant; + final double dv; + final float fv; + + public LongConstValueSource(long constant) { + this.constant = constant; + this.dv = constant; + this.fv = constant; + } + + @Override + public String description() { + return "const(" + constant + ")"; + } + + @Override + public FunctionValues getValues(Map context, LeafReaderContext readerContext) + throws IOException { + return new LongDocValues(this) { + @Override + public float floatVal(int doc) { + return fv; + } + + @Override + public int intVal(int doc) { + return (int) constant; + } + + @Override + public long longVal(int doc) { + return constant; + } + + @Override + public double doubleVal(int doc) { + return dv; + } + + @Override + public String toString(int doc) { + return description(); + } + }; + } + + @Override + public int hashCode() { + return (int) constant + (int) (constant >>> 32); + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof LongConstValueSource other)) return false; + return this.constant == other.constant; + } + + @Override + public 
int getInt() { + return (int) constant; + } + + @Override + public long getLong() { + return constant; + } + + @Override + public float getFloat() { + return fv; + } + + @Override + public double getDouble() { + return dv; + } + + @Override + public Number getNumber() { + return constant; + } + + @Override + public boolean getBool() { + return constant != 0; + } + } + + abstract static class NamedParser extends ValueSourceParser { + private final String name; + + public NamedParser(String name) { + this.name = name; + } + + public String name() { + return name; + } + } + + abstract static class DoubleParser extends NamedParser { + public DoubleParser(String name) { + super(name); + } + + public abstract double func(int doc, FunctionValues vals) throws IOException; + + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new Function(fp.parseValueSource()); + } + + class Function extends SingleFunction { + public Function(ValueSource source) { + super(source); + } + + @Override + public String name() { + return ValueSourceParsers.DoubleParser.this.name(); + } + + @Override + public FunctionValues getValues(Map context, LeafReaderContext readerContext) + throws IOException { + final FunctionValues vals = source.getValues(context, readerContext); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) throws IOException { + return func(doc, vals); + } + + @Override + public String toString(int doc) throws IOException { + return name() + '(' + vals.toString(doc) + ')'; + } + }; + } + } + } + + abstract static class Double2Parser extends NamedParser { + public Double2Parser(String name) { + super(name); + } + + public abstract double func(int doc, FunctionValues a, FunctionValues b) throws IOException; + + @Override + public ValueSource parse(FunctionQParser fp) throws SyntaxError { + return new Function(fp.parseValueSource(), fp.parseValueSource()); + } + + class Function extends ValueSource { + private final ValueSource a; + private final ValueSource b; + + /** + * @param a the base. + * @param b the exponent. 
+ */ + public Function(ValueSource a, ValueSource b) { + this.a = a; + this.b = b; + } + + @Override + public String description() { + return name() + "(" + a.description() + "," + b.description() + ")"; + } + + @Override + public FunctionValues getValues(Map context, LeafReaderContext readerContext) + throws IOException { + final FunctionValues aVals = a.getValues(context, readerContext); + final FunctionValues bVals = b.getValues(context, readerContext); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) throws IOException { + return func(doc, aVals, bVals); + } + + @Override + public String toString(int doc) throws IOException { + return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')'; + } + }; + } + + @Override + public void createWeight(Map context, IndexSearcher searcher) + throws IOException {} + + @Override + public int hashCode() { + int h = a.hashCode(); + h ^= (h << 13) | (h >>> 20); + h += b.hashCode(); + h ^= (h << 23) | (h >>> 10); + h += name().hashCode(); + return h; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof Function other)) return false; + return this.a.equals(other.a) && this.b.equals(other.b); + } + } + } + + static class BoolConstValueSource extends ConstNumberSource { + public static final BoolConstValueSource TRUE = new BoolConstValueSource(true); + public static final BoolConstValueSource FALSE = new BoolConstValueSource(false); + + final boolean constant; + + private BoolConstValueSource(boolean constant) { + this.constant = constant; + } + + @Override + public String description() { + return "const(" + constant + ")"; + } + + @Override + public FunctionValues getValues(Map context, LeafReaderContext readerContext) + throws IOException { + return new BoolDocValues(this) { + @Override + public boolean boolVal(int doc) { + return constant; + } + }; + } + + @Override + public int hashCode() { + return constant ? 0x12345678 : 0x87654321; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof BoolConstValueSource other)) return false; + return this.constant == other.constant; + } + + @Override + public int getInt() { + return constant ? 1 : 0; + } + + @Override + public long getLong() { + return constant ? 1 : 0; + } + + @Override + public float getFloat() { + return constant ? 1 : 0; + } + + @Override + public double getDouble() { + return constant ? 1 : 0; + } + + @Override + public Number getNumber() { + return constant ? 1 : 0; + } + + @Override + public boolean getBool() { + return constant; + } + } + + static class TestValueSource extends ValueSource { + ValueSource source; + + public TestValueSource(ValueSource source) { + this.source = source; + } + + @Override + public FunctionValues getValues(Map context, LeafReaderContext readerContext) + throws IOException { + if (context.get(this) == null) { + SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo(); + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, + "testfunc: unweighted value source detected. delegate=" + + source + + " request=" + + (requestInfo == null ? 
"null" : requestInfo.getReq())); + } + return source.getValues(context, readerContext); + } + + @Override + public boolean equals(Object o) { + return o instanceof TestValueSource && source.equals(((TestValueSource) o).source); + } + + @Override + public int hashCode() { + return source.hashCode() + TestValueSource.class.hashCode(); + } + + @Override + public String description() { + return "testfunc(" + source.description() + ')'; + } + + @Override + public void createWeight(Map context, IndexSearcher searcher) + throws IOException { + context.put(this, this); + } + + @Override + public SortField getSortField(boolean reverse) { + return super.getSortField(reverse); + } + } +} From dff22d4315bd951f5cca0cd61ce1316cbdd30e6b Mon Sep 17 00:00:00 2001 From: Christos Malliaridis Date: Wed, 20 Nov 2024 22:52:49 +0100 Subject: [PATCH 7/7] Fix ClassInitializationDeadlock warning for TransformerFactory --- .../java/org/apache/solr/core/SolrCore.java | 3 +- .../transform/TransformerFactories.java | 41 +++++++++++++++++++ .../transform/TransformerFactory.java | 21 +++------- .../solr/cloud/TestRandomFlRTGCloud.java | 8 ++-- 4 files changed, 53 insertions(+), 20 deletions(-) create mode 100644 solr/core/src/java/org/apache/solr/response/transform/TransformerFactories.java diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index 3f2b70f1f05..2c41bd90cd4 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -135,6 +135,7 @@ import org.apache.solr.response.SmileResponseWriter; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.XMLResponseWriter; +import org.apache.solr.response.transform.TransformerFactories; import org.apache.solr.response.transform.TransformerFactory; import org.apache.solr.rest.ManagedResourceStorage; import org.apache.solr.rest.ManagedResourceStorage.StorageIO; @@ -1132,7 +1133,7 @@ protected SolrCore( initWriters(); qParserPlugins.init(QParserPlugins.standardPlugins, this); valueSourceParsers.init(ValueSourceParsers.standardValueSourceParsers, this); - transformerFactories.init(TransformerFactory.defaultFactories, this); + transformerFactories.init(TransformerFactories.defaultFactories, this); loadSearchComponents(); updateProcessors.init(Collections.emptyMap(), this); diff --git a/solr/core/src/java/org/apache/solr/response/transform/TransformerFactories.java b/solr/core/src/java/org/apache/solr/response/transform/TransformerFactories.java new file mode 100644 index 00000000000..7aaaf634d39 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/response/transform/TransformerFactories.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.solr.response.transform;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class TransformerFactories {
+
+  private TransformerFactories() {}
+
+  public static final Map<String, TransformerFactory> defaultFactories = new HashMap<>(9, 1.0f);
+
+  static {
+    defaultFactories.put("explain", new ExplainAugmenterFactory());
+    defaultFactories.put("value", new ValueAugmenterFactory());
+    defaultFactories.put("docid", new DocIdAugmenterFactory());
+    defaultFactories.put("shard", new ShardAugmenterFactory());
+    defaultFactories.put("child", new ChildDocTransformerFactory());
+    defaultFactories.put("subquery", new SubQueryAugmenterFactory());
+    defaultFactories.put("json", new RawValueTransformerFactory("json"));
+    defaultFactories.put("xml", new RawValueTransformerFactory("xml"));
+    defaultFactories.put("geo", new GeoTransformerFactory());
+    defaultFactories.put("core", new CoreAugmenterFactory());
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java b/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java
index 249a3a9e1d7..8103b21bd59 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/TransformerFactory.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.response.transform;
 
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 import org.apache.solr.common.params.SolrParams;
@@ -105,18 +104,10 @@ default boolean mayModifyValue() {
     }
   }
 
-  public static final Map<String, TransformerFactory> defaultFactories = new HashMap<>(9, 1.0f);
-
-  static {
-    defaultFactories.put("explain", new ExplainAugmenterFactory());
-    defaultFactories.put("value", new ValueAugmenterFactory());
-    defaultFactories.put("docid", new DocIdAugmenterFactory());
-    defaultFactories.put("shard", new ShardAugmenterFactory());
-    defaultFactories.put("child", new ChildDocTransformerFactory());
-    defaultFactories.put("subquery", new SubQueryAugmenterFactory());
-    defaultFactories.put("json", new RawValueTransformerFactory("json"));
-    defaultFactories.put("xml", new RawValueTransformerFactory("xml"));
-    defaultFactories.put("geo", new GeoTransformerFactory());
-    defaultFactories.put("core", new CoreAugmenterFactory());
-  }
+  /**
+   * @deprecated Use {@link TransformerFactories#defaultFactories} instead.
+   */
+  @Deprecated(since = "9.8", forRemoval = true)
+  public static final Map<String, TransformerFactory> defaultFactories =
+      TransformerFactories.defaultFactories;
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
index 26c0797407e..6f5b2f1dda0 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java
@@ -60,7 +60,7 @@
 import org.apache.solr.embedded.JettySolrRunner;
 import org.apache.solr.response.transform.DocTransformer;
 import org.apache.solr.response.transform.RawValueTransformerFactory;
-import org.apache.solr.response.transform.TransformerFactory;
+import org.apache.solr.response.transform.TransformerFactories;
 import org.apache.solr.util.RandomizeSSL;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -198,11 +198,11 @@ public static void afterClass() throws Exception {
    *
    * @see FlValidator#getDefaultTransformerFactoryName
    * @see #FL_VALIDATORS
-   * @see TransformerFactory#defaultFactories
+   * @see TransformerFactories#defaultFactories
    */
   public void testCoverage() {
     final Set<String> implicit = new LinkedHashSet<>();
-    for (String t : TransformerFactory.defaultFactories.keySet()) {
+    for (String t : TransformerFactories.defaultFactories.keySet()) {
       implicit.add(t);
     }
 
@@ -704,7 +704,7 @@ public default boolean requiresRealtimeSearcherReOpen() {
   }
 
   /**
-   * the name of a transformer listed in {@link TransformerFactory#defaultFactories} that this
+   * the name of a transformer listed in {@link TransformerFactories#defaultFactories} that this
    * validator corresponds to, or null if not applicable. Used for testing coverage of Solr's
    * implicitly supported transformers.
    *