From b357936227df04f88c980b6cbf7cd7cde982a25b Mon Sep 17 00:00:00 2001 From: Dmitriy Burlutskiy Date: Mon, 16 Dec 2024 15:36:50 +0100 Subject: [PATCH 1/2] Revert "Support mTLS in Elastic Inference Service plugin (#116423)" (#118765) This reverts commit 74a4484101dd65a0194f4adc3bd23fe39c2f2bd7. --- docs/changelog/116423.yaml | 5 - .../xpack/core/ssl/SSLService.java | 2 - .../core/LocalStateCompositeXPackPlugin.java | 2 +- .../xpack/core/ssl/SSLServiceTests.java | 3 +- .../ShardBulkInferenceActionFilterIT.java | 3 +- .../integration/ModelRegistryIT.java | 4 +- .../inference/src/main/java/module-info.java | 1 - .../xpack/inference/InferencePlugin.java | 101 +++++------------- .../external/http/HttpClientManager.java | 44 -------- .../TextSimilarityRankRetrieverBuilder.java | 11 +- .../ElasticInferenceServiceSettings.java | 24 +---- .../SemanticTextClusterMetadataTests.java | 3 +- .../xpack/inference/InferencePluginTests.java | 65 ----------- .../inference/LocalStateInferencePlugin.java | 71 ------------ .../elasticsearch/xpack/inference/Utils.java | 15 +++ ...emanticTextNonDynamicFieldMapperTests.java | 3 +- .../TextSimilarityRankMultiNodeTests.java | 4 +- ...SimilarityRankRetrieverTelemetryTests.java | 5 +- .../TextSimilarityRankTests.java | 4 +- .../xpack/ml/LocalStateMachineLearning.java | 7 -- .../xpack/ml/support/BaseMlIntegTestCase.java | 4 +- .../security/CrossClusterShardTests.java | 2 + 22 files changed, 69 insertions(+), 314 deletions(-) delete mode 100644 docs/changelog/116423.yaml delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java diff --git a/docs/changelog/116423.yaml b/docs/changelog/116423.yaml deleted file mode 100644 index d6d10eab410e4..0000000000000 --- a/docs/changelog/116423.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116423 -summary: Support mTLS for the Elastic Inference Service integration inside the inference API -area: Machine Learning -type: feature -issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index d0d5e463f9652..9704335776f11 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -596,8 +596,6 @@ static Map getSSLSettingsMap(Settings settings) { sslSettingsMap.put(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX, settings.getByPrefix(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX)); sslSettingsMap.put(XPackSettings.TRANSPORT_SSL_PREFIX, settings.getByPrefix(XPackSettings.TRANSPORT_SSL_PREFIX)); sslSettingsMap.putAll(getTransportProfileSSLSettings(settings)); - // Mount Elastic Inference Service (part of the Inference plugin) configuration - sslSettingsMap.put("xpack.inference.elastic.http.ssl", settings.getByPrefix("xpack.inference.elastic.http.ssl.")); // Only build remote cluster server SSL if the port is enabled if (REMOTE_CLUSTER_SERVER_ENABLED.get(settings)) { sslSettingsMap.put(XPackSettings.REMOTE_CLUSTER_SERVER_SSL_PREFIX, getRemoteClusterServerSslSettings(settings)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index d50f7bb27a5df..1f2c89c473a62 
100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -623,7 +623,7 @@ public Map getSnapshotCommitSup } @SuppressWarnings("unchecked") - protected List filterPlugins(Class type) { + private List filterPlugins(Class type) { return plugins.stream().filter(x -> type.isAssignableFrom(x.getClass())).map(p -> ((T) p)).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index bfac286bc3c35..9663e41a647a8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -614,8 +614,7 @@ public void testGetConfigurationByContextName() throws Exception { "xpack.security.authc.realms.ldap.realm1.ssl", "xpack.security.authc.realms.saml.realm2.ssl", "xpack.monitoring.exporters.mon1.ssl", - "xpack.monitoring.exporters.mon2.ssl", - "xpack.inference.elastic.http.ssl" }; + "xpack.monitoring.exporters.mon2.ssl" }; assumeTrue("Not enough cipher suites are available to support this test", getCipherSuites.length >= contextNames.length); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index c7b3a9d42f579..3b0fc869c8124 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -22,7 +22,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; @@ -59,7 +58,7 @@ public void setup() throws Exception { @Override protected Collection> nodePlugins() { - return Arrays.asList(LocalStateInferencePlugin.class); + return Arrays.asList(Utils.TestInferencePlugin.class); } public void testBulkOperations() throws Exception { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index d5c156d1d4f46..be6b3725b0f35 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import 
org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; @@ -76,7 +76,7 @@ public void createComponents() { @Override protected Collection> getPlugins() { - return pluginList(ReindexPlugin.class, LocalStateInferencePlugin.class); + return pluginList(ReindexPlugin.class, InferencePlugin.class); } public void testStoreModel() throws Exception { diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 1c2240e8c5217..53974657e4e23 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -34,7 +34,6 @@ requires software.amazon.awssdk.retries.api; requires org.reactivestreams; requires org.elasticsearch.logging; - requires org.elasticsearch.sslconfig; exports org.elasticsearch.xpack.inference.action; exports org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 93743a5485c2c..ea92b7d98fe30 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -28,7 +28,6 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.node.PluginComponentBinding; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; @@ -46,7 +45,6 @@ import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceDiagnosticsAction; @@ -56,7 +54,6 @@ import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; -import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceEndpointAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceDiagnosticsAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; @@ -121,6 +118,7 @@ import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; +import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.singletonList; @@ -154,7 +152,6 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); - private final SetOnce elasicInferenceServiceFactory = new SetOnce<>(); 
private final SetOnce serviceComponents = new SetOnce<>(); private final SetOnce elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); @@ -237,31 +234,31 @@ public Collection createComponents(PluginServices services) { var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if (isElasticInferenceServiceEnabled()) { - // Create a separate instance of HTTPClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). - var elasticInferenceServiceHttpClientManager = HttpClientManager.create( - settings, - services.threadPool(), - services.clusterService(), - throttlerManager, - getSslService() - ); + // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing + // internal names like "eis" or "gateway". + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + + String elasticInferenceUrl = null; - var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( - serviceComponents.get(), - elasticInferenceServiceHttpClientManager, - services.clusterService() + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. Please use {}.", + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG ); - elasicInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); + elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); + } - ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); - String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings); + if (elasticInferenceUrl != null) { elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( () -> List.of( context -> new ElasticInferenceService( - elasicInferenceServiceFactory.get(), + httpFactory.get(), serviceComponents.get(), elasticInferenceServiceComponents.get() ) @@ -384,21 +381,16 @@ public static ExecutorBuilder inferenceUtilityExecutor(Settings settings) { @Override public List> getSettings() { - ArrayList> settings = new ArrayList<>(); - settings.addAll(HttpSettings.getSettingsDefinitions()); - settings.addAll(HttpClientManager.getSettingsDefinitions()); - settings.addAll(ThrottlerManager.getSettingsDefinitions()); - settings.addAll(RetrySettings.getSettingsDefinitions()); - settings.addAll(Truncator.getSettingsDefinitions()); - settings.addAll(RequestExecutorServiceSettings.getSettingsDefinitions()); - settings.add(SKIP_VALIDATE_AND_START); - - // Register Elastic Inference Service settings definitions if the corresponding feature flag is enabled. 
- if (isElasticInferenceServiceEnabled()) { - settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); - } - - return settings; + return Stream.of( + HttpSettings.getSettingsDefinitions(), + HttpClientManager.getSettingsDefinitions(), + ThrottlerManager.getSettingsDefinitions(), + RetrySettings.getSettingsDefinitions(), + ElasticInferenceServiceSettings.getSettingsDefinitions(), + Truncator.getSettingsDefinitions(), + RequestExecutorServiceSettings.getSettingsDefinitions(), + List.of(SKIP_VALIDATE_AND_START) + ).flatMap(Collection::stream).collect(Collectors.toList()); } @Override @@ -446,10 +438,7 @@ public List getQueryRewriteInterceptors() { @Override public List> getRetrievers() { return List.of( - new RetrieverSpec<>( - new ParseField(TextSimilarityRankBuilder.NAME), - (parser, context) -> TextSimilarityRankRetrieverBuilder.fromXContent(parser, context, getLicenseState()) - ), + new RetrieverSpec<>(new ParseField(TextSimilarityRankBuilder.NAME), TextSimilarityRankRetrieverBuilder::fromXContent), new RetrieverSpec<>(new ParseField(RandomRankBuilder.NAME), RandomRankRetrieverBuilder::fromXContent) ); } @@ -458,36 +447,4 @@ public List> getRetrievers() { public Map getHighlighters() { return Map.of(SemanticTextHighlighter.NAME, new SemanticTextHighlighter()); } - - // Get Elastic Inference service URL based on feature flags to support transitioning - // to the new Elastic Inference Service URL. - private String getElasticInferenceServiceUrl(ElasticInferenceServiceSettings settings) { - String elasticInferenceUrl = null; - - if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - elasticInferenceUrl = settings.getElasticInferenceServiceUrl(); - } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - log.warn( - "Deprecated flag {} detected for enabling {}. 
Please use {}.", - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, - ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG - ); - elasticInferenceUrl = settings.getEisGatewayUrl(); - } - - return elasticInferenceUrl; - } - - protected Boolean isElasticInferenceServiceEnabled() { - return (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()); - } - - protected SSLService getSslService() { - return XPackPlugin.getSharedSslService(); - } - - protected XPackLicenseState getLicenseState() { - return XPackPlugin.getSharedLicenseState(); - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index 6d09c9e67b363..e5d76b9bb5570 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -7,14 +7,9 @@ package org.elasticsearch.xpack.inference.external.http; -import org.apache.http.config.Registry; -import org.apache.http.config.RegistryBuilder; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; -import org.apache.http.nio.conn.NoopIOSessionStrategy; -import org.apache.http.nio.conn.SchemeIOSessionStrategy; -import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.http.pool.PoolStats; @@ -26,7 +21,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; @@ -34,13 +28,11 @@ import java.util.List; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX; public class HttpClientManager implements Closeable { private static final Logger logger = LogManager.getLogger(HttpClientManager.class); /** * The maximum number of total connections the connection pool can lease to all routes. - * The configuration applies to each instance of HTTPClientManager (max_total_connections=10 and instances=5 leads to 50 connections). * From googling around the connection pools maxTotal value should be close to the number of available threads. * * https://stackoverflow.com/questions/30989637/how-to-decide-optimal-settings-for-setmaxtotal-and-setdefaultmaxperroute @@ -55,7 +47,6 @@ public class HttpClientManager implements Closeable { /** * The max number of connections a single route can lease. - * This configuration applies to each instance of HttpClientManager. 
*/ public static final Setting MAX_ROUTE_CONNECTIONS = Setting.intSetting( "xpack.inference.http.max_route_connections", @@ -107,22 +98,6 @@ public static HttpClientManager create( return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); } - public static HttpClientManager create( - Settings settings, - ThreadPool threadPool, - ClusterService clusterService, - ThrottlerManager throttlerManager, - SSLService sslService - ) { - // Set the sslStrategy to ensure an encrypted connection, as Elastic Inference Service requires it. - SSLIOSessionStrategy sslioSessionStrategy = sslService.sslIOSessionStrategy( - sslService.getSSLConfiguration(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX) - ); - - PoolingNHttpClientConnectionManager connectionManager = createConnectionManager(sslioSessionStrategy); - return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); - } - // Default for testing HttpClientManager( Settings settings, @@ -146,25 +121,6 @@ public static HttpClientManager create( this.addSettingsUpdateConsumers(clusterService); } - private static PoolingNHttpClientConnectionManager createConnectionManager(SSLIOSessionStrategy sslStrategy) { - ConnectingIOReactor ioReactor; - try { - var configBuilder = IOReactorConfig.custom().setSoKeepAlive(true); - ioReactor = new DefaultConnectingIOReactor(configBuilder.build()); - } catch (IOReactorException e) { - var message = "Failed to initialize HTTP client manager with SSL."; - logger.error(message, e); - throw new ElasticsearchException(message, e); - } - - Registry registry = RegistryBuilder.create() - .register("http", NoopIOSessionStrategy.INSTANCE) - .register("https", sslStrategy) - .build(); - - return new PoolingNHttpClientConnectionManager(ioReactor, registry); - } - private static PoolingNHttpClientConnectionManager createConnectionManager() { ConnectingIOReactor ioReactor; try { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index f54696895a818..fd2427dc8ac6a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -12,7 +12,6 @@ import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; @@ -22,6 +21,7 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; import java.io.IOException; import java.util.List; @@ -73,11 +73,8 @@ public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder RetrieverBuilder.declareBaseParserFields(TextSimilarityRankBuilder.NAME, PARSER); } - public static TextSimilarityRankRetrieverBuilder fromXContent( - XContentParser parser, - RetrieverParserContext context, - XPackLicenseState licenceState 
- ) throws IOException { + public static TextSimilarityRankRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) + throws IOException { if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED) == false) { throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + TextSimilarityRankBuilder.NAME + "]"); } @@ -86,7 +83,7 @@ public static TextSimilarityRankRetrieverBuilder fromXContent( "[text_similarity_reranker] retriever composition feature is not supported by all nodes in the cluster" ); } - if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(licenceState) == false) { + if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { throw LicenseUtils.newComplianceException(TextSimilarityRankBuilder.NAME); } return PARSER.apply(parser, context); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 431a3647e2879..bc2daddc2a346 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -9,9 +9,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; -import java.util.ArrayList; import java.util.List; public class ElasticInferenceServiceSettings { @@ -19,8 +17,6 @@ public class ElasticInferenceServiceSettings { @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); - public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; - static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( "xpack.inference.elastic.url", Setting.Property.NodeScope @@ -35,27 +31,11 @@ public class ElasticInferenceServiceSettings { public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); - } - - public static final SSLConfigurationSettings ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS = SSLConfigurationSettings.withPrefix( - ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX, - false - ); - public static final Setting ELASTIC_INFERENCE_SERVICE_SSL_ENABLED = Setting.boolSetting( - ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX + "enabled", - true, - Setting.Property.NodeScope - ); + } public static List> getSettingsDefinitions() { - ArrayList> settings = new ArrayList<>(); - settings.add(EIS_GATEWAY_URL); - settings.add(ELASTIC_INFERENCE_SERVICE_URL); - settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED); - settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings()); - - return settings; + return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); } @Deprecated diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java index 
61033a0211065..bfec2d5ac3484 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.inference.InferencePlugin; import org.hamcrest.Matchers; @@ -29,7 +28,7 @@ public class SemanticTextClusterMetadataTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(XPackPlugin.class, InferencePlugin.class); + return List.of(InferencePlugin.class); } public void testCreateIndexWithSemanticTextField() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java deleted file mode 100644 index d1db5b8b12cc6..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; -import org.junit.After; -import org.junit.Before; - -import static org.hamcrest.Matchers.is; - -public class InferencePluginTests extends ESTestCase { - private InferencePlugin inferencePlugin; - - private Boolean elasticInferenceServiceEnabled = true; - - private void setElasticInferenceServiceEnabled(Boolean elasticInferenceServiceEnabled) { - this.elasticInferenceServiceEnabled = elasticInferenceServiceEnabled; - } - - @Before - public void setUp() throws Exception { - super.setUp(); - - Settings settings = Settings.builder().build(); - inferencePlugin = new InferencePlugin(settings) { - @Override - protected Boolean isElasticInferenceServiceEnabled() { - return elasticInferenceServiceEnabled; - } - }; - } - - @After - public void tearDown() throws Exception { - super.tearDown(); - } - - public void testElasticInferenceServiceSettingsPresent() throws Exception { - setElasticInferenceServiceEnabled(true); // enable elastic inference service - boolean anyMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .anyMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are present", anyMatch, is(true)); - } - - public void testElasticInferenceServiceSettingsNotPresent() throws Exception { - setElasticInferenceServiceEnabled(false); // disable elastic inference service - boolean noneMatch = inferencePlugin.getSettings() - .stream() - .map(Setting::getKey) - .noneMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); - - assertThat("xpack.inference.elastic settings are not present", noneMatch, is(true)); - } -} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java deleted file mode 100644 index 68ea175bd9870..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.action.support.MappedActionFilter; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.inference.InferenceServiceExtension; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.plugins.SearchPlugin; -import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; -import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; -import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; - -import java.nio.file.Path; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import static java.util.stream.Collectors.toList; - -public class LocalStateInferencePlugin extends LocalStateCompositeXPackPlugin { - private final InferencePlugin inferencePlugin; - - public LocalStateInferencePlugin(final Settings settings, final Path configPath) throws Exception { - super(settings, configPath); - LocalStateInferencePlugin thisVar = this; - this.inferencePlugin = new InferencePlugin(settings) { - @Override - protected SSLService getSslService() { - return thisVar.getSslService(); - } - - @Override - protected XPackLicenseState getLicenseState() { - return thisVar.getLicenseState(); - } - - @Override - public List getInferenceServiceFactories() { - return List.of( - TestSparseInferenceServiceExtension.TestInferenceService::new, - TestDenseInferenceServiceExtension.TestInferenceService::new - ); - } - }; - plugins.add(inferencePlugin); - } - - @Override - public List> getRetrievers() { - return this.filterPlugins(SearchPlugin.class).stream().flatMap(p -> p.getRetrievers().stream()).collect(toList()); - } - - @Override - public Map getMappers() { - return inferencePlugin.getMappers(); - } - - @Override - public Collection getMappedActionFilters() { - return inferencePlugin.getMappedActionFilters(); - } - -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index 0f322e64755be..9395ae222e9ba 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -142,6 +143,20 @@ private static void blockingCall( latch.await(); } + 
public static class TestInferencePlugin extends InferencePlugin { + public TestInferencePlugin(Settings settings) { + super(settings); + } + + @Override + public List getInferenceServiceFactories() { + return List.of( + TestSparseInferenceServiceExtension.TestInferenceService::new, + TestDenseInferenceServiceExtension.TestInferenceService::new + ); + } + } + public static Model getInvalidModel(String inferenceEntityId, String serviceName) { var mockConfigs = mock(ModelConfigurations.class); when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java index 24183b21f73e7..1f58c4165056d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.index.mapper.NonDynamicFieldMapperTests; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; import org.junit.Before; @@ -27,7 +26,7 @@ public void setup() throws Exception { @Override protected Collection> getPlugins() { - return List.of(LocalStateInferencePlugin.class); + return List.of(Utils.TestInferencePlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java index daed03c198e0d..6d6403b69ea11 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.InferencePlugin; import java.util.Collection; import java.util.List; @@ -40,7 +40,7 @@ protected RankBuilder getThrowingRankBuilder(int rankWindowSize, String rankFeat @Override protected Collection> pluginsNeeded() { - return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); } public void testQueryPhaseShardThrowingAllShardsFail() throws Exception { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java index ba6924ba0ff3b..084a7f3de4a53 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java @@ -24,7 +24,8 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.junit.Before; import java.io.IOException; @@ -46,7 +47,7 @@ protected boolean addMockHttpTransport() { @Override protected Collection> nodePlugins() { - return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(InferencePlugin.class, XPackPlugin.class, TextSimilarityTestPlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index f81f2965c392e..a042fca44fdb5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.junit.Before; import java.util.Collection; @@ -108,7 +108,7 @@ protected InferenceAction.Request generateRequest(List docFeatures) { @Override protected Collection> getPlugins() { - return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); } @Before diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index ff1a1d19779df..bab012afc3101 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.monitoring.Monitoring; import org.elasticsearch.xpack.security.Security; @@ -87,12 +86,6 @@ protected XPackLicenseState getLicenseState() { } }); plugins.add(new MockedRollupPlugin()); - plugins.add(new InferencePlugin(settings) { - @Override - protected SSLService getSslService() { - return thisVar.getSslService(); - } - }); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index 5cf15454e47f2..aeebfabdce704 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -82,6 +82,7 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; @@ -160,7 +161,8 @@ protected Collection> nodePlugins() { DataStreamsPlugin.class, // To remove errors from parsing build in templates that contain scaled_float MapperExtrasPlugin.class, - Wildcard.class + Wildcard.class, + InferencePlugin.class ); } diff --git a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java index 057ebdece5c61..ab5be0f48f5f3 100644 --- a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java +++ b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.frozen.FrozenIndices; import org.elasticsearch.xpack.graph.Graph; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.profiling.ProfilingPlugin; import org.elasticsearch.xpack.rollup.Rollup; import org.elasticsearch.xpack.search.AsyncSearch; @@ -88,6 +89,7 @@ protected Collection> getPlugins() { FrozenIndices.class, Graph.class, IndexLifecycle.class, + InferencePlugin.class, IngestCommonPlugin.class, IngestTestPlugin.class, MustachePlugin.class, From cf73860b583e14c11eab28197b15b247236e4021 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 18 Dec 2024 15:57:12 +0000 Subject: [PATCH 2/2] Revert "Remove pre-7.2 token serialization support (#118057)" (#118967) * Revert "Remove pre-7.2 token serialization support (#118057)" This reverts commit ec66857ca13e2f5e7f9088a30aa48ea5ddab17fa. 
* Add missing constant --- .../org/elasticsearch/TransportVersions.java | 3 + .../security/SecurityFeatureSetUsage.java | 12 +- .../support/TokensInvalidationResult.java | 6 + .../security/authc/TokenAuthIntegTests.java | 37 +-- .../xpack/security/authc/TokenService.java | 236 ++++++++++++----- .../security/authc/TokenServiceTests.java | 241 +++++++++++++++++- 6 files changed, 447 insertions(+), 88 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 388123e86c882..fd8a3987cf4d3 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -52,7 +52,10 @@ static TransportVersion def(int id) { @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove the transport versions with which v9 will not need to interact public static final TransportVersion ZERO = def(0); public static final TransportVersion V_7_0_0 = def(7_00_00_99); + public static final TransportVersion V_7_1_0 = def(7_01_00_99); + public static final TransportVersion V_7_2_0 = def(7_02_00_99); public static final TransportVersion V_7_3_0 = def(7_03_00_99); + public static final TransportVersion V_7_3_2 = def(7_03_02_99); public static final TransportVersion V_7_4_0 = def(7_04_00_99); public static final TransportVersion V_7_6_0 = def(7_06_00_99); public static final TransportVersion V_7_7_0 = def(7_07_00_99); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java index f44409daa37f8..3ebfad04a0f13 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java @@ -55,8 +55,10 @@ public SecurityFeatureSetUsage(StreamInput in) throws IOException { realmsUsage = in.readGenericMap(); rolesStoreUsage = in.readGenericMap(); sslUsage = in.readGenericMap(); - tokenServiceUsage = in.readGenericMap(); - apiKeyServiceUsage = in.readGenericMap(); + if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_2_0)) { + tokenServiceUsage = in.readGenericMap(); + apiKeyServiceUsage = in.readGenericMap(); + } auditUsage = in.readGenericMap(); ipFilterUsage = in.readGenericMap(); anonymousUsage = in.readGenericMap(); @@ -121,8 +123,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeGenericMap(realmsUsage); out.writeGenericMap(rolesStoreUsage); out.writeGenericMap(sslUsage); - out.writeGenericMap(tokenServiceUsage); - out.writeGenericMap(apiKeyServiceUsage); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_2_0)) { + out.writeGenericMap(tokenServiceUsage); + out.writeGenericMap(apiKeyServiceUsage); + } out.writeGenericMap(auditUsage); out.writeGenericMap(ipFilterUsage); out.writeGenericMap(anonymousUsage); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java index 59c16fc8a7a72..8fe018a825468 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java @@ -59,6 +59,9 @@ public TokensInvalidationResult(StreamInput in) throws IOException { this.invalidatedTokens = in.readStringCollectionAsList(); this.previouslyInvalidatedTokens = in.readStringCollectionAsList(); this.errors = in.readCollectionAsList(StreamInput::readException); + if (in.getTransportVersion().before(TransportVersions.V_7_2_0)) { + in.readVInt(); + } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { this.restStatus = RestStatus.readFrom(in); } @@ -108,6 +111,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(invalidatedTokens); out.writeStringCollection(previouslyInvalidatedTokens); out.writeCollection(errors, StreamOutput::writeException); + if (out.getTransportVersion().before(TransportVersions.V_7_2_0)) { + out.writeVInt(5); + } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { RestStatus.writeTo(out, restStatus); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index b56ea7ae3e456..fef1a98ca67e9 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -327,8 +327,8 @@ public void testInvalidateNotValidAccessTokens() throws Exception { ResponseException.class, () -> invalidateAccessToken( tokenService.prependVersionAndEncodeAccessToken( - TransportVersions.MINIMUM_COMPATIBLE, - tokenService.getRandomTokenBytes(TransportVersions.MINIMUM_COMPATIBLE, randomBoolean()).v1() + TransportVersions.V_7_3_2, + tokenService.getRandomTokenBytes(TransportVersions.V_7_3_2, randomBoolean()).v1() ) ) ); @@ -347,7 +347,7 @@ public void testInvalidateNotValidAccessTokens() throws Exception { byte[] longerAccessToken = new byte[randomIntBetween(17, 24)]; random().nextBytes(longerAccessToken); invalidateResponse = invalidateAccessToken( - tokenService.prependVersionAndEncodeAccessToken(TransportVersions.MINIMUM_COMPATIBLE, longerAccessToken) + tokenService.prependVersionAndEncodeAccessToken(TransportVersions.V_7_3_2, longerAccessToken) ); assertThat(invalidateResponse.invalidated(), equalTo(0)); assertThat(invalidateResponse.previouslyInvalidated(), equalTo(0)); @@ -365,7 +365,7 @@ public void testInvalidateNotValidAccessTokens() throws Exception { byte[] shorterAccessToken = new byte[randomIntBetween(12, 15)]; random().nextBytes(shorterAccessToken); invalidateResponse = invalidateAccessToken( - tokenService.prependVersionAndEncodeAccessToken(TransportVersions.MINIMUM_COMPATIBLE, shorterAccessToken) + tokenService.prependVersionAndEncodeAccessToken(TransportVersions.V_7_3_2, shorterAccessToken) ); assertThat(invalidateResponse.invalidated(), equalTo(0)); assertThat(invalidateResponse.previouslyInvalidated(), equalTo(0)); @@ -394,8 +394,8 @@ public void testInvalidateNotValidAccessTokens() throws Exception { invalidateResponse = invalidateAccessToken( tokenService.prependVersionAndEncodeAccessToken( - TransportVersions.MINIMUM_COMPATIBLE, - tokenService.getRandomTokenBytes(TransportVersions.MINIMUM_COMPATIBLE, randomBoolean()).v1() + TransportVersions.V_7_3_2, + 
tokenService.getRandomTokenBytes(TransportVersions.V_7_3_2, randomBoolean()).v1() ) ); assertThat(invalidateResponse.invalidated(), equalTo(0)); @@ -420,8 +420,8 @@ public void testInvalidateNotValidRefreshTokens() throws Exception { ResponseException.class, () -> invalidateRefreshToken( TokenService.prependVersionAndEncodeRefreshToken( - TransportVersions.MINIMUM_COMPATIBLE, - tokenService.getRandomTokenBytes(TransportVersions.MINIMUM_COMPATIBLE, true).v2() + TransportVersions.V_7_3_2, + tokenService.getRandomTokenBytes(TransportVersions.V_7_3_2, true).v2() ) ) ); @@ -441,7 +441,7 @@ public void testInvalidateNotValidRefreshTokens() throws Exception { byte[] longerRefreshToken = new byte[randomIntBetween(17, 24)]; random().nextBytes(longerRefreshToken); invalidateResponse = invalidateRefreshToken( - TokenService.prependVersionAndEncodeRefreshToken(TransportVersions.MINIMUM_COMPATIBLE, longerRefreshToken) + TokenService.prependVersionAndEncodeRefreshToken(TransportVersions.V_7_3_2, longerRefreshToken) ); assertThat(invalidateResponse.invalidated(), equalTo(0)); assertThat(invalidateResponse.previouslyInvalidated(), equalTo(0)); @@ -459,7 +459,7 @@ public void testInvalidateNotValidRefreshTokens() throws Exception { byte[] shorterRefreshToken = new byte[randomIntBetween(12, 15)]; random().nextBytes(shorterRefreshToken); invalidateResponse = invalidateRefreshToken( - TokenService.prependVersionAndEncodeRefreshToken(TransportVersions.MINIMUM_COMPATIBLE, shorterRefreshToken) + TokenService.prependVersionAndEncodeRefreshToken(TransportVersions.V_7_3_2, shorterRefreshToken) ); assertThat(invalidateResponse.invalidated(), equalTo(0)); assertThat(invalidateResponse.previouslyInvalidated(), equalTo(0)); @@ -488,8 +488,8 @@ public void testInvalidateNotValidRefreshTokens() throws Exception { invalidateResponse = invalidateRefreshToken( TokenService.prependVersionAndEncodeRefreshToken( - TransportVersions.MINIMUM_COMPATIBLE, - tokenService.getRandomTokenBytes(TransportVersions.MINIMUM_COMPATIBLE, true).v2() + TransportVersions.V_7_3_2, + tokenService.getRandomTokenBytes(TransportVersions.V_7_3_2, true).v2() ) ); assertThat(invalidateResponse.invalidated(), equalTo(0)); @@ -758,11 +758,18 @@ public void testAuthenticateWithWrongToken() throws Exception { assertAuthenticateWithToken(response.accessToken(), TEST_USER_NAME); // Now attempt to authenticate with an invalid access token string assertUnauthorizedToken(randomAlphaOfLengthBetween(0, 128)); - // Now attempt to authenticate with an invalid access token with valid structure (after 8.0 pre 8.10) + // Now attempt to authenticate with an invalid access token with valid structure (pre 7.2) assertUnauthorizedToken( tokenService.prependVersionAndEncodeAccessToken( - TransportVersions.V_8_0_0, - tokenService.getRandomTokenBytes(TransportVersions.V_8_0_0, randomBoolean()).v1() + TransportVersions.V_7_1_0, + tokenService.getRandomTokenBytes(TransportVersions.V_7_1_0, randomBoolean()).v1() + ) + ); + // Now attempt to authenticate with an invalid access token with valid structure (after 7.2 pre 8.10) + assertUnauthorizedToken( + tokenService.prependVersionAndEncodeAccessToken( + TransportVersions.V_7_4_0, + tokenService.getRandomTokenBytes(TransportVersions.V_7_4_0, randomBoolean()).v1() ) ); // Now attempt to authenticate with an invalid access token with valid structure (current version) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 900436a1fd874..4f7ba7808b823 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -48,7 +48,9 @@ import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -57,6 +59,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Streams; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -90,8 +93,10 @@ import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; +import java.io.OutputStream; import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; @@ -127,6 +132,7 @@ import javax.crypto.Cipher; import javax.crypto.CipherInputStream; +import javax.crypto.CipherOutputStream; import javax.crypto.NoSuchPaddingException; import javax.crypto.SecretKey; import javax.crypto.SecretKeyFactory; @@ -195,8 +201,14 @@ public class TokenService { // UUIDs are 16 bytes encoded base64 without padding, therefore the length is (16 / 3) * 4 + ((16 % 3) * 8 + 5) / 6 chars private static final int TOKEN_LENGTH = 22; private static final String TOKEN_DOC_ID_PREFIX = TOKEN_DOC_TYPE + "_"; + static final int LEGACY_MINIMUM_BYTES = VERSION_BYTES + SALT_BYTES + IV_BYTES + 1; static final int MINIMUM_BYTES = VERSION_BYTES + TOKEN_LENGTH + 1; + static final int LEGACY_MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * LEGACY_MINIMUM_BYTES) / 3)).intValue(); public static final int MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * MINIMUM_BYTES) / 3)).intValue(); + static final TransportVersion VERSION_HASHED_TOKENS = TransportVersions.V_7_2_0; + static final TransportVersion VERSION_TOKENS_INDEX_INTRODUCED = TransportVersions.V_7_2_0; + static final TransportVersion VERSION_ACCESS_TOKENS_AS_UUIDS = TransportVersions.V_7_2_0; + static final TransportVersion VERSION_MULTIPLE_CONCURRENT_REFRESHES = TransportVersions.V_7_2_0; static final TransportVersion VERSION_CLIENT_AUTH_FOR_REFRESH = TransportVersions.V_8_2_0; static final TransportVersion VERSION_GET_TOKEN_DOC_FOR_REFRESH = TransportVersions.V_8_10_X; @@ -261,7 +273,8 @@ public TokenService( /** * Creates an access token and optionally a refresh token as well, based on the provided authentication and metadata with - * auto-generated values. The created tokens are stored a specific security tokens index. + * auto-generated values. The created tokens are stored in the security index for versions up to + * {@link #VERSION_TOKENS_INDEX_INTRODUCED} and to a specific security tokens index for later versions. 
*/ public void createOAuth2Tokens( Authentication authentication, @@ -278,7 +291,8 @@ public void createOAuth2Tokens( /** * Creates an access token and optionally a refresh token as well from predefined values, based on the provided authentication and - * metadata. The created tokens are stored in a specific security tokens index. + * metadata. The created tokens are stored in the security index for versions up to {@link #VERSION_TOKENS_INDEX_INTRODUCED} and to a + * specific security tokens index for later versions. */ // public for testing public void createOAuth2Tokens( @@ -300,15 +314,21 @@ public void createOAuth2Tokens( * * @param accessTokenBytes The predefined seed value for the access token. This will then be *
-     *                             <li>Hashed before stored</li>
-     *                             <li>Stored in a specific security tokens index</li>
+     *                             <li>Encrypted before stored for versions before {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
+     *                             <li>Hashed before stored for versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
+     *                             <li>Stored in the security index for versions up to {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
+     *                             <li>Stored in a specific security tokens index for versions after
+     *                             {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
      *                             <li>Prepended with a version ID and Base64 encoded before returned to the caller of the APIs</li>
      *                         </ul>
      * @param refreshTokenBytes The predefined seed value for the access token. This will then be
      *                          <ul>
-     *                              <li>Hashed before stored</li>
-     *                              <li>Stored in a specific security tokens index</li>
-     *                              <li>Prepended with a version ID and Base64 encoded before returned to the caller of the APIs</li>
+     *                              <li>Hashed before stored for versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
+     *                              <li>Stored in the security index for versions up to {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
+     *                              <li>Stored in a specific security tokens index for versions after
+     *                              {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
+     *                              <li>Prepended with a version ID and encoded with Base64 before returned to the caller of the APIs
+     *                              for versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li>
      *                          </ul>
* @param tokenVersion The version of the nodes with which these tokens will be compatible. * @param authentication The authentication object representing the user for which the tokens are created @@ -364,7 +384,7 @@ private void createOAuth2Tokens( } else { refreshTokenToStore = refreshTokenToReturn = null; } - } else { + } else if (tokenVersion.onOrAfter(VERSION_HASHED_TOKENS)) { assert accessTokenBytes.length == RAW_TOKEN_BYTES_LENGTH; userTokenId = hashTokenString(Strings.BASE_64_NO_PADDING_URL_ENCODER.encodeToString(accessTokenBytes)); accessTokenToStore = null; @@ -375,6 +395,18 @@ private void createOAuth2Tokens( } else { refreshTokenToStore = refreshTokenToReturn = null; } + } else { + assert accessTokenBytes.length == RAW_TOKEN_BYTES_LENGTH; + userTokenId = Strings.BASE_64_NO_PADDING_URL_ENCODER.encodeToString(accessTokenBytes); + accessTokenToStore = null; + if (refreshTokenBytes != null) { + assert refreshTokenBytes.length == RAW_TOKEN_BYTES_LENGTH; + refreshTokenToStore = refreshTokenToReturn = Strings.BASE_64_NO_PADDING_URL_ENCODER.encodeToString( + refreshTokenBytes + ); + } else { + refreshTokenToStore = refreshTokenToReturn = null; + } } UserToken userToken = new UserToken(userTokenId, tokenVersion, tokenAuth, getExpirationTime(), metadata); tokenDocument = createTokenDocument(userToken, accessTokenToStore, refreshTokenToStore, originatingClientAuth); @@ -387,22 +419,23 @@ private void createOAuth2Tokens( final RefreshPolicy tokenCreationRefreshPolicy = tokenVersion.onOrAfter(VERSION_GET_TOKEN_DOC_FOR_REFRESH) ? RefreshPolicy.NONE : RefreshPolicy.WAIT_UNTIL; + final SecurityIndexManager tokensIndex = getTokensIndexForVersion(tokenVersion); logger.debug( () -> format( "Using refresh policy [%s] when creating token doc [%s] in the security index [%s]", tokenCreationRefreshPolicy, documentId, - securityTokensIndex.aliasName() + tokensIndex.aliasName() ) ); - final IndexRequest indexTokenRequest = client.prepareIndex(securityTokensIndex.aliasName()) + final IndexRequest indexTokenRequest = client.prepareIndex(tokensIndex.aliasName()) .setId(documentId) .setOpType(OpType.CREATE) .setSource(tokenDocument, XContentType.JSON) .setRefreshPolicy(tokenCreationRefreshPolicy) .request(); - securityTokensIndex.prepareIndexIfNeededThenExecute( - ex -> listener.onFailure(traceLog("prepare tokens index [" + securityTokensIndex.aliasName() + "]", documentId, ex)), + tokensIndex.prepareIndexIfNeededThenExecute( + ex -> listener.onFailure(traceLog("prepare tokens index [" + tokensIndex.aliasName() + "]", documentId, ex)), () -> executeAsyncWithOrigin( client, SECURITY_ORIGIN, @@ -521,16 +554,17 @@ private void getTokenDocById( @Nullable String storedRefreshToken, ActionListener listener ) { - final SecurityIndexManager frozenTokensIndex = securityTokensIndex.defensiveCopy(); + final SecurityIndexManager tokensIndex = getTokensIndexForVersion(tokenVersion); + final SecurityIndexManager frozenTokensIndex = tokensIndex.defensiveCopy(); if (frozenTokensIndex.isAvailable(PRIMARY_SHARDS) == false) { - logger.warn("failed to get access token [{}] because index [{}] is not available", tokenId, securityTokensIndex.aliasName()); + logger.warn("failed to get access token [{}] because index [{}] is not available", tokenId, tokensIndex.aliasName()); listener.onFailure(frozenTokensIndex.getUnavailableReason(PRIMARY_SHARDS)); return; } - final GetRequest getRequest = client.prepareGet(securityTokensIndex.aliasName(), getTokenDocumentId(tokenId)).request(); + final GetRequest getRequest = 
client.prepareGet(tokensIndex.aliasName(), getTokenDocumentId(tokenId)).request(); final Consumer onFailure = ex -> listener.onFailure(traceLog("get token from id", tokenId, ex)); - securityTokensIndex.checkIndexVersionThenExecute( - ex -> listener.onFailure(traceLog("prepare tokens index [" + securityTokensIndex.aliasName() + "]", tokenId, ex)), + tokensIndex.checkIndexVersionThenExecute( + ex -> listener.onFailure(traceLog("prepare tokens index [" + tokensIndex.aliasName() + "]", tokenId, ex)), () -> executeAsyncWithOrigin( client.threadPool().getThreadContext(), SECURITY_ORIGIN, @@ -576,11 +610,7 @@ private void getTokenDocById( // if the index or the shard is not there / available we assume that // the token is not valid if (isShardNotAvailableException(e)) { - logger.warn( - "failed to get token doc [{}] because index [{}] is not available", - tokenId, - securityTokensIndex.aliasName() - ); + logger.warn("failed to get token doc [{}] because index [{}] is not available", tokenId, tokensIndex.aliasName()); } else { logger.error(() -> "failed to get token doc [" + tokenId + "]", e); } @@ -620,7 +650,7 @@ void decodeToken(String token, boolean validateUserToken, ActionListener VERSION_ACCESS_TOKENS_UUIDS cluster if (in.available() < MINIMUM_BYTES) { logger.debug("invalid token, smaller than [{}] bytes", MINIMUM_BYTES); @@ -630,6 +660,41 @@ void decodeToken(String token, boolean validateUserToken, ActionListener { + if (decodeKey != null) { + try { + final Cipher cipher = getDecryptionCipher(iv, decodeKey, version, decodedSalt); + final String tokenId = decryptTokenId(encryptedTokenId, cipher, version); + getAndValidateUserToken(tokenId, version, null, validateUserToken, listener); + } catch (IOException | GeneralSecurityException e) { + // could happen with a token that is not ours + logger.warn("invalid token", e); + listener.onResponse(null); + } + } else { + // could happen with a token that is not ours + listener.onResponse(null); + } + }, listener::onFailure)); + } else { + logger.debug(() -> format("invalid key %s key: %s", passphraseHash, keyCache.cache.keySet())); + listener.onResponse(null); + } } } catch (Exception e) { // could happen with a token that is not ours @@ -787,7 +852,11 @@ private void indexInvalidation( final Set idsOfOlderTokens = new HashSet<>(); boolean anyOlderTokensBeforeRefreshViaGet = false; for (UserToken userToken : userTokens) { - idsOfRecentTokens.add(userToken.getId()); + if (userToken.getTransportVersion().onOrAfter(VERSION_TOKENS_INDEX_INTRODUCED)) { + idsOfRecentTokens.add(userToken.getId()); + } else { + idsOfOlderTokens.add(userToken.getId()); + } anyOlderTokensBeforeRefreshViaGet |= userToken.getTransportVersion().before(VERSION_GET_TOKEN_DOC_FOR_REFRESH); } final RefreshPolicy tokensInvalidationRefreshPolicy = anyOlderTokensBeforeRefreshViaGet @@ -1055,7 +1124,7 @@ private void findTokenFromRefreshToken(String refreshToken, Iterator ); getTokenDocById(userTokenId, version, null, storedRefreshToken, listener); } - } else { + } else if (version.onOrAfter(VERSION_HASHED_TOKENS)) { final String unencodedRefreshToken = in.readString(); if (unencodedRefreshToken.length() != TOKEN_LENGTH) { logger.debug("Decoded refresh token [{}] with version [{}] is invalid.", unencodedRefreshToken, version); @@ -1064,6 +1133,9 @@ private void findTokenFromRefreshToken(String refreshToken, Iterator final String hashedRefreshToken = hashTokenString(unencodedRefreshToken); findTokenFromRefreshToken(hashedRefreshToken, securityTokensIndex, backoff, listener); } + } else 
{ + logger.debug("Unrecognized refresh token version [{}].", version); + listener.onResponse(null); } } catch (IOException e) { logger.debug(() -> "Could not decode refresh token [" + refreshToken + "].", e); @@ -1178,6 +1250,7 @@ private void innerRefresh( return; } final RefreshTokenStatus refreshTokenStatus = checkRefreshResult.v1(); + final SecurityIndexManager refreshedTokenIndex = getTokensIndexForVersion(refreshTokenStatus.getTransportVersion()); if (refreshTokenStatus.isRefreshed()) { logger.debug( "Token document [{}] was recently refreshed, when a new token document was generated. Reusing that result.", @@ -1185,29 +1258,31 @@ private void innerRefresh( ); final Tuple parsedTokens = parseTokensFromDocument(tokenDoc.sourceAsMap(), null); Authentication authentication = parsedTokens.v1().getAuthentication(); - decryptAndReturnSupersedingTokens(refreshToken, refreshTokenStatus, securityTokensIndex, authentication, listener); + decryptAndReturnSupersedingTokens(refreshToken, refreshTokenStatus, refreshedTokenIndex, authentication, listener); } else { final TransportVersion newTokenVersion = getTokenVersionCompatibility(); final Tuple newTokenBytes = getRandomTokenBytes(newTokenVersion, true); final Map updateMap = new HashMap<>(); updateMap.put("refreshed", true); - updateMap.put("refresh_time", clock.instant().toEpochMilli()); - try { - final byte[] iv = getRandomBytes(IV_BYTES); - final byte[] salt = getRandomBytes(SALT_BYTES); - String encryptedAccessAndRefreshToken = encryptSupersedingTokens( - newTokenBytes.v1(), - newTokenBytes.v2(), - refreshToken, - iv, - salt - ); - updateMap.put("superseding.encrypted_tokens", encryptedAccessAndRefreshToken); - updateMap.put("superseding.encryption_iv", Base64.getEncoder().encodeToString(iv)); - updateMap.put("superseding.encryption_salt", Base64.getEncoder().encodeToString(salt)); - } catch (GeneralSecurityException e) { - logger.warn("could not encrypt access token and refresh token string", e); - onFailure.accept(invalidGrantException("could not refresh the requested token")); + if (newTokenVersion.onOrAfter(VERSION_MULTIPLE_CONCURRENT_REFRESHES)) { + updateMap.put("refresh_time", clock.instant().toEpochMilli()); + try { + final byte[] iv = getRandomBytes(IV_BYTES); + final byte[] salt = getRandomBytes(SALT_BYTES); + String encryptedAccessAndRefreshToken = encryptSupersedingTokens( + newTokenBytes.v1(), + newTokenBytes.v2(), + refreshToken, + iv, + salt + ); + updateMap.put("superseding.encrypted_tokens", encryptedAccessAndRefreshToken); + updateMap.put("superseding.encryption_iv", Base64.getEncoder().encodeToString(iv)); + updateMap.put("superseding.encryption_salt", Base64.getEncoder().encodeToString(salt)); + } catch (GeneralSecurityException e) { + logger.warn("could not encrypt access token and refresh token string", e); + onFailure.accept(invalidGrantException("could not refresh the requested token")); + } } assert tokenDoc.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO : "expected an assigned sequence number"; assert tokenDoc.primaryTerm() != SequenceNumbers.UNASSIGNED_PRIMARY_TERM : "expected an assigned primary term"; @@ -1218,17 +1293,17 @@ private void innerRefresh( "Using refresh policy [%s] when updating token doc [%s] for refresh in the security index [%s]", tokenRefreshUpdateRefreshPolicy, tokenDoc.id(), - securityTokensIndex.aliasName() + refreshedTokenIndex.aliasName() ) ); - final UpdateRequestBuilder updateRequest = client.prepareUpdate(securityTokensIndex.aliasName(), tokenDoc.id()) + final UpdateRequestBuilder 
updateRequest = client.prepareUpdate(refreshedTokenIndex.aliasName(), tokenDoc.id()) .setDoc("refresh_token", updateMap) .setFetchSource(logger.isDebugEnabled()) .setRefreshPolicy(tokenRefreshUpdateRefreshPolicy) .setIfSeqNo(tokenDoc.seqNo()) .setIfPrimaryTerm(tokenDoc.primaryTerm()); - securityTokensIndex.prepareIndexIfNeededThenExecute( - ex -> listener.onFailure(traceLog("prepare index [" + securityTokensIndex.aliasName() + "]", ex)), + refreshedTokenIndex.prepareIndexIfNeededThenExecute( + ex -> listener.onFailure(traceLog("prepare index [" + refreshedTokenIndex.aliasName() + "]", ex)), () -> executeAsyncWithOrigin( client.threadPool().getThreadContext(), SECURITY_ORIGIN, @@ -1274,7 +1349,7 @@ private void innerRefresh( if (cause instanceof VersionConflictEngineException) { // The document has been updated by another thread, get it again. logger.debug("version conflict while updating document [{}], attempting to get it again", tokenDoc.id()); - getTokenDocAsync(tokenDoc.id(), securityTokensIndex, true, new ActionListener<>() { + getTokenDocAsync(tokenDoc.id(), refreshedTokenIndex, true, new ActionListener<>() { @Override public void onResponse(GetResponse response) { if (response.isExists()) { @@ -1293,7 +1368,7 @@ public void onFailure(Exception e) { logger.info("could not get token document [{}] for refresh, retrying", tokenDoc.id()); client.threadPool() .schedule( - () -> getTokenDocAsync(tokenDoc.id(), securityTokensIndex, true, this), + () -> getTokenDocAsync(tokenDoc.id(), refreshedTokenIndex, true, this), backoff.next(), client.threadPool().generic() ); @@ -1614,13 +1689,17 @@ private static Optional checkMultipleRefreshes( RefreshTokenStatus refreshTokenStatus ) { if (refreshTokenStatus.isRefreshed()) { - if (refreshRequested.isAfter(refreshTokenStatus.getRefreshInstant().plus(30L, ChronoUnit.SECONDS))) { - return Optional.of(invalidGrantException("token has already been refreshed more than 30 seconds in the past")); - } - if (refreshRequested.isBefore(refreshTokenStatus.getRefreshInstant().minus(30L, ChronoUnit.SECONDS))) { - return Optional.of( - invalidGrantException("token has been refreshed more than 30 seconds in the future, clock skew too great") - ); + if (refreshTokenStatus.getTransportVersion().onOrAfter(VERSION_MULTIPLE_CONCURRENT_REFRESHES)) { + if (refreshRequested.isAfter(refreshTokenStatus.getRefreshInstant().plus(30L, ChronoUnit.SECONDS))) { + return Optional.of(invalidGrantException("token has already been refreshed more than 30 seconds in the past")); + } + if (refreshRequested.isBefore(refreshTokenStatus.getRefreshInstant().minus(30L, ChronoUnit.SECONDS))) { + return Optional.of( + invalidGrantException("token has been refreshed more than 30 seconds in the future, clock skew too great") + ); + } + } else { + return Optional.of(invalidGrantException("token has already been refreshed")); } } return Optional.empty(); @@ -1900,6 +1979,21 @@ private void ensureEnabled() { } } + /** + * In version {@code #VERSION_TOKENS_INDEX_INTRODUCED} security tokens were moved into a separate index, away from the other entities in + * the main security index, due to their ephemeral nature. They moved "seamlessly" - without manual user intervention. In this way, new + * tokens are created in the new index, while the existing ones were left in place - to be accessed from the old index - and due to be + * removed automatically by the {@code ExpiredTokenRemover} periodic job. Therefore, in general, when searching for a token we need to + * consider both the new and the old indices. 
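+     * <p>A rough sketch of the resulting routing, using the version constants defined above (illustrative only,
+     * not part of this change):
+     * <pre>{@code
+     * getTokensIndexForVersion(TransportVersions.V_7_1_0); // pre-7.2 token  -> main security index
+     * getTokensIndexForVersion(TransportVersions.V_7_2_0); // 7.2+ token     -> dedicated security tokens index
+     * }</pre>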
+ */ + private SecurityIndexManager getTokensIndexForVersion(TransportVersion version) { + if (version.onOrAfter(VERSION_TOKENS_INDEX_INTRODUCED)) { + return securityTokensIndex; + } else { + return securityMainIndex; + } + } + public TimeValue getExpirationDelay() { return expirationDelay; } @@ -1928,13 +2022,41 @@ public String prependVersionAndEncodeAccessToken(TransportVersion version, byte[ out.writeByteArray(accessTokenBytes); return Base64.getEncoder().encodeToString(out.bytes().toBytesRef().bytes); } - } else { + } else if (version.onOrAfter(VERSION_ACCESS_TOKENS_AS_UUIDS)) { try (BytesStreamOutput out = new BytesStreamOutput(MINIMUM_BASE64_BYTES)) { out.setTransportVersion(version); TransportVersion.writeVersion(version, out); out.writeString(Strings.BASE_64_NO_PADDING_URL_ENCODER.encodeToString(accessTokenBytes)); return Base64.getEncoder().encodeToString(out.bytes().toBytesRef().bytes); } + } else { + // we know that the minimum length is larger than the default of the ByteArrayOutputStream so set the size to this explicitly + try ( + ByteArrayOutputStream os = new ByteArrayOutputStream(LEGACY_MINIMUM_BASE64_BYTES); + OutputStream base64 = Base64.getEncoder().wrap(os); + StreamOutput out = new OutputStreamStreamOutput(base64) + ) { + out.setTransportVersion(version); + KeyAndCache keyAndCache = keyCache.activeKeyCache; + TransportVersion.writeVersion(version, out); + out.writeByteArray(keyAndCache.getSalt().bytes); + out.writeByteArray(keyAndCache.getKeyHash().bytes); + final byte[] initializationVector = getRandomBytes(IV_BYTES); + out.writeByteArray(initializationVector); + try ( + CipherOutputStream encryptedOutput = new CipherOutputStream( + out, + getEncryptionCipher(initializationVector, keyAndCache, version) + ); + StreamOutput encryptedStreamOutput = new OutputStreamStreamOutput(encryptedOutput) + ) { + encryptedStreamOutput.setTransportVersion(version); + encryptedStreamOutput.writeString(Strings.BASE_64_NO_PADDING_URL_ENCODER.encodeToString(accessTokenBytes)); + // StreamOutput needs to be closed explicitly because it wraps CipherOutputStream + encryptedStreamOutput.close(); + return new String(os.toByteArray(), StandardCharsets.UTF_8); + } + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 702af75141093..75c2507a1dc5f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -126,6 +126,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; @@ -147,6 +148,7 @@ public class TokenServiceTests extends ESTestCase { private SecurityIndexManager securityMainIndex; private SecurityIndexManager securityTokensIndex; private ClusterService clusterService; + private DiscoveryNode pre72OldNode; private DiscoveryNode pre8500040OldNode; private Settings tokenServiceEnabledSettings = Settings.builder() .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true) @@ -226,12 +228,31 @@ public void setupClient() { licenseState = mock(MockLicenseState.class); 
when(licenseState.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); + if (randomBoolean()) { + // version 7.2 was an "inflection" point in the Token Service development (access_tokens as UUIDS, multiple concurrent + // refreshes, + // tokens docs on a separate index) + pre72OldNode = addAnother7071DataNode(this.clusterService); + } if (randomBoolean()) { // before refresh tokens used GET, i.e. TokenService#VERSION_GET_TOKEN_DOC_FOR_REFRESH pre8500040OldNode = addAnotherPre8500DataNode(this.clusterService); } } + private static DiscoveryNode addAnother7071DataNode(ClusterService clusterService) { + Version version; + TransportVersion transportVersion; + if (randomBoolean()) { + version = Version.V_7_0_0; + transportVersion = TransportVersions.V_7_0_0; + } else { + version = Version.V_7_1_0; + transportVersion = TransportVersions.V_7_1_0; + } + return addAnotherDataNodeWithVersion(clusterService, version, transportVersion); + } + private static DiscoveryNode addAnotherPre8500DataNode(ClusterService clusterService) { Version version; TransportVersion transportVersion; @@ -280,6 +301,53 @@ public static void shutdownThreadpool() { threadPool = null; } + public void testAttachAndGetToken() throws Exception { + TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.2.0 nodes where the Token Service Key is used (to encrypt tokens) + if (null == pre72OldNode) { + pre72OldNode = addAnother7071DataNode(this.clusterService); + } + Authentication authentication = AuthenticationTestHelper.builder() + .user(new User("joe", "admin")) + .realmRef(new RealmRef("native_realm", "native", "node1")) + .build(false); + PlainActionFuture tokenFuture = new PlainActionFuture<>(); + Tuple newTokenBytes = tokenService.getRandomTokenBytes(randomBoolean()); + tokenService.createOAuth2Tokens( + newTokenBytes.v1(), + newTokenBytes.v2(), + authentication, + authentication, + Collections.emptyMap(), + tokenFuture + ); + final String accessToken = tokenFuture.get().getAccessToken(); + assertNotNull(accessToken); + mockGetTokenFromAccessTokenBytes(tokenService, newTokenBytes.v1(), authentication, false, null); + + ThreadContext requestContext = new ThreadContext(Settings.EMPTY); + requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + accessToken); + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContextPreservingResponseHeaders()) { + PlainActionFuture future = new PlainActionFuture<>(); + final SecureString bearerToken = Authenticator.extractBearerTokenFromHeader(requestContext); + tokenService.tryAuthenticateToken(bearerToken, future); + UserToken serialized = future.get(); + assertAuthentication(authentication, serialized.getAuthentication()); + } + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContextPreservingResponseHeaders()) { + // verify a second separate token service with its own salt can also verify + TokenService anotherService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + anotherService.refreshMetadata(tokenService.getTokenMetadata()); + PlainActionFuture future = new PlainActionFuture<>(); + final SecureString bearerToken = Authenticator.extractBearerTokenFromHeader(requestContext); + anotherService.tryAuthenticateToken(bearerToken, future); + UserToken fromOtherService = future.get(); + assertAuthentication(authentication, fromOtherService.getAuthentication()); + } + } + public void 
testInvalidAuthorizationHeader() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); @@ -296,6 +364,89 @@ public void testInvalidAuthorizationHeader() throws Exception { } } + public void testPassphraseWorks() throws Exception { + TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.1.0 nodes where the Key is actually used + if (null == pre72OldNode) { + pre72OldNode = addAnother7071DataNode(this.clusterService); + } + Authentication authentication = AuthenticationTestHelper.builder() + .user(new User("joe", "admin")) + .realmRef(new RealmRef("native_realm", "native", "node1")) + .build(false); + PlainActionFuture tokenFuture = new PlainActionFuture<>(); + Tuple newTokenBytes = tokenService.getRandomTokenBytes(randomBoolean()); + tokenService.createOAuth2Tokens( + newTokenBytes.v1(), + newTokenBytes.v2(), + authentication, + authentication, + Collections.emptyMap(), + tokenFuture + ); + final String accessToken = tokenFuture.get().getAccessToken(); + assertNotNull(accessToken); + mockGetTokenFromAccessTokenBytes(tokenService, newTokenBytes.v1(), authentication, false, null); + + ThreadContext requestContext = new ThreadContext(Settings.EMPTY); + storeTokenHeader(requestContext, accessToken); + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContextPreservingResponseHeaders()) { + PlainActionFuture future = new PlainActionFuture<>(); + final SecureString bearerToken = Authenticator.extractBearerTokenFromHeader(requestContext); + tokenService.tryAuthenticateToken(bearerToken, future); + UserToken serialized = future.get(); + assertAuthentication(authentication, serialized.getAuthentication()); + } + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContextPreservingResponseHeaders()) { + // verify a second separate token service with its own passphrase cannot verify + TokenService anotherService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + PlainActionFuture future = new PlainActionFuture<>(); + final SecureString bearerToken = Authenticator.extractBearerTokenFromHeader(requestContext); + anotherService.tryAuthenticateToken(bearerToken, future); + assertNull(future.get()); + } + } + + public void testGetTokenWhenKeyCacheHasExpired() throws Exception { + TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.1.0 nodes where the Key is actually used + if (null == pre72OldNode) { + pre72OldNode = addAnother7071DataNode(this.clusterService); + } + Authentication authentication = AuthenticationTestHelper.builder() + .user(new User("joe", "admin")) + .realmRef(new RealmRef("native_realm", "native", "node1")) + .build(false); + + PlainActionFuture tokenFuture = new PlainActionFuture<>(); + Tuple newTokenBytes = tokenService.getRandomTokenBytes(randomBoolean()); + tokenService.createOAuth2Tokens( + newTokenBytes.v1(), + newTokenBytes.v2(), + authentication, + authentication, + Collections.emptyMap(), + tokenFuture + ); + String accessToken = tokenFuture.get().getAccessToken(); + assertThat(accessToken, notNullValue()); + + tokenService.clearActiveKeyCache(); + + tokenService.createOAuth2Tokens( + newTokenBytes.v1(), + newTokenBytes.v2(), + authentication, + authentication, + Collections.emptyMap(), + tokenFuture + ); + 
accessToken = tokenFuture.get().getAccessToken(); + assertThat(accessToken, notNullValue()); + } + public void testAuthnWithInvalidatedToken() throws Exception { when(securityMainIndex.indexExists()).thenReturn(true); TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); @@ -669,6 +820,57 @@ public void testMalformedRefreshTokens() throws Exception { } } + public void testNonExistingPre72Token() throws Exception { + TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // mock another random token so that we don't find a token in TokenService#getUserTokenFromId + Authentication authentication = AuthenticationTestHelper.builder() + .user(new User("joe", "admin")) + .realmRef(new RealmRef("native_realm", "native", "node1")) + .build(false); + mockGetTokenFromAccessTokenBytes(tokenService, tokenService.getRandomTokenBytes(randomBoolean()).v1(), authentication, false, null); + ThreadContext requestContext = new ThreadContext(Settings.EMPTY); + storeTokenHeader( + requestContext, + tokenService.prependVersionAndEncodeAccessToken( + TransportVersions.V_7_1_0, + tokenService.getRandomTokenBytes(TransportVersions.V_7_1_0, randomBoolean()).v1() + ) + ); + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContextPreservingResponseHeaders()) { + PlainActionFuture future = new PlainActionFuture<>(); + final SecureString bearerToken = Authenticator.extractBearerTokenFromHeader(requestContext); + tokenService.tryAuthenticateToken(bearerToken, future); + assertNull(future.get()); + } + } + + public void testNonExistingUUIDToken() throws Exception { + TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // mock another random token so that we don't find a token in TokenService#getUserTokenFromId + Authentication authentication = AuthenticationTestHelper.builder() + .user(new User("joe", "admin")) + .realmRef(new RealmRef("native_realm", "native", "node1")) + .build(false); + mockGetTokenFromAccessTokenBytes(tokenService, tokenService.getRandomTokenBytes(randomBoolean()).v1(), authentication, false, null); + ThreadContext requestContext = new ThreadContext(Settings.EMPTY); + TransportVersion uuidTokenVersion = randomFrom(TransportVersions.V_7_2_0, TransportVersions.V_7_3_2); + storeTokenHeader( + requestContext, + tokenService.prependVersionAndEncodeAccessToken( + uuidTokenVersion, + tokenService.getRandomTokenBytes(uuidTokenVersion, randomBoolean()).v1() + ) + ); + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContextPreservingResponseHeaders()) { + PlainActionFuture future = new PlainActionFuture<>(); + final SecureString bearerToken = Authenticator.extractBearerTokenFromHeader(requestContext); + tokenService.tryAuthenticateToken(bearerToken, future); + assertNull(future.get()); + } + } + public void testNonExistingLatestTokenVersion() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); // mock another random token so that we don't find a token in TokenService#getUserTokenFromId @@ -723,11 +925,18 @@ public void testIndexNotAvailable() throws Exception { return Void.TYPE; }).when(client).get(any(GetRequest.class), anyActionListener()); - final SecurityIndexManager tokensIndex = securityTokensIndex; - when(securityMainIndex.isAvailable(SecurityIndexManager.Availability.PRIMARY_SHARDS)).thenReturn(false); - when(securityMainIndex.indexExists()).thenReturn(false); - 
when(securityMainIndex.defensiveCopy()).thenReturn(securityMainIndex); - + final SecurityIndexManager tokensIndex; + if (pre72OldNode != null) { + tokensIndex = securityMainIndex; + when(securityTokensIndex.isAvailable(SecurityIndexManager.Availability.PRIMARY_SHARDS)).thenReturn(false); + when(securityTokensIndex.indexExists()).thenReturn(false); + when(securityTokensIndex.defensiveCopy()).thenReturn(securityTokensIndex); + } else { + tokensIndex = securityTokensIndex; + when(securityMainIndex.isAvailable(SecurityIndexManager.Availability.PRIMARY_SHARDS)).thenReturn(false); + when(securityMainIndex.indexExists()).thenReturn(false); + when(securityMainIndex.defensiveCopy()).thenReturn(securityMainIndex); + } try (ThreadContext.StoredContext ignore = requestContext.newStoredContextPreservingResponseHeaders()) { PlainActionFuture future = new PlainActionFuture<>(); final SecureString bearerToken3 = Authenticator.extractBearerTokenFromHeader(requestContext); @@ -779,6 +988,7 @@ public void testGetAuthenticationWorksWithExpiredUserToken() throws Exception { } public void testSupersedingTokenEncryption() throws Exception { + assumeTrue("Superseding tokens are only created in post 7.2 clusters", pre72OldNode == null); TokenService tokenService = createTokenService(tokenServiceEnabledSettings, Clock.systemUTC()); Authentication authentication = AuthenticationTests.randomAuthentication(null, null); PlainActionFuture tokenFuture = new PlainActionFuture<>(); @@ -813,11 +1023,13 @@ public void testSupersedingTokenEncryption() throws Exception { authentication, tokenFuture ); - - assertThat( - tokenService.prependVersionAndEncodeAccessToken(version, newTokenBytes.v1()), - equalTo(tokenFuture.get().getAccessToken()) - ); + if (version.onOrAfter(TokenService.VERSION_ACCESS_TOKENS_AS_UUIDS)) { + // previous versions serialized the access token encrypted and the cipher text was different each time (due to different IVs) + assertThat( + tokenService.prependVersionAndEncodeAccessToken(version, newTokenBytes.v1()), + equalTo(tokenFuture.get().getAccessToken()) + ); + } assertThat( TokenService.prependVersionAndEncodeRefreshToken(version, newTokenBytes.v2()), equalTo(tokenFuture.get().getRefreshToken()) @@ -946,8 +1158,10 @@ public static String tokenDocIdFromAccessTokenBytes(byte[] accessTokenBytes, Tra MessageDigest userTokenIdDigest = sha256(); userTokenIdDigest.update(accessTokenBytes, RAW_TOKEN_BYTES_LENGTH, RAW_TOKEN_DOC_ID_BYTES_LENGTH); return Base64.getUrlEncoder().withoutPadding().encodeToString(userTokenIdDigest.digest()); - } else { + } else if (tokenVersion.onOrAfter(TokenService.VERSION_ACCESS_TOKENS_AS_UUIDS)) { return TokenService.hashTokenString(Base64.getUrlEncoder().withoutPadding().encodeToString(accessTokenBytes)); + } else { + return Base64.getUrlEncoder().withoutPadding().encodeToString(accessTokenBytes); } } @@ -964,9 +1178,12 @@ private void mockTokenForRefreshToken( if (userToken.getTransportVersion().onOrAfter(VERSION_GET_TOKEN_DOC_FOR_REFRESH)) { storedAccessToken = Base64.getUrlEncoder().withoutPadding().encodeToString(sha256().digest(accessTokenBytes)); storedRefreshToken = Base64.getUrlEncoder().withoutPadding().encodeToString(sha256().digest(refreshTokenBytes)); - } else { + } else if (userToken.getTransportVersion().onOrAfter(TokenService.VERSION_HASHED_TOKENS)) { storedAccessToken = null; storedRefreshToken = TokenService.hashTokenString(Base64.getUrlEncoder().withoutPadding().encodeToString(refreshTokenBytes)); + } else { + storedAccessToken = null; + storedRefreshToken = 
Base64.getUrlEncoder().withoutPadding().encodeToString(refreshTokenBytes); } final RealmRef realmRef = new RealmRef( refreshTokenStatus == null ? randomAlphaOfLength(6) : refreshTokenStatus.getAssociatedRealm(),