Add flint opensearch metrics
Signed-off-by: Vamsi Manohar <[email protected]>
vmmusings committed Dec 27, 2023
1 parent 9061eb9 commit 2b7eb5d
Showing 5 changed files with 187 additions and 3 deletions.
@@ -0,0 +1,12 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.apache.spark.metrics.source

import com.codahale.metrics.MetricRegistry

/**
* Metric source backed by a single Codahale MetricRegistry; counters registered
* here surface through Spark's MetricsSystem under `sourceName`.
*/
class FlintMetricSource(val sourceName: String) extends Source {
override val metricRegistry: MetricRegistry = new MetricRegistry
}
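For orientation, a minimal sketch of how this source gets wired up, mirroring the FlintREPL changes below. It assumes a running Spark application (so SparkEnv.get is non-null); the package declaration matters because Spark's MetricsSystem is package-private to org.apache.spark, and the counter name is illustrative:

package org.apache.spark.sql

import org.apache.spark.SparkEnv
import org.apache.spark.metrics.source.FlintMetricSource

object MetricSourceSketch {
  def wireUp(): Unit = {
    // Register the source with Spark's metrics system once, then resolve it by name later.
    val source = new FlintMetricSource("FlintMetricSource")
    SparkEnv.get.metricsSystem.registerSource(source)

    // Counters are created on first use; later calls with the same name return the same instance.
    source.metricRegistry.counter("SampleErrorCount").inc()
  }
}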
@@ -5,6 +5,7 @@

package org.opensearch.flint.core;

import org.opensearch.flint.core.metrics.FlintOpensearchClientMetricsWrapper;
import org.opensearch.flint.core.storage.FlintOpenSearchClient;

/**
@@ -13,6 +14,6 @@
public class FlintClientBuilder {

public static FlintClient build(FlintOptions options) {
return new FlintOpenSearchClient(options);
return new FlintOpensearchClientMetricsWrapper(new FlintOpenSearchClient(options));
}
}
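The builder change is a plain decorator: every FlintClient obtained here is now instrumented, while call sites stay untouched. A minimal sketch, assuming the caller supplies a FlintOptions instance:

import org.opensearch.flint.core.{FlintClient, FlintClientBuilder, FlintOptions}

def buildClient(flintOptions: FlintOptions): FlintClient = {
  // build() hands back the metrics wrapper, but callers only ever see the FlintClient interface.
  FlintClientBuilder.build(flintOptions)
}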
@@ -0,0 +1,147 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.flint.core.metrics;

import com.amazonaws.services.opensearch.model.AccessDeniedException;
import com.codahale.metrics.Counter;
import java.util.List;
import java.util.function.Supplier;
import org.apache.spark.SparkEnv;
import org.apache.spark.metrics.source.FlintMetricSource;
import org.apache.spark.metrics.source.Source;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.flint.core.FlintClient;
import org.opensearch.flint.core.metadata.FlintMetadata;
import org.opensearch.flint.core.metadata.log.OptimisticTransaction;
import org.opensearch.flint.core.metrics.reporter.DimensionedName;
import org.opensearch.flint.core.storage.FlintOpenSearchClient;
import org.opensearch.flint.core.storage.FlintReader;
import org.opensearch.flint.core.storage.FlintWriter;
import scala.collection.Seq;

/**
* Wraps {@link FlintOpenSearchClient} and emits Spark metrics to {@link FlintMetricSource}
* whenever a delegated call fails.
*/
public class FlintOpensearchClientMetricsWrapper implements FlintClient {

private final FlintOpenSearchClient delegate;

public FlintOpensearchClientMetricsWrapper(FlintOpenSearchClient delegate) {
this.delegate = delegate;
}

@Override
public <T> OptimisticTransaction<T> startTransaction(String indexName, String dataSourceName) {
return handleExceptions(() -> delegate.startTransaction(indexName, dataSourceName));
}

@Override
public <T> OptimisticTransaction<T> startTransaction(String indexName, String dataSourceName,
boolean forceInit) {
return handleExceptions(() -> delegate.startTransaction(indexName, dataSourceName, forceInit));
}

// Void operations can't flow through the Supplier-based helper below, so the catch blocks are inlined.
@Override
public void createIndex(String indexName, FlintMetadata metadata) {
try {
delegate.createIndex(indexName, metadata);
} catch (AccessDeniedException exception) {
handleAccessDeniedException();
throw exception;
} catch (Throwable t) {
handleThrowable();
throw t;
}
}

@Override
public boolean exists(String indexName) {
return handleExceptions(() -> delegate.exists(indexName));
}

@Override
public List<FlintMetadata> getAllIndexMetadata(String indexNamePattern) {
return handleExceptions(() -> delegate.getAllIndexMetadata(indexNamePattern));
}

@Override
public FlintMetadata getIndexMetadata(String indexName) {
return handleExceptions(() -> delegate.getIndexMetadata(indexName));
}

@Override
public void deleteIndex(String indexName) {
try {
delegate.deleteIndex(indexName);
} catch (AccessDeniedException exception) {
handleAccessDeniedException();
throw exception;
} catch (Throwable t) {
handleThrowable();
throw t;
}
}

@Override
public FlintReader createReader(String indexName, String query) {
return handleExceptions(() -> delegate.createReader(indexName, query));
}

@Override
public FlintWriter createWriter(String indexName) {
return handleExceptions(() -> delegate.createWriter(indexName));
}

@Override
public RestHighLevelClient createClient() {
return handleExceptions(delegate::createClient);
}

// Counts the failure in the metric source, then rethrows; non-AccessDenied failures are wrapped in RuntimeException.
private <T> T handleExceptions(Supplier<T> function) {
try {
return function.get();
} catch (AccessDeniedException exception) {
handleAccessDeniedException();
throw exception;
} catch (Throwable t) {
handleThrowable();
throw new RuntimeException(t);
}
}

private void handleThrowable() {
publishMetric(buildErrorMetric("FlintOpenSearchAccessError"));
}

private void handleAccessDeniedException() {
publishMetric(buildErrorMetric("FlintOpenSearchAccessDeniedError"));
}

// Tags the error metric with the domain identifier so failures can be attributed per cluster.
private DimensionedName buildErrorMetric(String name) {
String clusterName = System.getenv("FLINT_AUTH_DOMAIN_IDENTIFIER");
if (clusterName == null) {
clusterName = "unknown";
}
return DimensionedName.withName(name)
.withDimension("domain_ident", clusterName)
.build();
}

private void publishMetric(DimensionedName metricName) {
// getSourcesByName returns a Seq of sources registered under this name; take the first one.
Seq<Source> sources =
SparkEnv.get().metricsSystem().getSourcesByName("FlintMetricSource");
if (sources != null && !sources.isEmpty()) {
FlintMetricSource flintMetricSource = (FlintMetricSource) sources.head();
// counter() returns the existing counter or registers a new one, so the error is always counted.
Counter counter = flintMetricSource.metricRegistry().counter(metricName.encode());
counter.inc();
}
}

}
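DimensionedName packs a metric name and its dimensions into a single Codahale registry key, which is what publishMetric counts against. A short sketch of the builder API used above (the domain value is illustrative, and the exact encoded string format is internal to DimensionedName):

import org.opensearch.flint.core.metrics.reporter.DimensionedName

val metric = DimensionedName
  .withName("FlintOpenSearchAccessError")
  .withDimension("domain_ident", "my-domain")
  .build()

// encode() flattens the name and dimensions into one registry key string.
val registryKey: String = metric.encode()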
@@ -11,6 +11,7 @@ import scala.concurrent.{ExecutionContext, Future, TimeoutException}
import scala.concurrent.duration.{Duration, MINUTES}

import com.amazonaws.services.s3.model.AmazonS3Exception
import org.apache.commons.lang3.StringUtils
import org.opensearch.flint.core.FlintClient
import org.opensearch.flint.core.metadata.FlintMetadata
import play.api.libs.json.{JsArray, JsBoolean, JsObject, Json, JsString, JsValue}
@@ -84,6 +85,7 @@ trait FlintJobExecutor {
.set(
"spark.sql.extensions",
"org.opensearch.flint.spark.FlintPPLSparkExtensions,org.opensearch.flint.spark.FlintSparkExtensions")
.set("spark.metrics.namespace", StringUtils.EMPTY)
}

def createSparkSession(conf: SparkConf): SparkSession = {
@@ -18,10 +18,12 @@ import org.opensearch.action.get.GetResponse
import org.opensearch.common.Strings
import org.opensearch.flint.app.{FlintCommand, FlintInstance}
import org.opensearch.flint.app.FlintInstance.formats
import org.opensearch.flint.core.metrics.reporter.DimensionedName
import org.opensearch.flint.core.storage.{FlintReader, OpenSearchUpdater}

import org.apache.spark.SparkConf
import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.internal.Logging
import org.apache.spark.metrics.source.FlintMetricSource
import org.apache.spark.sql.flint.config.FlintSparkConf
import org.apache.spark.sql.util.{DefaultShutdownHookManager, ShutdownHookManagerTrait}
import org.apache.spark.util.ThreadUtils
@@ -91,10 +93,30 @@ object FlintREPL extends Logging with FlintJobExecutor {
}

val spark = createSparkSession(conf)
val flintMetricSource = new FlintMetricSource("FlintMetricSource")
SparkEnv.get.metricsSystem.registerSource(flintMetricSource)

// Sample counters exercising the metric path end to end; each is bumped ten times.
val dimensionedName1 = DimensionedName
.withName("FlintOpensearchErrorCount")
.withDimension("domain_ident", "88888:hello")
.build()
flintMetricSource.metricRegistry.counter(dimensionedName1.encode())
(1 to 10).foreach(_ =>
flintMetricSource.metricRegistry.getCounters().get(dimensionedName1.encode()).inc())
val dimensionedName2 = DimensionedName
.withName("OpensearchErrorCount")
.withDimension("domain_ident", "88888:hello")
.build()
flintMetricSource.metricRegistry.counter(dimensionedName2.encode())
(1 to 10).foreach(_ =>
flintMetricSource.metricRegistry.getCounters().get(dimensionedName2.encode()).inc())
logInfo("Reached after metrics")
val osClient = new OSClient(FlintSparkConf().flintOptions())
val jobId = sys.env.getOrElse("SERVERLESS_EMR_JOB_ID", "unknown")
val applicationId = sys.env.getOrElse("SERVERLESS_EMR_VIRTUAL_CLUSTER_ID", "unknown")

val doesVamsiExist = osClient.doesIndexExist("vamsi")
logInfo(s"Vamsi Exists : $doesVamsiExist")
// Read the values from the Spark configuration or fall back to the default values
val inactivityLimitMillis: Long =
conf.getLong("spark.flint.job.inactivityLimitMillis", DEFAULT_INACTIVITY_LIMIT_MILLIS)
