-
Notifications
You must be signed in to change notification settings - Fork 33
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Signed-off-by: Tomoyuki Morita <[email protected]>
- Loading branch information
Showing
9 changed files
with
315 additions
and
24 deletions.
There are no files selected for viewing
68 changes: 68 additions & 0 deletions
68
flint-core/src/main/java/org/opensearch/flint/core/metrics/HistoricGauge.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
/* | ||
* Copyright OpenSearch Contributors | ||
* SPDX-License-Identifier: Apache-2.0 | ||
*/ | ||
|
||
package org.opensearch.flint.core.metrics; | ||
|
||
import com.codahale.metrics.Gauge; | ||
import java.util.ArrayList; | ||
import java.util.Collections; | ||
import java.util.LinkedList; | ||
import java.util.List; | ||
|
||
/** | ||
* Gauge which stores historic data points with timestamps. | ||
* This is used for emitting separate data points per request, instead of single aggregated metrics. | ||
*/ | ||
public class HistoricGauge implements Gauge<Long> { | ||
public static class DataPoint { | ||
Long value; | ||
long timestamp; | ||
|
||
DataPoint(long value, long timestamp) { | ||
this.value = value; | ||
this.timestamp = timestamp; | ||
} | ||
|
||
public Long getValue() { | ||
return value; | ||
} | ||
|
||
public long getTimestamp() { | ||
return timestamp; | ||
} | ||
} | ||
|
||
private final List<DataPoint> dataPoints = Collections.synchronizedList(new LinkedList<>()); | ||
|
||
/** | ||
* This method will just return first value. | ||
* @return | ||
*/ | ||
@Override | ||
public Long getValue() { | ||
if (!dataPoints.isEmpty()) { | ||
return dataPoints.get(0).value; | ||
} else { | ||
return null; | ||
} | ||
} | ||
|
||
public void addDataPoint(Long value) { | ||
dataPoints.add(new DataPoint(value, System.currentTimeMillis())); | ||
} | ||
|
||
/** | ||
* Return copy of dataPoints and remove them from internal list | ||
* @return copy of the data points | ||
*/ | ||
public List<DataPoint> pollDataPoints() { | ||
int size = dataPoints.size(); | ||
List<DataPoint> result = new ArrayList<>(dataPoints.subList(0, size)); | ||
if (size > 0) { | ||
dataPoints.subList(0, size).clear(); | ||
} | ||
return result; | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
58 changes: 58 additions & 0 deletions
58
...t-core/src/main/scala/org/opensearch/flint/core/metrics/ReadWriteBytesSparkListener.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,58 @@ | ||
/* | ||
* Copyright OpenSearch Contributors | ||
* SPDX-License-Identifier: Apache-2.0 | ||
*/ | ||
|
||
package org.opensearch.flint.core.metrics | ||
|
||
import org.apache.spark.internal.Logging | ||
import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd} | ||
import org.apache.spark.sql.SparkSession | ||
|
||
/**
 * Spark listener that collects and emits bytesRead/Written and recordsRead/Written metrics.
 *
 * Totals are accumulated across every completed task; call [[emitMetrics]] once the job
 * finishes to publish them as historic gauge data points.
 */
class ReadWriteBytesSparkListener extends SparkListener with Logging {
  // Running totals, incremented on every task completion.
  var bytesRead: Long = 0
  var recordsRead: Long = 0
  var bytesWritten: Long = 0
  var recordsWritten: Long = 0

  /** Logs per-task input/output metrics and folds them into the running totals. */
  override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
    val in = taskEnd.taskMetrics.inputMetrics
    val out = taskEnd.taskMetrics.outputMetrics
    val ids = s"(${taskEnd.taskInfo.taskId}, ${taskEnd.taskInfo.partitionId})"
    logInfo(s"${ids} Input: bytesRead=${in.bytesRead}, recordsRead=${in.recordsRead}")
    logInfo(s"${ids} Output: bytesWritten=${out.bytesWritten}, recordsWritten=${out.recordsWritten}")

    bytesRead += in.bytesRead
    recordsRead += in.recordsRead
    bytesWritten += out.bytesWritten
    recordsWritten += out.recordsWritten
  }

  /** Logs the accumulated totals and publishes them via historic gauges. */
  def emitMetrics(): Unit = {
    logInfo(s"Input: totalBytesRead=${bytesRead}, totalRecordsRead=${recordsRead}")
    logInfo(s"Output: totalBytesWritten=${bytesWritten}, totalRecordsWritten=${recordsWritten}")
    MetricsUtil.addHistoricGauge(MetricConstants.INPUT_TOTAL_BYTES_READ, bytesRead)
    MetricsUtil.addHistoricGauge(MetricConstants.INPUT_TOTAL_RECORDS_READ, recordsRead)
    MetricsUtil.addHistoricGauge(MetricConstants.OUTPUT_TOTAL_BYTES_WRITTEN, bytesWritten)
    MetricsUtil.addHistoricGauge(MetricConstants.OUTPUT_TOTAL_RECORDS_WRITTEN, recordsWritten)
  }
}
|
||
object ReadWriteBytesSparkListener {

  /**
   * Runs `lambda` with a [[ReadWriteBytesSparkListener]] registered on `spark`'s context,
   * then detaches the listener and emits the collected read/write metrics.
   *
   * The listener is removed and metrics are emitted in a `finally` block so that a failing
   * `lambda` neither leaks the listener registration on the SparkContext nor silently drops
   * the I/O metrics gathered before the failure. The original exception is propagated.
   *
   * @param spark  session whose SparkContext the listener is attached to
   * @param lambda work to measure
   * @return the value produced by `lambda`
   */
  def withMetrics[T](spark: SparkSession, lambda: () => T): T = {
    val listener = new ReadWriteBytesSparkListener()
    spark.sparkContext.addSparkListener(listener)
    try {
      lambda()
    } finally {
      spark.sparkContext.removeSparkListener(listener)
      listener.emitMetrics()
    }
  }
}
79 changes: 79 additions & 0 deletions
79
flint-core/src/test/java/org/opensearch/flint/core/metrics/HistoricGaugeTest.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,79 @@ | ||
/* | ||
* Copyright OpenSearch Contributors | ||
* SPDX-License-Identifier: Apache-2.0 | ||
*/ | ||
|
||
package org.opensearch.flint.core.metrics; | ||
|
||
import org.junit.Test; | ||
import static org.junit.Assert.*; | ||
import org.opensearch.flint.core.metrics.HistoricGauge.DataPoint; | ||
|
||
import java.util.List; | ||
|
||
public class HistoricGaugeTest { | ||
|
||
@Test | ||
public void testGetValue_EmptyGauge_ShouldReturnNull() { | ||
HistoricGauge gauge= new HistoricGauge(); | ||
assertNull(gauge.getValue()); | ||
} | ||
|
||
@Test | ||
public void testGetValue_WithSingleDataPoint_ShouldReturnFirstValue() { | ||
HistoricGauge gauge= new HistoricGauge(); | ||
Long value = 100L; | ||
gauge.addDataPoint(value); | ||
|
||
assertEquals(value, gauge.getValue()); | ||
} | ||
|
||
@Test | ||
public void testGetValue_WithMultipleDataPoints_ShouldReturnFirstValue() { | ||
HistoricGauge gauge= new HistoricGauge(); | ||
Long firstValue = 100L; | ||
Long secondValue = 200L; | ||
gauge.addDataPoint(firstValue); | ||
gauge.addDataPoint(secondValue); | ||
|
||
assertEquals(firstValue, gauge.getValue()); | ||
} | ||
|
||
@Test | ||
public void testPollDataPoints_WithMultipleDataPoints_ShouldReturnAndClearDataPoints() { | ||
HistoricGauge gauge= new HistoricGauge(); | ||
gauge.addDataPoint(100L); | ||
gauge.addDataPoint(200L); | ||
gauge.addDataPoint(300L); | ||
|
||
List<DataPoint> dataPoints = gauge.pollDataPoints(); | ||
|
||
assertEquals(3, dataPoints.size()); | ||
assertEquals(Long.valueOf(100L), dataPoints.get(0).getValue()); | ||
assertEquals(Long.valueOf(200L), dataPoints.get(1).getValue()); | ||
assertEquals(Long.valueOf(300L), dataPoints.get(2).getValue()); | ||
|
||
assertTrue(gauge.pollDataPoints().isEmpty()); | ||
} | ||
|
||
@Test | ||
public void testAddDataPoint_ShouldAddDataPointWithCorrectValueAndTimestamp() { | ||
HistoricGauge gauge= new HistoricGauge(); | ||
Long value = 100L; | ||
gauge.addDataPoint(value); | ||
|
||
List<DataPoint> dataPoints = gauge.pollDataPoints(); | ||
|
||
assertEquals(1, dataPoints.size()); | ||
assertEquals(value, dataPoints.get(0).getValue()); | ||
assertTrue(dataPoints.get(0).getTimestamp() > 0); | ||
} | ||
|
||
@Test | ||
public void testPollDataPoints_EmptyGauge_ShouldReturnEmptyList() { | ||
HistoricGauge gauge= new HistoricGauge(); | ||
List<DataPoint> dataPoints = gauge.pollDataPoints(); | ||
|
||
assertTrue(dataPoints.isEmpty()); | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.