[unit perf tests] AT-726 Report custom meter metrics

GitOrigin-RevId: 359b969be510af80e708b60a488df78fda18ac4a
Nikita Kudrin
2024-02-09 18:00:34 +02:00
committed by intellij-monorepo-bot
parent 7e25769a84
commit 2921a8abf1
7 changed files with 86 additions and 27 deletions

View File

@@ -45,6 +45,7 @@
     <orderEntry type="library" scope="TEST" name="jackson-databind" level="project" />
     <orderEntry type="library" scope="TEST" name="jackson-module-kotlin" level="project" />
     <orderEntry type="library" scope="TEST" name="http-client" level="project" />
+    <orderEntry type="module" module-name="intellij.tools.ide.metrics.collector" />
     <orderEntry type="module-library" scope="TEST">
       <library name="okhttp3-mockwebserver" type="repository">
         <properties maven-id="com.squareup.okhttp3:mockwebserver:5.0.0-alpha.11">

View File

@@ -4,16 +4,17 @@
 package com.intellij.platform.testFramework.diagnostic

 import com.intellij.openapi.diagnostic.logger
+import com.intellij.tools.ide.metrics.collector.TelemetryMetricsCollector
 import com.intellij.util.concurrency.SynchronizedClearableLazy
 import kotlinx.coroutines.runBlocking
 import java.util.*

 interface MetricsPublisher {
-  suspend fun publish(fullQualifiedTestMethodName: String, metricName: String)
+  suspend fun publish(fullQualifiedTestMethodName: String, vararg metricsCollectors: TelemetryMetricsCollector)

-  fun publishSync(fullQualifiedTestMethodName: String, metricName: String) {
+  fun publishSync(fullQualifiedTestMethodName: String, vararg metricsCollectors: TelemetryMetricsCollector) {
     runBlocking {
-      publish(fullQualifiedTestMethodName, metricName)
+      publish(fullQualifiedTestMethodName, *metricsCollectors)
     }
   }
@@ -24,7 +25,7 @@ interface MetricsPublisher {
 /** Dummy that always "works successfully" */
 class NoopMetricsPublisher : MetricsPublisher {
-  override suspend fun publish(fullQualifiedTestMethodName: String, metricName: String) {}
+  override suspend fun publish(fullQualifiedTestMethodName: String, vararg metricsCollectors: TelemetryMetricsCollector) {}
 }

 private val instance: SynchronizedClearableLazy<MetricsPublisher> = SynchronizedClearableLazy {
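
Note on the new API shape: callers now pass zero or more TelemetryMetricsCollector instances instead of a single metric name. A minimal caller-side sketch in Kotlin (the test name is hypothetical; getInstance() is the companion accessor referenced above, and the collector construction mirrors the test added later in this commit):

  // select meters whose name contains "custom", reducing each meter's data points by summing
  val collector = OpenTelemetryMeterCollector(MetricsSelectionStrategy.SUM) { it.key.contains("custom") }
  // suspend variant
  MetricsPublisher.getInstance().publish("com.example.MyPerfTest.myMethod", collector)
  // blocking convenience wrapper built on runBlocking, as defined above
  MetricsPublisher.getInstance().publishSync("com.example.MyPerfTest.myMethod", collector)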

View File

@@ -8,6 +8,7 @@ import com.intellij.platform.diagnostic.telemetry.IJTracer;
 import com.intellij.platform.diagnostic.telemetry.Scope;
 import com.intellij.platform.diagnostic.telemetry.TelemetryManager;
 import com.intellij.platform.testFramework.diagnostic.MetricsPublisher;
+import com.intellij.tools.ide.metrics.collector.OpenTelemetryMeterCollector;
 import com.intellij.util.ExceptionUtil;
 import com.intellij.util.ThrowableRunnable;
 import com.intellij.util.containers.ContainerUtil;
@@ -23,6 +24,7 @@ import org.jetbrains.annotations.Nullable;
 import java.io.IOException;
 import java.lang.reflect.Method;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.Locale;
@@ -39,14 +41,15 @@ public class PerformanceTestInfo {
     MEASURE
   }

-  private final ThrowableComputable<Integer, ?> test; // runnable to measure; returns actual input size
-  private final int expectedInputSize; // size of input the test is expected to process;
-  private ThrowableRunnable<?> setup; // to run before each test
-  private int maxMeasurementAttempts = 3; // number of retries
-  private final String launchName; // to print on fail
-  private int warmupIterations = 1; // default warmup iterations should be positive
+  private final ThrowableComputable<Integer, ?> test;   // runnable to measure; returns actual input size
+  private final int expectedInputSize;                  // size of input the test is expected to process;
+  private ThrowableRunnable<?> setup;                   // to run before each test
+  private int maxMeasurementAttempts = 3;               // number of retries
+  private final String launchName;                      // to print on fail
+  private int warmupIterations = 1;                     // default warmup iterations should be positive
   @NotNull
   private final IJTracer tracer;
+  private OpenTelemetryMeterCollector meterCollector = null;

   private static final CoroutineScope coroutineScope = CoroutineScopeKt.CoroutineScope(
     SupervisorKt.SupervisorJob(null).plus(Dispatchers.getIO())
@@ -89,8 +92,27 @@ public class PerformanceTestInfo {
     }
   }

+  private static void cleanupOutdatedMeters() {
+    try {
+      // force spans and meters to be written to disk before any test starts;
+      // it's the least we can do to minimize interference of the same meter across different tests
+      TelemetryManager.getInstance().forceFlushMetricsBlocking();
+
+      var csvFilesWithMetrics = Files.list(PathManager.getLogDir()).filter((it) -> it.toString().endsWith(".csv")).toList();
+      for (Path file : csvFilesWithMetrics) {
+        Files.deleteIfExists(file);
+      }
+    }
+    catch (Exception e) {
+      System.err.println(
+        "Error while removing Telemetry .csv meter files before the start of the perf test. This might affect metric values");
+      e.printStackTrace();
+    }
+  }
+
   PerformanceTestInfo(@NotNull ThrowableComputable<Integer, ?> test, int expectedInputSize, @NotNull String launchName) {
     initOpenTelemetry();
+    cleanupOutdatedMeters();

     this.test = test;
     this.expectedInputSize = expectedInputSize;
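
The cleanup above flushes pending meters and deletes leftover .csv files so that meters accumulated by earlier tests in the same process don't leak into this test's measurements. Roughly the same hygiene, sketched in Kotlin for test code that manages meters by hand (forceFlushMetricsBlocking and the .csv-in-log-dir convention are taken from this diff; the rest is standard java.nio):

  TelemetryManager.getInstance().forceFlushMetricsBlocking()
  Files.list(PathManager.getLogDir()).use { stream ->
    // meter values are appended to .csv files in the log directory; drop stale ones
    stream.filter { it.toString().endsWith(".csv") }.forEach { Files.deleteIfExists(it) }
  }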
@@ -112,6 +134,12 @@ public class PerformanceTestInfo {
     return this;
   }

+  @Contract(pure = true) // to warn about not calling .start() in the end
+  public PerformanceTestInfo withTelemetryMeters(OpenTelemetryMeterCollector meterCollector) {
+    this.meterCollector = meterCollector;
+    return this;
+  }
+
   /**
    * Runs the perf test {@code iterations} times before starting the final measuring.
    */
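
A hedged usage sketch of the new fluent setter from a Kotlin test (the PlatformTestUtil factory name, its arguments, and doWork() are assumed for illustration; withTelemetryMeters and start are from this diff):

  PlatformTestUtil.startPerformanceTest("my perf test", 1000) { doWork() }
    .withTelemetryMeters(OpenTelemetryMeterCollector(MetricsSelectionStrategy.SUM) { it.key.startsWith("my.meter") })
    .start()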
@@ -311,7 +339,13 @@ public class PerformanceTestInfo {
       try {
         // publish warmup and final measurements at once at the end of the runs
         if (iterationType.equals(IterationMode.MEASURE)) {
-          MetricsPublisher.Companion.getInstance().publishSync(uniqueTestName, uniqueTestName);
+          var publisherInstance = MetricsPublisher.Companion.getInstance();
+          if (meterCollector != null) {
+            publisherInstance.publishSync(uniqueTestName, meterCollector);
+          }
+          else {
+            publisherInstance.publishSync(uniqueTestName);
+          }
         }
       }
       catch (Throwable t) {

View File

@@ -8,6 +8,7 @@ import com.intellij.openapi.util.io.FileUtil
 import com.intellij.platform.testFramework.diagnostic.MetricsPublisher
 import com.intellij.teamcity.TeamCityClient
 import com.intellij.testFramework.UsefulTestCase
+import com.intellij.tools.ide.metrics.collector.TelemetryMetricsCollector
 import com.intellij.tools.ide.metrics.collector.metrics.PerformanceMetrics
 import com.intellij.tools.ide.metrics.collector.publishing.CIServerBuildInfo
 import com.intellij.tools.ide.metrics.collector.publishing.PerformanceMetricsDto
@@ -50,10 +51,13 @@ class IJPerfMetricsPublisherImpl : MetricsPublisher {
          else setBuildParams()
   )

-  private suspend fun prepareMetricsForPublishing(fullQualifiedTestMethodName: String, spanName: String): PerformanceMetricsDto {
-    val metrics: List<PerformanceMetrics.Metric> = MetricsExtractor().waitTillMetricsExported(spanName)
+  private suspend fun prepareMetricsForPublishing(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMetricsCollector): PerformanceMetricsDto {
+    val metrics: List<PerformanceMetrics.Metric> = SpanMetricsExtractor().waitTillMetricsExported(uniqueTestIdentifier)
+    val additionalMetrics: List<PerformanceMetrics.Metric> = metricsCollectors.flatMap { it.collect(PathManager.getLogDir()) }

-    teamCityClient.publishTeamCityArtifacts(source = PathManager.getLogDir(), artifactPath = fullQualifiedTestMethodName)
+    val mergedMetrics = metrics.plus(additionalMetrics)
+    teamCityClient.publishTeamCityArtifacts(source = PathManager.getLogDir(), artifactPath = uniqueTestIdentifier)

     val buildInfo = CIServerBuildInfo(
       buildId = teamCityClient.buildId,
@@ -69,19 +73,19 @@ class IJPerfMetricsPublisherImpl : MetricsPublisher {
     )

     return PerformanceMetricsDto.create(
-      projectName = fullQualifiedTestMethodName,
+      projectName = uniqueTestIdentifier,
       projectURL = "",
       projectDescription = "",
-      methodName = fullQualifiedTestMethodName,
+      methodName = uniqueTestIdentifier,
       buildNumber = BuildNumber.currentVersion(),
-      metrics = metrics,
+      metrics = mergedMetrics,
       buildInfo = buildInfo
     )
   }
 }

-  override suspend fun publish(fullQualifiedTestMethodName: String, metricName: String) {
-    val metricsDto = prepareMetricsForPublishing(fullQualifiedTestMethodName, metricName)
+  override suspend fun publish(fullQualifiedTestMethodName: String, vararg metricsCollectors: TelemetryMetricsCollector) {
+    val metricsDto = prepareMetricsForPublishing(fullQualifiedTestMethodName, *metricsCollectors)

     withContext(Dispatchers.IO) {
       val artifactName = "metrics.performance.json"
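
Since prepareMetricsForPublishing now flat-maps over arbitrary collectors, any TelemetryMetricsCollector implementation can contribute to mergedMetrics. A minimal hedged sketch (the interface shape and parameter name are inferred from its usage in this diff: collect(logDir) returning PerformanceMetrics.Metric values; newDuration appears in SpanMetricsExtractor below; the metric name and value are invented):

  class HypotheticalDurationCollector : TelemetryMetricsCollector {
    override fun collect(logsDirPath: Path): List<PerformanceMetrics.Metric> {
      // e.g. derive a duration from something previously written to the log directory
      return listOf(PerformanceMetrics.newDuration("my.custom.duration.ms", 42L))
    }
  }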

View File

@@ -11,7 +11,7 @@ import java.nio.file.Path
 import kotlin.math.absoluteValue
 import kotlin.time.Duration.Companion.milliseconds

-class MetricsExtractor(private val telemetryJsonFile: Path = getDefaultPathToTelemetrySpanJson()) {
+class SpanMetricsExtractor(private val telemetryJsonFile: Path = getDefaultPathToTelemetrySpanJson()) {
   companion object {
     fun getDefaultPathToTelemetrySpanJson(): Path {
       return Path.of(System.getProperty("idea.diagnostic.opentelemetry.file",
@@ -29,7 +29,7 @@ class MetricsExtractor(private val telemetryJsonFile: Path = getDefaultPathToTel
     return requireNotNull(originalMetrics) { "Couldn't find metrics for '$spanName' in $telemetryJsonFile" }
   }

-  private fun getAttemptsStatisticalMetrics(attempts: List<PerformanceMetrics.Metric>, metricsPrefix: String): List<PerformanceMetrics.Metric> {
+  private fun getAttemptsSpansStatisticalMetrics(attempts: List<PerformanceMetrics.Metric>, metricsPrefix: String): List<PerformanceMetrics.Metric> {
     val medianValueOfAttempts: Long = attempts.medianValue()
     val madValueOfAttempts = attempts.map { (it.value - medianValueOfAttempts).absoluteValue }.median()
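
A small worked example of the statistic above, assuming medianValue()/median() are the usual order statistics (a middle-element median for odd-sized samples; the production helpers may interpolate for even sizes):

  fun main() {
    val attempts = listOf(100L, 110L, 140L).sorted()
    val median = attempts[attempts.size / 2]                          // 110
    val deviations = attempts.map { kotlin.math.abs(it - median) }.sorted()
    val mad = deviations[deviations.size / 2]                         // median absolute deviation = 10
    println("median=$median mad=$mad")
  }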
@@ -55,7 +55,7 @@ class MetricsExtractor(private val telemetryJsonFile: Path = getDefaultPathToTel
   /**
    * The author of the perf test might want to report custom metrics from the test (spans or meters)
    */
-  private fun getAggregatedCustomMetricsReportedFromTests(customMetrics: List<PerformanceMetrics.Metric>, metricsPrefix: String): List<PerformanceMetrics.Metric> {
+  private fun getAggregatedCustomSpansMetricsReportedFromTests(customMetrics: List<PerformanceMetrics.Metric>, metricsPrefix: String): List<PerformanceMetrics.Metric> {
     return customMetrics.groupBy { it.id.name }
       .map { group ->
         PerformanceMetrics.newDuration("${metricsPrefix}${group.key}", group.value.map { it.value }.average().toLong())
@@ -79,13 +79,13 @@ class MetricsExtractor(private val telemetryJsonFile: Path = getDefaultPathToTel
     // some tests might be forced to run without warmup attempts
     if (forWarmup && attempts.isEmpty()) return listOf()

-    val attemptsStatisticalMetrics: List<PerformanceMetrics.Metric> = getAttemptsStatisticalMetrics(attempts, metricsPrefix)
+    val attemptsStatisticalMetrics: List<PerformanceMetrics.Metric> = getAttemptsSpansStatisticalMetrics(attempts, metricsPrefix)

     val mainMetricValue: Long = originalMetrics.single { it.id.name == spanName }.value
     val totalTestDurationMetric = PerformanceMetrics.newDuration("${metricsPrefix}total.test.duration.ms", mainMetricValue)

     val customMetrics = originalMetrics.filterNot { it.id.name.startsWith(attemptSuffix, ignoreCase = true) || it.id.name == spanName }
-    val aggregatedCustomMetrics = getAggregatedCustomMetricsReportedFromTests(customMetrics, metricsPrefix)
+    val aggregatedCustomMetrics = getAggregatedCustomSpansMetricsReportedFromTests(customMetrics, metricsPrefix)

     return attemptsStatisticalMetrics.plus(totalTestDurationMetric).plus(aggregatedCustomMetrics)
   }
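
To make the custom-span aggregation concrete, a worked Kotlin example of the groupBy-then-average step (metric names and values invented; the real code groups PerformanceMetrics.Metric by id.name):

  fun main() {
    // two measured iterations both reported a span named "custom span"
    val reported = listOf("custom span" to 80L, "custom span" to 120L)
    val aggregated = reported.groupBy({ it.first }, { it.second })
      .map { (name, values) -> name to values.average().toLong() }
    println(aggregated) // [(custom span, 100)]; the published value is the mean duration
  }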

View File

@@ -1,16 +1,21 @@
 // Copyright 2000-2023 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
 package com.intellij.tools.ide.metrics.benchmark

+import com.intellij.openapi.application.PathManager
 import com.intellij.platform.diagnostic.telemetry.PlatformMetrics
 import com.intellij.platform.diagnostic.telemetry.Scope
 import com.intellij.platform.diagnostic.telemetry.TelemetryManager
 import com.intellij.platform.diagnostic.telemetry.helpers.runWithSpan
 import com.intellij.testFramework.PlatformTestUtil
 import com.intellij.testFramework.junit5.TestApplication
+import com.intellij.tools.ide.metrics.collector.OpenTelemetryMeterCollector
+import com.intellij.tools.ide.metrics.collector.metrics.MetricsSelectionStrategy
 import kotlinx.coroutines.delay
 import kotlinx.coroutines.runBlocking
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.Test
 import org.junit.jupiter.api.TestInfo
+import java.util.concurrent.atomic.AtomicLong
 import kotlin.random.Random
 import kotlin.time.Duration.Companion.milliseconds
@@ -28,6 +33,12 @@ class ApplicationMetricsExtractionFromUnitPerfTest {
   @Test
   fun reportingAnyCustomMetricsFromPerfTest(testInfo: TestInfo) {
+    val counter: AtomicLong = AtomicLong()
+    val counterMeter = TelemetryManager.getMeter(ExtractionMetricsScope)
+      .counterBuilder("custom.counter")
+      .buildWithCallback { it.record(counter.get()) }
+    val meterCollector = OpenTelemetryMeterCollector(MetricsSelectionStrategy.SUM) { it.key.contains("custom") }
+
     val testName = testInfo.testMethod.get().name
     val customSpanName = "custom span"
@@ -36,9 +47,17 @@ class ApplicationMetricsExtractionFromUnitPerfTest {
         runBlocking { delay(Random.nextInt(50, 100).milliseconds) }
       }

+      counter.incrementAndGet()
       runBlocking { delay(Random.nextInt(50, 100).milliseconds) }
-    }.start()
+    }
+      .withTelemetryMeters(meterCollector)
+      .start()

     MetricsExtractionFromUnitPerfTest.checkMetricsAreFlushedToTelemetryFile(getFullTestName(testInfo, testName), withWarmup = true, customSpanName)
+
+    val meters = meterCollector.collect(PathManager.getLogDir())
+    Assertions.assertTrue(meters.count { it.id.name == "custom.counter" } == 1, "Counter meter should be present in .csv metrics file")
   }
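
The test exercises an asynchronous counter; other OpenTelemetry instruments flow through the same pipeline. For instance, a gauge could be registered analogously (gaugeBuilder/buildWithCallback are standard OpenTelemetry Meter API; ExtractionMetricsScope is the scope this test already uses):

  val gauge = TelemetryManager.getMeter(ExtractionMetricsScope)
    .gaugeBuilder("custom.gauge")
    .buildWithCallback { it.record(Random.nextDouble()) }

A collector whose predicate matches "custom" would then pick it up from the same .csv output.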
@Test

View File

@@ -25,7 +25,7 @@ class MetricsExtractionFromUnitPerfTest {
   companion object {
     fun checkMetricsAreFlushedToTelemetryFile(spanName: String, withWarmup: Boolean = true, vararg customSpanNames: String) {
       val extractedMetrics = runBlocking {
-        MetricsExtractor().waitTillMetricsExported(spanName = spanName)
+        SpanMetricsExtractor().waitTillMetricsExported(spanName = spanName)
       }

       if (withWarmup) {
@@ -57,7 +57,7 @@ class MetricsExtractionFromUnitPerfTest {
   fun unitPerfTestsMetricsExtraction(testInfo: TestInfo) = runBlocking {
     val mainMetricName = "simple perf test"

-    val extractedMetrics = MetricsExtractor((openTelemetryReports / "open-telemetry-unit-perf-test.json"))
+    val extractedMetrics = SpanMetricsExtractor((openTelemetryReports / "open-telemetry-unit-perf-test.json"))
       .waitTillMetricsExported(spanName = mainMetricName)

     // warmup metrics