[unit perf tests] AT-726 Removing dependency on metrics extraction implementation details from testFramework

GitOrigin-RevId: a9cd3cd33ef2a0ce8ad49a475bbc9657e8d7e783
This commit is contained in:
Nikita Kudrin
2024-02-13 13:06:52 +02:00
committed by intellij-monorepo-bot
parent 632a414498
commit 4366fae28c
9 changed files with 65 additions and 22 deletions

View File

@@ -45,7 +45,6 @@
<orderEntry type="library" scope="TEST" name="jackson-databind" level="project" />
<orderEntry type="library" scope="TEST" name="jackson-module-kotlin" level="project" />
<orderEntry type="library" scope="TEST" name="http-client" level="project" />
<orderEntry type="module" module-name="intellij.tools.ide.metrics.collector" />
<orderEntry type="module-library" scope="TEST">
<library name="okhttp3-mockwebserver" type="repository">
<properties maven-id="com.squareup.okhttp3:mockwebserver:5.0.0-alpha.11">

View File

@@ -0,0 +1,15 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.platform.testFramework.diagnostic
/**
 * Aggregation applied to the data points collected for a single meter before publishing.
 *
 * Counterpart of com.intellij.tools.ide.metrics.collector.metrics.MetricsSelectionStrategy.
 * Declared here so that testFramework does not depend on the metrics-collector implementation;
 * each constant is translated 1:1 to MetricsSelectionStrategy by the publisher implementation,
 * so the two enums must stay in sync.
 */
enum class MetricsAggregation {
  EARLIEST,
  /** Usually used to collect gauges */
  LATEST,
  MINIMUM,
  MAXIMUM,
  SUM,
  AVERAGE;
}

View File

@@ -5,7 +5,6 @@ package com.intellij.platform.testFramework.diagnostic
import com.intellij.openapi.application.PathManager
import com.intellij.openapi.diagnostic.logger
import com.intellij.tools.ide.metrics.collector.TelemetryMetricsCollector
import com.intellij.util.concurrency.SynchronizedClearableLazy
import kotlinx.coroutines.runBlocking
import java.nio.file.Path
@@ -14,9 +13,9 @@ import java.util.*
import kotlin.io.path.writer
interface MetricsPublisher {
suspend fun publish(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMetricsCollector)
suspend fun publish(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMeterCollector)
fun publishSync(fullQualifiedTestMethodName: String, vararg metricsCollectors: TelemetryMetricsCollector) {
fun publishSync(fullQualifiedTestMethodName: String, vararg metricsCollectors: TelemetryMeterCollector) {
runBlocking {
publish(fullQualifiedTestMethodName, *metricsCollectors)
}
@@ -32,7 +31,7 @@ interface MetricsPublisher {
/** Dummy that always "works successfully" */
class NoopMetricsPublisher : MetricsPublisher {
override suspend fun publish(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMetricsCollector) {}
override suspend fun publish(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMeterCollector) {}
}
private val instance: SynchronizedClearableLazy<MetricsPublisher> = SynchronizedClearableLazy {

View File

@@ -0,0 +1,5 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.platform.testFramework.diagnostic
/**
 * Counterpart of com.intellij.tools.ide.metrics.collector.OpenTelemetryMeterCollector.
 *
 * Lets tests describe which OpenTelemetry meters to publish without a compile-time
 * dependency on the metrics-collector module.
 *
 * @param metricsAggregation how the collected data points of a matching meter are aggregated
 * @param metersFilter predicate over a (meter name -> collected data points) entry;
 *        only meters for which it returns `true` are published
 */
class TelemetryMeterCollector(
  val metricsAggregation: MetricsAggregation,
  val metersFilter: (Map.Entry<String, List<Long>>) -> Boolean,
)

View File

@@ -7,8 +7,8 @@ import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.platform.diagnostic.telemetry.IJTracer;
import com.intellij.platform.diagnostic.telemetry.Scope;
import com.intellij.platform.diagnostic.telemetry.TelemetryManager;
import com.intellij.platform.testFramework.diagnostic.TelemetryMeterCollector;
import com.intellij.platform.testFramework.diagnostic.MetricsPublisher;
import com.intellij.tools.ide.metrics.collector.OpenTelemetryMeterCollector;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
@@ -50,7 +50,7 @@ public class PerformanceTestInfo {
private String uniqueTestName; // at least full qualified test name (plus other identifiers, optionally)
@NotNull
private final IJTracer tracer;
private OpenTelemetryMeterCollector meterCollector = null;
private TelemetryMeterCollector meterCollector = null;
private static final CoroutineScope coroutineScope = CoroutineScopeKt.CoroutineScope(
SupervisorKt.SupervisorJob(null).plus(Dispatchers.getIO())
@@ -148,7 +148,7 @@ public class PerformanceTestInfo {
* .counterBuilder("custom.counter")
* .buildWithCallback { it.record(counter.get()) }
*
* val meterCollector = OpenTelemetryMeterCollector(MetricsSelectionStrategy.SUM) { it.key.contains("custom") }
* val meterCollector = TelemetryMeterCollector(MetricsAggregation.SUM) { it.key.contains("custom") }
*
* PlatformTestUtil.newPerformanceTest("my perf test") {
* counter.incrementAndGet()
@@ -158,7 +158,7 @@ public class PerformanceTestInfo {
* </pre>
*/
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfo withTelemetryMeters(OpenTelemetryMeterCollector meterCollector) {
public PerformanceTestInfo withTelemetryMeters(TelemetryMeterCollector meterCollector) {
this.meterCollector = meterCollector;
return this;
}
@@ -230,7 +230,7 @@ public class PerformanceTestInfo {
* </ul>
* <br/>
* By default only OpenTelemetry spans will be published. (from the {@code ./system/test/log/opentelemtry.json} file).<br/>
* To enable publishing of meters (from the {@code ./system/test/log/open-telemetry-metrics.*.csv}) use {@link #withTelemetryMeters(OpenTelemetryMeterCollector)}.<br/>
* To enable publishing of meters (from the {@code ./system/test/log/open-telemetry-metrics.*.csv}) use {@link #withTelemetryMeters(TelemetryMeterCollector)}. <br/>
* <p/>
* Considering metrics: better to have a test that produces metrics in seconds, rather milliseconds.<br/>
* This way degradation will be easier to detect and metric deviation from the baseline will be easier to notice.
@@ -239,7 +239,9 @@ public class PerformanceTestInfo {
* <a href="https://buildserver.labs.intellij.net/buildConfiguration/ijplatform_master_Idea_Tests_PerformanceTests?branch=&buildTypeTab=overview&mode=builds">the composite build</a>
* <br/>
* Raw metrics are reported as TC artifacts and can be found on the Artifacts tab in dependency builds.<br/>
* Human friendly metrics representation can be viewed in <a href="https://ij-perf.labs.jb.gg/perfUnit/tests?machine=linux-blade-hetzner&branch=master">IJ Perf</a>
* Human friendly metrics representation can be viewed in <a href="https://ij-perf.labs.jb.gg/perfUnit/tests?machine=linux-blade-hetzner&branch=master">IJ Perf</a><br/>
* Last but not least: if metrics aren't published or even not collected - probably the TelemetryManager instance isn't initialized correctly
* or dependency on module intellij.tools.ide.metrics.benchmark isn't set.
*
* @see #start(String)
* @see #start(Method)

View File

@@ -19,6 +19,6 @@
<orderEntry type="module" module-name="intellij.tools.ide.util.common" />
<orderEntry type="module" module-name="intellij.platform.testFramework" />
<orderEntry type="module" module-name="intellij.platform.testFramework.junit5" scope="TEST" />
<orderEntry type="library" scope="TEST" name="opentelemetry" level="project" />
<orderEntry type="library" name="opentelemetry" level="project" />
</component>
</module>

View File

@@ -5,10 +5,13 @@ import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.intellij.openapi.application.PathManager
import com.intellij.openapi.util.BuildNumber
import com.intellij.openapi.util.io.FileUtil
import com.intellij.platform.testFramework.diagnostic.MetricsAggregation
import com.intellij.platform.testFramework.diagnostic.MetricsPublisher
import com.intellij.platform.testFramework.diagnostic.TelemetryMeterCollector
import com.intellij.teamcity.TeamCityClient
import com.intellij.testFramework.UsefulTestCase
import com.intellij.tools.ide.metrics.collector.TelemetryMetricsCollector
import com.intellij.tools.ide.metrics.collector.OpenTelemetryMeterCollector
import com.intellij.tools.ide.metrics.collector.metrics.MetricsSelectionStrategy
import com.intellij.tools.ide.metrics.collector.metrics.PerformanceMetrics
import com.intellij.tools.ide.metrics.collector.publishing.CIServerBuildInfo
import com.intellij.tools.ide.metrics.collector.publishing.PerformanceMetricsDto
@@ -51,9 +54,11 @@ class IJPerfMetricsPublisherImpl : MetricsPublisher {
else setBuildParams()
)
private suspend fun prepareMetricsForPublishing(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMetricsCollector): PerformanceMetricsDto {
private suspend fun prepareMetricsForPublishing(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMeterCollector): PerformanceMetricsDto {
val metrics: List<PerformanceMetrics.Metric> = SpanMetricsExtractor().waitTillMetricsExported(uniqueTestIdentifier)
val additionalMetrics: List<PerformanceMetrics.Metric> = metricsCollectors.flatMap { it.collect(PathManager.getLogDir()) }
val additionalMetrics: List<PerformanceMetrics.Metric> = metricsCollectors.flatMap {
it.convertToCompleteMetricsCollector().collect(PathManager.getLogDir())
}
val mergedMetrics = metrics.plus(additionalMetrics)
@@ -85,7 +90,7 @@ class IJPerfMetricsPublisherImpl : MetricsPublisher {
}
}
override suspend fun publish(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMetricsCollector) {
override suspend fun publish(uniqueTestIdentifier: String, vararg metricsCollectors: TelemetryMeterCollector) {
val metricsDto = prepareMetricsForPublishing(uniqueTestIdentifier, *metricsCollectors)
withContext(Dispatchers.IO) {
@@ -107,4 +112,23 @@ class IJPerfMetricsPublisherImpl : MetricsPublisher {
zipContent = false)
}
}
}
/**
 * Bridges the testFramework-level [TelemetryMeterCollector] to the metrics-collector
 * implementation: translates the aggregation enum 1:1 into a [MetricsSelectionStrategy]
 * and adapts the filter so the user-supplied predicate sees plain [Long] values instead
 * of OpenTelemetry point objects.
 */
internal fun TelemetryMeterCollector.convertToCompleteMetricsCollector(): OpenTelemetryMeterCollector {
  // Exhaustive `when` (no `else`): the compiler flags any aggregation added on one side only.
  val strategy = when (metricsAggregation) {
    MetricsAggregation.EARLIEST -> MetricsSelectionStrategy.EARLIEST
    MetricsAggregation.LATEST -> MetricsSelectionStrategy.LATEST
    MetricsAggregation.MINIMUM -> MetricsSelectionStrategy.MINIMUM
    MetricsAggregation.MAXIMUM -> MetricsSelectionStrategy.MAXIMUM
    MetricsAggregation.SUM -> MetricsSelectionStrategy.SUM
    MetricsAggregation.AVERAGE -> MetricsSelectionStrategy.AVERAGE
  }
  return OpenTelemetryMeterCollector(strategy) { rawEntry ->
    // Unwrap the point data eagerly so the user filter works on simple Long lists.
    val adaptedEntry = object : Map.Entry<String, List<Long>> {
      override val key: String = rawEntry.key
      override val value: List<Long> = rawEntry.value.map { it.value }
    }
    metersFilter(adaptedEntry)
  }
}

View File

@@ -6,10 +6,10 @@ import com.intellij.platform.diagnostic.telemetry.PlatformMetrics
import com.intellij.platform.diagnostic.telemetry.Scope
import com.intellij.platform.diagnostic.telemetry.TelemetryManager
import com.intellij.platform.diagnostic.telemetry.helpers.runWithSpan
import com.intellij.platform.testFramework.diagnostic.MetricsAggregation
import com.intellij.platform.testFramework.diagnostic.TelemetryMeterCollector
import com.intellij.testFramework.PlatformTestUtil
import com.intellij.testFramework.junit5.TestApplication
import com.intellij.tools.ide.metrics.collector.OpenTelemetryMeterCollector
import com.intellij.tools.ide.metrics.collector.metrics.MetricsSelectionStrategy
import kotlinx.coroutines.delay
import kotlinx.coroutines.runBlocking
import org.junit.jupiter.api.Assertions
@@ -38,7 +38,7 @@ class ApplicationMetricsExtractionFromUnitPerfTest {
.counterBuilder("custom.counter")
.buildWithCallback { it.record(counter.get()) }
val meterCollector = OpenTelemetryMeterCollector(MetricsSelectionStrategy.SUM) { it.key.contains("custom") }
val meterCollector = TelemetryMeterCollector(MetricsAggregation.SUM) { it.key.contains("custom") }
val testName = testInfo.testMethod.get().name
val customSpanName = "custom span"
@@ -55,7 +55,7 @@ class ApplicationMetricsExtractionFromUnitPerfTest {
.start()
MetricsExtractionFromUnitPerfTest.checkMetricsAreFlushedToTelemetryFile(getFullTestName(testInfo, testName), withWarmup = true, customSpanName)
val meters = meterCollector.collect(PathManager.getLogDir())
val meters = meterCollector.convertToCompleteMetricsCollector().collect(PathManager.getLogDir())
Assertions.assertTrue(meters.count { it.id.name == "custom.counter" } == 1, "Counter meter should be present in .csv metrics file")
}

View File

@@ -4,7 +4,6 @@ import com.intellij.platform.diagnostic.telemetry.MetricsImporterUtils
import com.intellij.tools.ide.metrics.collector.metrics.MetricsSelectionStrategy
import com.intellij.tools.ide.metrics.collector.metrics.PerformanceMetrics
import io.opentelemetry.sdk.metrics.data.LongPointData
import io.opentelemetry.sdk.metrics.data.PointData
import java.nio.file.Path
import kotlin.io.path.listDirectoryEntries
import kotlin.io.path.name
@@ -14,7 +13,7 @@ import kotlin.io.path.name
* [metersFilter] Input data: key - meter name. value - list of collected data points for that meter
*/
open class OpenTelemetryMeterCollector(val metricsSelectionStrategy: MetricsSelectionStrategy,
val metersFilter: (Map.Entry<String, List<PointData>>) -> Boolean) : TelemetryMetricsCollector {
val metersFilter: (Map.Entry<String, List<LongPointData>>) -> Boolean) : TelemetryMetricsCollector {
private fun getOpenTelemetryCsvReportFiles(logsDirPath: Path): List<Path> {
val metricsCsvFiles = logsDirPath.listDirectoryEntries("*.csv").filter { it.name.startsWith("open-telemetry-metrics") }
require(metricsCsvFiles.isNotEmpty()) {