[perf unit tests] Move PerformanceTestInfo to separate module outside of testFramework

The class depends heavily on metrics, and keeping it inside testFramework would require either duplicating a lot of metrics-related code or introducing a new dependency, thus exposing the new module to the SDK.

GitOrigin-RevId: 118b43cae4e2eeb74ade92ffd6073de5f23c007c
Maxim.Kolmakov
2024-05-22 16:23:25 +02:00
committed by intellij-monorepo-bot
parent 41659687f5
commit e0d9a260f2
8 changed files with 534 additions and 377 deletions
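The move relies on the standard java.util.ServiceLoader indirection visible in the files below: testFramework keeps only the PerformanceTestInfo interface plus a no-op fallback, while the real implementation lives in the metrics benchmark module and is registered through a META-INF/services file. A minimal, self-contained sketch of that pattern, using hypothetical names (Benchmark, NoOpBenchmark, BenchmarkLoader) rather than the actual classes from this commit:

import java.util.ServiceLoader;

// Hypothetical stand-ins for PerformanceTestInfo, NoOpPerformanceTestInfo
// and PerformanceTestInfoLoader introduced by this commit.
interface Benchmark {
  Benchmark attempts(int attempts); // builder-style configuration
  void start();                     // runs the measured workload
}

// Null-object fallback: callers keep working even when no implementation
// module is on the classpath.
final class NoOpBenchmark implements Benchmark {
  @Override public Benchmark attempts(int attempts) { return this; }
  @Override public void start() { /* intentionally does nothing */ }
}

final class BenchmarkLoader {
  static Benchmark getInstance() {
    // ServiceLoader discovers implementations registered under
    // META-INF/services/<fully qualified interface name>.
    for (Benchmark impl : ServiceLoader.load(Benchmark.class)) {
      return impl;
    }
    return new NoOpBenchmark();
  }
}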


@@ -0,0 +1,42 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.testFramework
import com.intellij.openapi.util.ThrowableComputable
import com.intellij.platform.testFramework.diagnostic.TelemetryMeterCollector
import com.intellij.util.ThrowableRunnable
import kotlin.reflect.KFunction
class NoOpPerformanceTestInfo : PerformanceTestInfo {
override fun setup(setup: ThrowableRunnable<*>): PerformanceTestInfo? {
return this
}
override fun attempts(attempts: Int): PerformanceTestInfo? {
return this
}
override fun withTelemetryMeters(meterCollector: TelemetryMeterCollector?): PerformanceTestInfo? {
return this
}
override fun warmupIterations(iterations: Int): PerformanceTestInfo? {
return this
}
override fun getUniqueTestName(): String? = ""
override fun start() {}
override fun startAsSubtest() {}
override fun startAsSubtest(subTestName: String?) {}
override fun start(fullQualifiedTestMethodName: String?) {}
override fun start(kotlinTestMethod: KFunction<*>) {}
override fun getLaunchName(): String? = ""
override fun initialize(test: ThrowableComputable<Int?, *>, expectedInputSize: Int, launchName: String) = this
}
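Note that the no-op implementation returns empty strings rather than null from getUniqueTestName() and getLaunchName(), so call sites that concatenate these values (such as the XmlParsingTest change at the end of this diff) remain NPE-safe even when no real implementation is registered.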


@@ -1,146 +1,24 @@
// Copyright 2000-2023 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.testFramework;
import com.intellij.concurrency.IdeaForkJoinWorkerThreadFactory;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.platform.diagnostic.telemetry.IJTracer;
import com.intellij.platform.diagnostic.telemetry.Scope;
import com.intellij.platform.diagnostic.telemetry.TelemetryManager;
import com.intellij.platform.testFramework.diagnostic.MetricsPublisher;
import com.intellij.platform.testFramework.diagnostic.TelemetryMeterCollector;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.StorageLockContext;
import kotlin.reflect.KFunction;
import kotlinx.coroutines.CoroutineScope;
import kotlinx.coroutines.CoroutineScopeKt;
import kotlinx.coroutines.Dispatchers;
import kotlinx.coroutines.SupervisorKt;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Locale;
import java.util.ServiceLoader;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.function.Supplier;
import static com.intellij.platform.diagnostic.telemetry.helpers.TraceKt.computeWithSpanAttribute;
public interface PerformanceTestInfo {
// to warn about not calling .start() in the end
@Contract(pure = true)
PerformanceTestInfo setup(@NotNull ThrowableRunnable<?> setup);
public class PerformanceTestInfo {
private enum IterationMode {
WARMUP,
MEASURE
}
private final ThrowableComputable<Integer, ?> test; // runnable to measure; returns actual input size
private final int expectedInputSize; // size of input the test is expected to process;
private ThrowableRunnable<?> setup; // to run before each test
private int maxMeasurementAttempts = 3; // number of retries
public final String launchName; // to print on fail
private int warmupIterations = 1; // default warmup iterations should be positive
private String uniqueTestName; // at least the fully qualified test name (plus other identifiers, optionally)
@NotNull
private final IJTracer tracer;
private TelemetryMeterCollector meterCollector = null;
private static final CoroutineScope coroutineScope = CoroutineScopeKt.CoroutineScope(
SupervisorKt.SupervisorJob(null).plus(Dispatchers.getIO())
);
static {
// to use JobSchedulerImpl.getJobPoolParallelism() in tests which don't init application
IdeaForkJoinWorkerThreadFactory.setupForkJoinCommonPool(true);
}
private static void initOpenTelemetry() {
// The OpenTelemetry file will be located at ../system/test/log/opentelemetry.json (alongside the open-telemetry-metrics.* files)
System.setProperty("idea.diagnostic.opentelemetry.file",
PathManager.getLogDir().resolve("opentelemetry.json").toAbsolutePath().toString());
var telemetryInstance = TelemetryManager.getInstance();
// looks like telemetry manager is properly initialized
if (telemetryInstance.hasSpanExporters()) return;
System.err.printf(
"%nTelemetry instance will be overridden since span exporters aren't registered. " +
"This means your metrics (meters or spans), configured before any test execution, will not be reported. " +
"Consider using TestApplication, which will set up a proper instance of telemetry.%n");
try {
TelemetryManager.Companion.resetGlobalSdk();
var telemetryClazz = Class.forName("com.intellij.platform.diagnostic.telemetry.impl.TelemetryManagerImpl");
var instance = Arrays.stream(telemetryClazz.getDeclaredConstructors())
.filter((it) -> it.getParameterCount() > 0).findFirst()
.get()
.newInstance(coroutineScope, true);
TelemetryManager.Companion.forceSetTelemetryManager((TelemetryManager)instance);
}
catch (Throwable e) {
System.err.println(
"Couldn't set up TelemetryManager without TestApplication. Either the test should use TestApplication or there is a bug somewhere");
e.printStackTrace();
}
}
private static void cleanupOutdatedMeters() {
try {
// force spans and meters to be written to disk before any test starts
// it's at least what we can do to minimize interference of the same meter on different tests
TelemetryManager.getInstance().forceFlushMetricsBlocking();
// remove content of the previous tests from the idea.log
MetricsPublisher.Companion.truncateTestLog();
var filesWithMetrics = Files.list(PathManager.getLogDir()).filter((it) ->
it.toString().contains("-metrics") ||
it.toString().contains("-meters")).toList();
for (Path file : filesWithMetrics) {
Files.deleteIfExists(file);
}
}
catch (Exception e) {
System.err.println(
"Error while removing Telemetry files with meters before the start of the perf test. This might affect the collected metric values.");
e.printStackTrace();
}
}
PerformanceTestInfo(@NotNull ThrowableComputable<Integer, ?> test, int expectedInputSize, @NotNull String launchName) {
initOpenTelemetry();
cleanupOutdatedMeters();
this.test = test;
this.expectedInputSize = expectedInputSize;
assert expectedInputSize > 0 : "Expected input size must be > 0. Was: " + expectedInputSize;
this.launchName = launchName;
this.tracer = TelemetryManager.getInstance().getTracer(new Scope("performanceUnitTests", null));
}
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfo setup(@NotNull ThrowableRunnable<?> setup) {
assert this.setup == null;
this.setup = setup;
return this;
}
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfo attempts(int attempts) {
this.maxMeasurementAttempts = attempts;
return this;
}
// to warn about not calling .start() in the end
@Contract(pure = true)
PerformanceTestInfo attempts(int attempts);
/**
* Instruct to publish Telemetry meters (stored in .json files)
@@ -161,67 +39,18 @@ public class PerformanceTestInfo {
* .start()}
* </pre>
*/
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfo withTelemetryMeters(TelemetryMeterCollector meterCollector) {
this.meterCollector = meterCollector;
return this;
}
// to warn about not calling .start() in the end
@Contract(pure = true)
PerformanceTestInfo withTelemetryMeters(TelemetryMeterCollector meterCollector);
/**
* Runs the perf test {@code iterations} times before starting the final measuring.
*/
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfo warmupIterations(int iterations) {
warmupIterations = iterations;
return this;
}
// to warn about not calling .start() in the end
@Contract(pure = true)
PerformanceTestInfo warmupIterations(int iterations);
public String getUniqueTestName() {
return uniqueTestName;
}
private static Method filterMethodFromStackTrace(Function<Method, Boolean> methodFilter) {
StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
for (StackTraceElement element : stackTraceElements) {
try {
Method foundMethod = ContainerUtil.find(
Class.forName(element.getClassName()).getDeclaredMethods(),
method -> method.getName().equals(element.getMethodName()) && methodFilter.apply(method)
);
if (foundMethod != null) return foundMethod;
}
catch (ClassNotFoundException e) {
// do nothing, continue
}
}
return null;
}
private static Method tryToFindCallingTestMethodByJUnitAnnotation() {
return filterMethodFromStackTrace(
method -> ContainerUtil.exists(method.getDeclaredAnnotations(), annotation -> annotation.annotationType().getName().contains("junit"))
);
}
private static Method tryToFindCallingTestMethodByNamePattern() {
return filterMethodFromStackTrace(method -> method.getName().toLowerCase(Locale.ROOT).startsWith("test"));
}
private static Method getCallingTestMethod() {
Method callingTestMethod = tryToFindCallingTestMethodByJUnitAnnotation();
if (callingTestMethod == null) {
callingTestMethod = tryToFindCallingTestMethodByNamePattern();
if (callingTestMethod == null) {
throw new AssertionError(
"Couldn't manage to detect the calling test method. Please use one of the overloads of com.intellij.testFramework.PerformanceTestInfo.start"
);
}
}
return callingTestMethod;
}
String getUniqueTestName();
/**
* Start execution of the performance test.
@@ -252,37 +81,7 @@ public class PerformanceTestInfo {
* @see #start(kotlin.reflect.KFunction)
* @see #startAsSubtest(String)
**/
public void start() {
start(getCallingTestMethod(), launchName);
}
/**
* Start the perf test where the test's artifact path will have a name inferred from test method + subtest name.
*
* @see PerformanceTestInfo#start()
* @see PerformanceTestInfo#startAsSubtest(String)
* @see PerformanceTestInfo#start(kotlin.reflect.KFunction)
**/
public void start(@NotNull Method javaTestMethod, String subTestName) {
var fullTestName = String.format("%s.%s", javaTestMethod.getDeclaringClass().getName(), javaTestMethod.getName());
if (subTestName != null && !subTestName.isEmpty()) {
fullTestName += " - " + subTestName;
}
start(fullTestName);
}
/**
* Start the perf test where test artifact path will have a name inferred from test method.
* Useful in parametrized tests.
* <br/>
* Eg: <code>start(GradleHighlightingPerformanceTest::testCompletionPerformance)</code>
*
* @see PerformanceTestInfo#start(Method)
* @see PerformanceTestInfo#start(String)
*/
public void start(@NotNull KFunction<?> kotlinTestMethod) {
start(String.format("%s.%s", kotlinTestMethod.getClass().getName(), kotlinTestMethod.getName()));
}
void start();
/**
* Use it if you need to run many subsequent performance tests in your JUnit test.<br/>
@@ -290,18 +89,11 @@ public class PerformanceTestInfo {
* <br/>
* By default passed test launch name will be used as the subtest name.<br/>
*
* @see PerformanceTestInfo#startAsSubtest(String)
* @see PerformanceTestInfoImpl#startAsSubtest(String)
*/
public void startAsSubtest() {
startAsSubtest(launchName);
}
void startAsSubtest();
/**
* The same as {@link #startAsSubtest()} but with the option to specify subtest name.
*/
public void startAsSubtest(@Nullable String subTestName) {
start(getCallingTestMethod(), subTestName);
}
void startAsSubtest(@Nullable String subTestName);
/**
* Start execution of the performance test.
@@ -310,157 +102,26 @@ public class PerformanceTestInfo {
* For Java you can use {@link com.intellij.testFramework.UsefulTestCase#getQualifiedTestMethodName()}
* OR
* {@link com.intellij.testFramework.fixtures.BareTestFixtureTestCase#getQualifiedTestMethodName()}
* @see PerformanceTestInfo#start()
* @see PerformanceTestInfoImpl#start()
*/
public void start(String fullQualifiedTestMethodName) {
String sanitizedFullQualifiedTestMethodName = sanitizeFullTestNameForArtifactPublishing(fullQualifiedTestMethodName);
start(IterationMode.WARMUP, sanitizedFullQualifiedTestMethodName);
start(IterationMode.MEASURE, sanitizedFullQualifiedTestMethodName);
}
void start(String fullQualifiedTestMethodName);
/**
* @param uniqueTestName - should be at least the fully qualified test method name.
* Sometimes additional suffixes might be added, as in {@link PerformanceTestInfo#startAsSubtest(String)}
* Start the perf test where test artifact path will have a name inferred from test method.
* Useful in parametrized tests.
* <br/>
* Eg: <code>start(GradleHighlightingPerformanceTest::testCompletionPerformance)</code>
*
* @see PerformanceTestInfoImpl#start(Method)
* @see PerformanceTestInfoImpl#start(String)
*/
private void start(IterationMode iterationType, String uniqueTestName) {
this.uniqueTestName = uniqueTestName;
void start(@NotNull KFunction<?> kotlinTestMethod);
if (PlatformTestUtil.COVERAGE_ENABLED_BUILD) return;
System.out.printf("Starting performance test \"%s\" in mode: %s%n", uniqueTestName, iterationType);
String getLaunchName();
int maxIterationsNumber;
if (iterationType.equals(IterationMode.WARMUP)) {
maxIterationsNumber = warmupIterations;
}
else {
maxIterationsNumber = maxMeasurementAttempts;
}
if (maxIterationsNumber == 1) {
//noinspection CallToSystemGC
System.gc();
}
try {
computeWithSpanAttribute(tracer, uniqueTestName, "warmup", (st) -> String.valueOf(iterationType.equals(IterationMode.WARMUP)), () -> {
try {
PlatformTestUtil.waitForAllBackgroundActivityToCalmDown();
for (int attempt = 1; attempt <= maxIterationsNumber; attempt++) {
AtomicInteger actualInputSize;
if (setup != null) setup.run();
actualInputSize = new AtomicInteger(expectedInputSize);
Supplier<Object> perfTestWorkload = getPerfTestWorkloadSupplier(iterationType, attempt, actualInputSize);
computeWithSpanAttribute(
tracer, "Attempt: " + attempt,
"warmup",
(st) -> String.valueOf(iterationType.equals(IterationMode.WARMUP)),
() -> perfTestWorkload.get()
);
if (!UsefulTestCase.IS_UNDER_TEAMCITY) {
// TODO: Print debug metrics here https://youtrack.jetbrains.com/issue/AT-726
}
//noinspection CallToSystemGC
System.gc();
StorageLockContext.forceDirectMemoryCache();
}
}
catch (Throwable throwable) {
ExceptionUtil.rethrowUnchecked(throwable);
throw new RuntimeException(throwable);
}
return null;
});
}
finally {
try {
// publish warmup and final measurements at once at the end of the runs
if (iterationType.equals(IterationMode.MEASURE)) {
var publisherInstance = MetricsPublisher.Companion.getInstance();
if (meterCollector != null) {
publisherInstance.publishSync(uniqueTestName, meterCollector);
}
else {
publisherInstance.publishSync(uniqueTestName);
}
}
}
catch (Throwable t) {
System.err.println("Something unexpected happened during publishing performance metrics");
throw t;
}
}
}
private @NotNull Supplier<Object> getPerfTestWorkloadSupplier(IterationMode iterationType, int attempt, AtomicInteger actualInputSize) {
return () -> {
try {
Profiler.startProfiling(iterationType.name() + attempt);
actualInputSize.set(test.compute());
}
catch (Throwable e) {
ExceptionUtil.rethrowUnchecked(e);
throw new RuntimeException(e);
}
finally {
Profiler.stopProfiling();
}
return null;
};
}
private static @NotNull String sanitizeFullTestNameForArtifactPublishing(@NotNull String fullTestName) {
try {
//noinspection ResultOfMethodCallIgnored
Path.of("./" + fullTestName); // prefix with "./" to make sure "C:/Users" is sanitized
return fullTestName;
}
catch (InvalidPathException e) {
return FileUtil.sanitizeFileName(fullTestName, false);
}
}
private static final class Profiler {
private static final ProfilerForTests profiler = getProfilerInstance();
private static ProfilerForTests getProfilerInstance() {
ServiceLoader<ProfilerForTests> loader = ServiceLoader.load(ProfilerForTests.class);
for (ProfilerForTests service : loader) {
if (service != null) {
return service;
}
}
System.out.println("No service com.intellij.testFramework.Profiler is found in class path");
return null;
}
public static void stopProfiling() {
if (profiler != null) {
try {
profiler.stopProfiling();
}
catch (IOException e) {
System.out.println("Can't stop profiling");
}
}
}
public static void startProfiling(String fileName) {
Path logDir = PathManager.getLogDir();
if (profiler != null) {
try {
profiler.startProfiling(logDir, fileName);
}
catch (IOException e) {
System.out.println("Can't start profiling");
}
}
}
}
/**
* This method should be invoked right after construction to provide the required data.
* It cannot be part of the constructor because instances are created via ServiceLoader, which requires a no-arg constructor.
*/
PerformanceTestInfo initialize(@NotNull ThrowableComputable<Integer, ?> test, int expectedInputSize, @NotNull String launchName);
}
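Combined with the factory in PlatformTestUtil (changed later in this diff), a typical call chain against this interface looks roughly like the sketch below; dropCaches() and countProcessedItems() are hypothetical placeholders for a test's own logic:

// Hedged usage sketch of the interface above.
PlatformTestUtil.newPerformanceTestWithVariableInputSize("my perf test", 1_000, () -> {
    // measured workload; must return the actually processed input size
    return countProcessedItems();
  })
  .setup(() -> dropCaches())  // runs before each warmup/measurement attempt
  .warmupIterations(2)
  .attempts(5)
  .start();                   // the test name is inferred from the calling method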


@@ -0,0 +1,41 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.testFramework
import com.intellij.openapi.diagnostic.logger
import com.intellij.util.concurrency.SynchronizedClearableLazy
import java.util.ServiceLoader
class PerformanceTestInfoLoader {
companion object {
private val instance: SynchronizedClearableLazy<PerformanceTestInfo> = SynchronizedClearableLazy {
val log = logger<PerformanceTestInfo>()
val instance = try {
val aClass = PerformanceTestInfo::class.java
val implementations = ServiceLoader.load(aClass, aClass.classLoader).toList()
if (implementations.isEmpty()) {
log.info("No implementation found for MetricsPublisher - NOOP implementation will be used")
NoOpPerformanceTestInfo()
}
else if (implementations.size > 1) {
log.error("More than one implementation for ${aClass.simpleName} found: ${implementations.map { it::class.qualifiedName }}")
NoOpPerformanceTestInfo()
}
else {
implementations.single()
}
}
catch (e: Throwable) {
log.info("Cannot create MetricsPublisher, falling back to NOOP implementation", e)
NoOpPerformanceTestInfo()
}
log.info("Loaded metrics publisher implementation ${instance::class.java.name}")
instance
}
fun getInstance(): PerformanceTestInfo = instance.value
}
}
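A design note grounded in the code above: the implementation is resolved once and cached in a SynchronizedClearableLazy, so every getInstance() call returns the same shared instance; the initialize(...) call in PlatformTestUtil (see below) then re-populates that shared instance for each new test launch instead of constructing a fresh object.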


@@ -666,7 +666,7 @@ public final class PlatformTestUtil {
public static @NotNull PerformanceTestInfo newPerformanceTestWithVariableInputSize(@NonNls @NotNull String launchName,
int expectedInputSize,
@NotNull ThrowableComputable<Integer, ?> test) {
return new PerformanceTestInfo(test, expectedInputSize, launchName);
return PerformanceTestInfoLoader.Companion.getInstance().initialize(test, expectedInputSize, launchName);
}
public static void assertPathsEqual(@Nullable String expected, @Nullable String actual) {
@@ -703,7 +703,7 @@ public final class PlatformTestUtil {
OpenProjectTaskBuilderKt.saveProject(project, isForceSavingAllSettings);
}
static void waitForAllBackgroundActivityToCalmDown() {
public static void waitForAllBackgroundActivityToCalmDown() {
for (int i = 0; i < 50; i++) {
CpuUsageData data = CpuUsageData.measureCpuUsage(() -> TimeoutUtil.sleep(100));
if (!data.hasAnyActivityBesides(Thread.currentThread())) {


@@ -20,5 +20,6 @@
<orderEntry type="module" module-name="intellij.platform.testFramework" />
<orderEntry type="module" module-name="intellij.platform.testFramework.junit5" scope="TEST" />
<orderEntry type="library" name="opentelemetry" level="project" />
<orderEntry type="module" module-name="intellij.platform.boot" />
</component>
</module>


@@ -0,0 +1 @@
com.intellij.tools.ide.metrics.benchmark.PerformanceTestInfoImpl
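For ServiceLoader to pick up this registration, the one-line file above must live in the implementation module's resources under the standard services path (the concrete file path isn't shown in this diff; this is the general ServiceLoader convention):

META-INF/services/com.intellij.testFramework.PerformanceTestInfo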


@@ -0,0 +1,411 @@
// Copyright 2000-2023 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.tools.ide.metrics.benchmark;
import com.intellij.concurrency.IdeaForkJoinWorkerThreadFactory;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.platform.diagnostic.telemetry.IJTracer;
import com.intellij.platform.diagnostic.telemetry.Scope;
import com.intellij.platform.diagnostic.telemetry.TelemetryManager;
import com.intellij.platform.testFramework.diagnostic.MetricsPublisher;
import com.intellij.platform.testFramework.diagnostic.TelemetryMeterCollector;
import com.intellij.testFramework.PerformanceTestInfo;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.ProfilerForTests;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.StorageLockContext;
import kotlin.reflect.KFunction;
import kotlinx.coroutines.CoroutineScope;
import kotlinx.coroutines.CoroutineScopeKt;
import kotlinx.coroutines.Dispatchers;
import kotlinx.coroutines.SupervisorKt;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Locale;
import java.util.ServiceLoader;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.function.Supplier;
import static com.intellij.platform.diagnostic.telemetry.helpers.TraceKt.computeWithSpanAttribute;
public class PerformanceTestInfoImpl implements PerformanceTestInfo {
private enum IterationMode {
WARMUP,
MEASURE
}
private ThrowableComputable<Integer, ?> test; // runnable to measure; returns actual input size
private int expectedInputSize; // size of input the test is expected to process;
private ThrowableRunnable<?> setup; // to run before each test
private int maxMeasurementAttempts = 3; // number of retries
public String launchName; // to print on fail
private int warmupIterations = 1; // default warmup iterations should be positive
private String uniqueTestName; // at least the fully qualified test name (plus other identifiers, optionally)
@NotNull
private final IJTracer tracer;
private TelemetryMeterCollector meterCollector = null;
private static final CoroutineScope coroutineScope = CoroutineScopeKt.CoroutineScope(
SupervisorKt.SupervisorJob(null).plus(Dispatchers.getIO())
);
static {
// to use JobSchedulerImpl.getJobPoolParallelism() in tests which don't init application
IdeaForkJoinWorkerThreadFactory.setupForkJoinCommonPool(true);
}
private static void initOpenTelemetry() {
// The OpenTelemetry file will be located at ../system/test/log/opentelemetry.json (alongside the open-telemetry-metrics.* files)
System.setProperty("idea.diagnostic.opentelemetry.file",
PathManager.getLogDir().resolve("opentelemetry.json").toAbsolutePath().toString());
var telemetryInstance = TelemetryManager.getInstance();
// looks like telemetry manager is properly initialized
if (telemetryInstance.hasSpanExporters()) return;
System.err.printf(
"%nTelemetry instance will be overridden since span exporters aren't registered. " +
"This means your metrics (meters or spans), configured before any test execution, will not be reported. " +
"Consider using TestApplication, which will set up a proper instance of telemetry.%n");
try {
TelemetryManager.Companion.resetGlobalSdk();
var telemetryClazz = Class.forName("com.intellij.platform.diagnostic.telemetry.impl.TelemetryManagerImpl");
var instance = Arrays.stream(telemetryClazz.getDeclaredConstructors())
.filter((it) -> it.getParameterCount() > 0).findFirst()
.get()
.newInstance(coroutineScope, true);
TelemetryManager.Companion.forceSetTelemetryManager((TelemetryManager)instance);
}
catch (Throwable e) {
System.err.println(
"Couldn't set up TelemetryManager without TestApplication. Either the test should use TestApplication or there is a bug somewhere");
e.printStackTrace();
}
}
private static void cleanupOutdatedMeters() {
try {
// force spans and meters to be written to disk before any test starts
// it's at least what we can do to minimize interference of the same meter on different tests
TelemetryManager.getInstance().forceFlushMetricsBlocking();
// remove content of the previous tests from the idea.log
MetricsPublisher.Companion.truncateTestLog();
var filesWithMetrics = Files.list(PathManager.getLogDir()).filter((it) ->
it.toString().contains("-metrics") ||
it.toString().contains("-meters")).toList();
for (Path file : filesWithMetrics) {
Files.deleteIfExists(file);
}
}
catch (Exception e) {
System.err.println(
"Error while removing Telemetry files with meters before the start of the perf test. This might affect the collected metric values.");
e.printStackTrace();
}
}
public PerformanceTestInfoImpl() {
initOpenTelemetry();
cleanupOutdatedMeters();
this.tracer = TelemetryManager.getInstance().getTracer(new Scope("performanceUnitTests", null));
}
@Override
public PerformanceTestInfoImpl initialize(@NotNull ThrowableComputable<Integer, ?> test, int expectedInputSize, @NotNull String launchName){
this.test = test;
this.expectedInputSize = expectedInputSize;
assert expectedInputSize > 0 : "Expected input size must be > 0. Was: " + expectedInputSize;
this.launchName = launchName;
return this;
}
@Override
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfoImpl setup(@NotNull ThrowableRunnable<?> setup) {
assert this.setup == null;
this.setup = setup;
return this;
}
@Override
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfoImpl attempts(int attempts) {
this.maxMeasurementAttempts = attempts;
return this;
}
@Override
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfoImpl withTelemetryMeters(TelemetryMeterCollector meterCollector) {
this.meterCollector = meterCollector;
return this;
}
@Override
@Contract(pure = true) // to warn about not calling .start() in the end
public PerformanceTestInfoImpl warmupIterations(int iterations) {
warmupIterations = iterations;
return this;
}
@Override
public String getUniqueTestName() {
return uniqueTestName;
}
private static Method filterMethodFromStackTrace(Function<Method, Boolean> methodFilter) {
StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
for (StackTraceElement element : stackTraceElements) {
try {
Method foundMethod = ContainerUtil.find(
Class.forName(element.getClassName()).getDeclaredMethods(),
method -> method.getName().equals(element.getMethodName()) && methodFilter.apply(method)
);
if (foundMethod != null) return foundMethod;
}
catch (ClassNotFoundException e) {
// do nothing, continue
}
}
return null;
}
private static Method tryToFindCallingTestMethodByJUnitAnnotation() {
return filterMethodFromStackTrace(
method -> ContainerUtil.exists(method.getDeclaredAnnotations(), annotation -> annotation.annotationType().getName().contains("junit"))
);
}
private static Method tryToFindCallingTestMethodByNamePattern() {
return filterMethodFromStackTrace(method -> method.getName().toLowerCase(Locale.ROOT).startsWith("test"));
}
private static Method getCallingTestMethod() {
Method callingTestMethod = tryToFindCallingTestMethodByJUnitAnnotation();
if (callingTestMethod == null) {
callingTestMethod = tryToFindCallingTestMethodByNamePattern();
if (callingTestMethod == null) {
throw new AssertionError(
"Couldn't manage to detect the calling test method. Please use one of the overloads of com.intellij.testFramework.PerformanceTestInfo.start"
);
}
}
return callingTestMethod;
}
@Override
public void start() {
start(getCallingTestMethod(), launchName);
}
/**
* Start the perf test where the test's artifact path will have a name inferred from test method + subtest name.
*
* @see PerformanceTestInfoImpl#start()
* @see PerformanceTestInfoImpl#startAsSubtest(String)
* @see PerformanceTestInfoImpl#start(kotlin.reflect.KFunction)
**/
public void start(@NotNull Method javaTestMethod, String subTestName) {
var fullTestName = String.format("%s.%s", javaTestMethod.getDeclaringClass().getName(), javaTestMethod.getName());
if (subTestName != null && !subTestName.isEmpty()) {
fullTestName += " - " + subTestName;
}
start(fullTestName);
}
public void start(@NotNull KFunction<?> kotlinTestMethod) {
start(String.format("%s.%s", kotlinTestMethod.getClass().getName(), kotlinTestMethod.getName()));
}
@Override
public void startAsSubtest() {
startAsSubtest(launchName);
}
/**
* The same as {@link #startAsSubtest()} but with the option to specify subtest name.
*/
@Override
public void startAsSubtest(@Nullable String subTestName) {
start(getCallingTestMethod(), subTestName);
}
public void start(String fullQualifiedTestMethodName) {
String sanitizedFullQualifiedTestMethodName = sanitizeFullTestNameForArtifactPublishing(fullQualifiedTestMethodName);
start(IterationMode.WARMUP, sanitizedFullQualifiedTestMethodName);
start(IterationMode.MEASURE, sanitizedFullQualifiedTestMethodName);
}
@Override
public String getLaunchName() {
return launchName;
}
/**
* @param uniqueTestName - should be at least the fully qualified test method name.
* Sometimes additional suffixes might be added, as in {@link PerformanceTestInfoImpl#startAsSubtest(String)}
*/
private void start(IterationMode iterationType, String uniqueTestName) {
this.uniqueTestName = uniqueTestName;
if (PlatformTestUtil.COVERAGE_ENABLED_BUILD) return;
System.out.printf("Starting performance test \"%s\" in mode: %s%n", uniqueTestName, iterationType);
int maxIterationsNumber;
if (iterationType.equals(IterationMode.WARMUP)) {
maxIterationsNumber = warmupIterations;
}
else {
maxIterationsNumber = maxMeasurementAttempts;
}
if (maxIterationsNumber == 1) {
//noinspection CallToSystemGC
System.gc();
}
try {
computeWithSpanAttribute(tracer, uniqueTestName, "warmup", (st) -> String.valueOf(iterationType.equals(IterationMode.WARMUP)), () -> {
try {
PlatformTestUtil.waitForAllBackgroundActivityToCalmDown();
for (int attempt = 1; attempt <= maxIterationsNumber; attempt++) {
AtomicInteger actualInputSize;
if (setup != null) setup.run();
actualInputSize = new AtomicInteger(expectedInputSize);
Supplier<Object> perfTestWorkload = getPerfTestWorkloadSupplier(iterationType, attempt, actualInputSize);
computeWithSpanAttribute(
tracer, "Attempt: " + attempt,
"warmup",
(st) -> String.valueOf(iterationType.equals(IterationMode.WARMUP)),
() -> perfTestWorkload.get()
);
if (!UsefulTestCase.IS_UNDER_TEAMCITY) {
// TODO: Print debug metrics here https://youtrack.jetbrains.com/issue/AT-726
}
//noinspection CallToSystemGC
System.gc();
StorageLockContext.forceDirectMemoryCache();
}
}
catch (Throwable throwable) {
ExceptionUtil.rethrowUnchecked(throwable);
throw new RuntimeException(throwable);
}
return null;
});
}
finally {
try {
// publish warmup and final measurements at once at the end of the runs
if (iterationType.equals(IterationMode.MEASURE)) {
var publisherInstance = MetricsPublisher.Companion.getInstance();
if (meterCollector != null) {
publisherInstance.publishSync(uniqueTestName, meterCollector);
}
else {
publisherInstance.publishSync(uniqueTestName);
}
}
}
catch (Throwable t) {
System.err.println("Something unexpected happened during publishing performance metrics");
throw t;
}
}
}
private @NotNull Supplier<Object> getPerfTestWorkloadSupplier(IterationMode iterationType, int attempt, AtomicInteger actualInputSize) {
return () -> {
try {
Profiler.startProfiling(iterationType.name() + attempt);
actualInputSize.set(test.compute());
}
catch (Throwable e) {
ExceptionUtil.rethrowUnchecked(e);
throw new RuntimeException(e);
}
finally {
Profiler.stopProfiling();
}
return null;
};
}
private static @NotNull String sanitizeFullTestNameForArtifactPublishing(@NotNull String fullTestName) {
try {
//noinspection ResultOfMethodCallIgnored
Path.of("./" + fullTestName); // prefix with "./" to make sure "C:/Users" is sanitized
return fullTestName;
}
catch (InvalidPathException e) {
return FileUtil.sanitizeFileName(fullTestName, false);
}
}
private static final class Profiler {
private static final ProfilerForTests profiler = getProfilerInstance();
private static ProfilerForTests getProfilerInstance() {
ServiceLoader<ProfilerForTests> loader = ServiceLoader.load(ProfilerForTests.class);
for (ProfilerForTests service : loader) {
if (service != null) {
return service;
}
}
System.out.println("No service com.intellij.testFramework.Profiler is found in class path");
return null;
}
public static void stopProfiling() {
if (profiler != null) {
try {
profiler.stopProfiling();
}
catch (IOException e) {
System.out.println("Can't stop profiling");
}
}
}
public static void startProfiling(String fileName) {
Path logDir = PathManager.getLogDir();
if (profiler != null) {
try {
profiler.startProfiling(logDir, fileName);
}
catch (IOException e) {
System.out.println("Can't start profiling");
}
}
}
}
}


@@ -203,7 +203,7 @@ public class XmlParsingTest extends ParsingTestCase {
});
perfTest.setup(() -> PsiManager.getInstance(getProject()).dropPsiCaches())
.startAsSubtest(this.getClass().getSimpleName() + " " + perfTest.launchName);
.startAsSubtest(this.getClass().getSimpleName() + " " + perfTest.getLaunchName());
LeafElement firstLeaf = TreeUtil.findFirstLeaf(file.getNode());
int count = 0;