= if (toDownload.isEmpty()) {
emptyList()
}
else {
- val httpClientWithoutFollowingRedirects = httpClient.newBuilder().followRedirects(false).build()
- // 4MB block, x2 of thread count - one buffer to source, another one for target
- createBufferPool(downloadParallelism * 2).use { bufferPool ->
+ createHttp2ClientSessionFactory(trustAll = metadata.serverUrl.contains("127.0.0.1")).use { client ->
downloadCompilationCache(
- serverUrl = serverUrl,
- prefix = prefix,
+ client = client,
+ serverUrl = metadata.serverUrl,
+ prefix = metadata.prefix,
toDownload = toDownload,
- client = httpClientWithoutFollowingRedirects,
- bufferPool = bufferPool,
downloadedBytes = downloadedBytes,
skipUnpack = skipUnpack,
saveHash = saveHash,
@@ -442,10 +396,12 @@ suspend fun fetchAndUnpackCompiledClasses(
}
val start = System.nanoTime()
- spanBuilder("unpack compiled classes archives").use(Dispatchers.IO) {
- toUnpack.forEachConcurrent { item ->
- spanBuilder("unpack").setAttribute("name", item.name).use {
- unpackArchive(item, saveHash)
+ spanBuilder("unpack compiled classes archives").use {
+ for (item in toUnpack) {
+ launch {
+ spanBuilder("unpack").setAttribute("name", item.name).use {
+ unpackArchive(item, saveHash)
+ }
}
}
}
@@ -466,55 +422,57 @@ private suspend fun checkPreviouslyUnpackedDirectories(
}
val start = System.nanoTime()
- withContext(Dispatchers.IO) {
- launch {
+ coroutineScope {
+ launch(Dispatchers.IO) {
spanBuilder("remove stalled directories not present in metadata").setAttribute(AttributeKey.stringArrayKey("keys"), java.util.List.copyOf(metadata.files.keys)).use {
removeStalledDirs(metadata, classOutput)
}
}
- items.forEachConcurrent { item ->
- val out = item.output
- if (Files.notExists(out)) {
- span.addEvent("output directory doesn't exist", Attributes.of(AttributeKey.stringKey("name"), item.name, AttributeKey.stringKey("outDir"), out.toString()))
- return@forEachConcurrent
- }
-
- val hashFile = out.resolve(".hash")
- if (!Files.isRegularFile(hashFile)) {
- span.addEvent("no .hash file in output directory", Attributes.of(AttributeKey.stringKey("name"), item.name))
- out.deleteRecursively()
- return@forEachConcurrent
- }
-
- try {
- val actual = Files.readString(hashFile)
- if (actual == item.hash) {
- upToDate.add(item.name)
+ for (item in items) {
+ launch {
+ val out = item.output
+ if (Files.notExists(out)) {
+ span.addEvent("output directory doesn't exist", Attributes.of(AttributeKey.stringKey("name"), item.name, AttributeKey.stringKey("outDir"), out.toString()))
+ return@launch
}
- else {
- span.addEvent(
- "output directory hash mismatch",
- Attributes.of(
- AttributeKey.stringKey("name"), item.name,
- AttributeKey.stringKey("expected"), item.hash,
- AttributeKey.stringKey("actual"), actual,
+
+ val hashFile = out.resolve(".hash")
+ if (!Files.isRegularFile(hashFile)) {
+ span.addEvent("no .hash file in output directory", Attributes.of(AttributeKey.stringKey("name"), item.name))
+ out.deleteRecursively()
+ return@launch
+ }
+
+ try {
+ val actual = Files.readString(hashFile)
+ if (actual == item.hash) {
+ upToDate.add(item.name)
+ }
+ else {
+ span.addEvent(
+ "output directory hash mismatch",
+ Attributes.of(
+ AttributeKey.stringKey("name"), item.name,
+ AttributeKey.stringKey("expected"), item.hash,
+ AttributeKey.stringKey("actual"), actual,
+ )
)
- )
+ out.deleteRecursively()
+ }
+ }
+ catch (e: CancellationException) {
+ throw e
+ }
+ catch (e: Throwable) {
+ span.addEvent("output directory hash calculation failed", Attributes.of(AttributeKey.stringKey("name"), item.name))
+ span.recordException(e, Attributes.of(AttributeKey.stringKey("name"), item.name))
out.deleteRecursively()
}
}
- catch (e: CancellationException) {
- throw e
- }
- catch (e: Throwable) {
- span.addEvent("output directory hash calculation failed", Attributes.of(AttributeKey.stringKey("name"), item.name))
- span.recordException(e, Attributes.of(AttributeKey.stringKey("name"), item.name))
- out.deleteRecursively()
- }
}
}
- return TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start)
+ return System.nanoTime() - start
}
private fun CoroutineScope.removeStalledDirs(
@@ -557,7 +515,7 @@ private fun CoroutineScope.removeStalledDirs(
private val sharedDigest = MessageDigest.getInstance("SHA-256", java.security.Security.getProvider("SUN"))
internal fun sha256() = sharedDigest.clone() as MessageDigest
-private fun computeHash(file: Path): String {
+internal fun computeHash(file: Path): String {
val messageDigest = sha256()
FileChannel.open(file, READ_OPERATION).use { channel ->
val fileSize = channel.size()
@@ -583,7 +541,7 @@ private fun computeHash(file: Path): String {
// we cannot change file extension or prefix, so, add suffix
internal fun digestToString(digest: MessageDigest): String = BigInteger(1, digest.digest()).toString(36) + "-z"
-data class PackAndUploadItem(
+internal data class PackAndUploadItem(
@JvmField val output: Path,
@JvmField val name: String,
@JvmField val archive: Path,
@@ -604,7 +562,7 @@ internal data class FetchAndUnpackItem(
* URL for each part should be constructed like: ${serverUrl}/${prefix}/${files.key}/${files.value}.jar
*/
@Serializable
-private data class CompilationPartsMetadata(
+internal data class CompilationPartsMetadata(
@JvmField @SerialName("server-url") val serverUrl: String,
@JvmField val branch: String,
@JvmField val prefix: String,
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/DirectFixedSizeByteBufferPool.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/DirectFixedSizeByteBufferPool.kt
index c54d2590a768..d9dd75e643f7 100644
--- a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/DirectFixedSizeByteBufferPool.kt
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/DirectFixedSizeByteBufferPool.kt
@@ -1,38 +1,53 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.intellij.build.impl.compilation
-import com.intellij.util.lang.ByteBufferCleaner
-import kotlinx.coroutines.channels.Channel
-import kotlinx.coroutines.channels.getOrElse
+import kotlinx.coroutines.sync.Semaphore
+import kotlinx.coroutines.sync.withPermit
+import org.jetbrains.intellij.build.io.unmapBuffer
import java.nio.ByteBuffer
import java.nio.ByteOrder
+import java.util.concurrent.ConcurrentLinkedQueue
+import java.util.concurrent.atomic.AtomicInteger
-internal class DirectFixedSizeByteBufferPool(private val bufferSize: Int, maxPoolSize: Int) : AutoCloseable {
- private val pool = Channel(capacity = maxPoolSize)
+internal class DirectFixedSizeByteBufferPool(private val bufferSize: Int, private val maxPoolSize: Int) : AutoCloseable {
+ private val pool = ConcurrentLinkedQueue<ByteBuffer>()
+ private val count = AtomicInteger()
+ @JvmField
+ val semaphore: Semaphore = Semaphore(maxPoolSize)
- fun allocate(): ByteBuffer {
- val result = pool.tryReceive()
- return when {
- result.isSuccess -> result.getOrThrow()
- result.isClosed -> throw IllegalStateException("Pool is closed")
- else -> ByteBuffer.allocateDirect(bufferSize)
+ private fun allocate(): ByteBuffer {
+ val result = pool.poll() ?: return ByteBuffer.allocateDirect(bufferSize)
+ count.decrementAndGet()
+ return result
+ }
+
+ suspend inline fun <T> withBuffer(task: (buffer: ByteBuffer) -> T): T {
+ return semaphore.withPermit {
+ val buffer = allocate()
+ try {
+ task(buffer)
+ }
+ finally {
+ release(buffer)
+ }
}
}
- fun release(buffer: ByteBuffer) {
+ private fun release(buffer: ByteBuffer) {
buffer.clear()
buffer.order(ByteOrder.BIG_ENDIAN)
- pool.trySend(buffer).getOrElse {
- // if the pool is full, we simply discard the buffer
- ByteBufferCleaner.unmapBuffer(buffer)
+ if (count.incrementAndGet() < maxPoolSize) {
+ pool.offer(buffer)
+ }
+ else {
+ count.decrementAndGet()
+ unmapBuffer(buffer)
}
}
- // pool is not expected to be used during releaseAll call
override fun close() {
while (true) {
- ByteBufferCleaner.unmapBuffer(pool.tryReceive().getOrNull() ?: break)
+ unmapBuffer(pool.poll() ?: return)
}
- pool.close()
}
}
\ No newline at end of file
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheDownloader.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheDownloader.kt
index 74d169480403..1fca14d1e66e 100644
--- a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheDownloader.kt
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheDownloader.kt
@@ -3,7 +3,6 @@
package org.jetbrains.intellij.build.impl.compilation
-import com.intellij.platform.util.coroutines.forEachConcurrent
import com.intellij.util.io.Decompressor
import io.opentelemetry.api.common.AttributeKey
import io.opentelemetry.api.common.Attributes
@@ -12,6 +11,7 @@ import kotlinx.coroutines.*
import okhttp3.Request
import okio.sink
import org.jetbrains.intellij.build.CompilationContext
+import org.jetbrains.intellij.build.forEachConcurrent
import org.jetbrains.intellij.build.impl.compilation.cache.CommitsHistory
import org.jetbrains.intellij.build.impl.compilation.cache.getAllCompilationOutputs
import org.jetbrains.intellij.build.impl.compilation.cache.parseSourcesStateFile
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheUploader.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheUploader.kt
index fa8a777c6d6d..985ad4b6b13a 100644
--- a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheUploader.kt
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/PortableCompilationCacheUploader.kt
@@ -3,7 +3,6 @@
package org.jetbrains.intellij.build.impl.compilation
import com.google.gson.stream.JsonReader
-import com.intellij.platform.util.coroutines.forEachConcurrent
import com.intellij.util.io.Compressor
import io.opentelemetry.api.common.AttributeKey
import io.opentelemetry.api.common.Attributes
@@ -11,12 +10,14 @@ import io.opentelemetry.api.trace.Span
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
+import okhttp3.MediaType.Companion.toMediaType
import okhttp3.Request
import okhttp3.RequestBody
import okio.BufferedSink
import okio.source
import org.jetbrains.intellij.build.BuildMessages
import org.jetbrains.intellij.build.CompilationContext
+import org.jetbrains.intellij.build.forEachConcurrent
import org.jetbrains.intellij.build.impl.compilation.cache.CommitsHistory
import org.jetbrains.intellij.build.impl.compilation.cache.getAllCompilationOutputs
import org.jetbrains.intellij.build.io.copyFile
@@ -37,6 +38,7 @@ import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.deleteRecursively
private const val SOURCES_STATE_FILE_NAME = "target_sources_state.json"
+private val MEDIA_TYPE_BINARY = "application/octet-stream".toMediaType()
internal class PortableCompilationCacheUploader(
private val context: CompilationContext,
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/ZstdCompressContextPool.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/ZstdCompressContextPool.kt
new file mode 100644
index 000000000000..829352f7b044
--- /dev/null
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/ZstdCompressContextPool.kt
@@ -0,0 +1,43 @@
+// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
+package org.jetbrains.intellij.build.impl.compilation
+
+import com.github.luben.zstd.ZstdCompressCtx
+import java.util.concurrent.ConcurrentLinkedQueue
+
+// we cannot use Netty Recycler as we must close ZstdCompressCtx after use of pool
+internal class ZstdCompressContextPool(private val level: Int = 3) : AutoCloseable {
+ private val pool = ConcurrentLinkedQueue<ZstdCompressCtx>()
+
+ inline fun <T> withZstd(task: (zstd: ZstdCompressCtx) -> T): T {
+ val zstd = allocate()
+ try {
+ return task(zstd)
+ }
+ finally {
+ zstd.reset()
+ pool.offer(zstd)
+ }
+ }
+
+ private fun allocate(): ZstdCompressCtx {
+ pool.poll()?.let {
+ configure(it)
+ return it
+ }
+
+ val zstd = ZstdCompressCtx()
+ configure(zstd)
+ return zstd
+ }
+
+ private fun configure(zstd: ZstdCompressCtx) {
+ zstd.setLevel(level)
+ //zstd.setLong(64)
+ }
+
+ override fun close() {
+ while (true) {
+ (pool.poll() ?: return).close()
+ }
+ }
+}
\ No newline at end of file
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/download.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/download.kt
index 7a6ed1c8a58c..50edf7f73149 100644
--- a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/download.kt
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/download.kt
@@ -1,31 +1,26 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.intellij.build.impl.compilation
-import com.github.luben.zstd.ZstdDirectBufferDecompressingStreamNoFinalizer
-import com.intellij.platform.util.coroutines.mapConcurrent
import com.intellij.util.lang.HashMapZipFile
import io.opentelemetry.api.common.AttributeKey
import io.opentelemetry.api.common.Attributes
-import io.opentelemetry.api.trace.Span
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.withContext
-import okhttp3.OkHttpClient
-import okhttp3.Request
-import okhttp3.Response
import okio.IOException
+import org.jetbrains.intellij.build.forEachConcurrent
+import org.jetbrains.intellij.build.http2Client.Http2ClientConnection
+import org.jetbrains.intellij.build.http2Client.Http2ClientConnectionFactory
+import org.jetbrains.intellij.build.http2Client.download
import org.jetbrains.intellij.build.io.INDEX_FILENAME
-import org.jetbrains.intellij.build.retryWithExponentialBackOff
import org.jetbrains.intellij.build.telemetry.TraceManager.spanBuilder
import org.jetbrains.intellij.build.telemetry.use
-import java.net.HttpURLConnection
-import java.nio.ByteBuffer
+import java.math.BigInteger
+import java.net.URI
import java.nio.channels.FileChannel
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
-import java.security.MessageDigest
import java.util.*
import java.util.concurrent.CancellationException
+import java.util.concurrent.CopyOnWriteArrayList
import java.util.concurrent.atomic.AtomicLong
import kotlin.io.path.name
@@ -33,46 +28,54 @@ private val OVERWRITE_OPERATION = EnumSet.of(StandardOpenOption.WRITE, StandardO
internal suspend fun downloadCompilationCache(
serverUrl: String,
+ client: Http2ClientConnectionFactory,
prefix: String,
- toDownload: List,
- client: OkHttpClient,
- bufferPool: DirectFixedSizeByteBufferPool,
+ toDownload: Collection<FetchAndUnpackItem>,
downloadedBytes: AtomicLong,
skipUnpack: Boolean,
saveHash: Boolean,
): List<CompilePartDownloadFailedError> {
- var urlWithPrefix = "$serverUrl/$prefix/"
+ var urlPathWithPrefix = "/$prefix/"
// first let's check for initial redirect (mirror selection)
- spanBuilder("mirror selection").use { span ->
- client.newCall(Request.Builder().url(urlWithPrefix).head().build()).executeAsync().use { response ->
- val statusCode = response.code
- val locationHeader = response.header("location")
- if (locationHeader != null && (statusCode == HttpURLConnection.HTTP_MOVED_TEMP ||
- statusCode == HttpURLConnection.HTTP_MOVED_PERM ||
- statusCode == 307 ||
- statusCode == HttpURLConnection.HTTP_SEE_OTHER)) {
- urlWithPrefix = locationHeader
- span.addEvent("redirected to mirror", Attributes.of(AttributeKey.stringKey("url"), urlWithPrefix))
+ val initialServerUri = URI(serverUrl)
+ var effectiveServerUri = initialServerUri
+ var connection: Http2ClientConnection? = client.connect(effectiveServerUri.host, effectiveServerUri.port)
+ try {
+ spanBuilder("mirror selection").use { span ->
+ val newLocation = connection!!.getRedirectLocation(urlPathWithPrefix)
+ if (newLocation == null) {
+ span.addEvent("origin server will be used", Attributes.of(AttributeKey.stringKey("url"), urlPathWithPrefix))
}
else {
- span.addEvent("origin server will be used", Attributes.of(AttributeKey.stringKey("url"), urlWithPrefix))
+ effectiveServerUri = URI(newLocation.toString())
+ urlPathWithPrefix = effectiveServerUri.path
+ span.addEvent("redirected to mirror", Attributes.of(AttributeKey.stringKey("url"), urlPathWithPrefix))
}
}
}
+ finally {
+ if (initialServerUri != effectiveServerUri) {
+ connection?.close()
+ connection = null
+ }
+ }
- return withContext(Dispatchers.IO) {
- toDownload.mapConcurrent(downloadParallelism) { item ->
- val url = "$urlWithPrefix${item.name}/${item.file.fileName}"
- spanBuilder("download").setAttribute("name", item.name).setAttribute("url", url).use {
+ if (connection == null) {
+ connection = client.connect(effectiveServerUri.host, effectiveServerUri.port)
+ }
+ try {
+ val errors = CopyOnWriteArrayList<CompilePartDownloadFailedError>()
+ toDownload.forEachConcurrent(downloadParallelism) { item ->
+ val urlPath = "$urlPathWithPrefix${item.name}/${item.file.fileName}"
+ spanBuilder("download").setAttribute("name", item.name).setAttribute("urlPath", urlPath).use { span ->
try {
downloadedBytes.getAndAdd(
download(
item = item,
- url = url,
- bufferPool = bufferPool,
+ urlPath = urlPath,
skipUnpack = skipUnpack,
saveHash = saveHash,
- client = client,
+ connection = connection,
)
)
}
@@ -80,43 +83,41 @@ internal suspend fun downloadCompilationCache(
throw e
}
catch (e: Throwable) {
- return@use CompilePartDownloadFailedError(item, e)
+ span.recordException(e)
+ errors.add(CompilePartDownloadFailedError(item, e))
}
- null
}
}
- }.filterNotNull()
+ return errors
+ }
+ finally {
+ connection.close()
+ }
}
private suspend fun download(
item: FetchAndUnpackItem,
- url: String,
- bufferPool: DirectFixedSizeByteBufferPool,
+ urlPath: String,
skipUnpack: Boolean,
saveHash: Boolean,
- client: OkHttpClient,
+ connection: Http2ClientConnection,
): Long {
- val downloaded = retryWithExponentialBackOff(onException = ::onDownloadException) {
- client.newCall(Request.Builder().url(url).build()).executeAsync().useSuccessful { response ->
- val digest = sha256()
- writeFile(file = item.file, response = response, bufferPool = bufferPool, url = url, digest = digest)
- val computedHash = digestToString(digest)
- if (computedHash != item.hash) {
- throw HashMismatchException("hash mismatch") { span, attempt ->
- span.addEvent(
- "hash mismatch",
- Attributes.of(
- AttributeKey.longKey("attemptNumber"), attempt.toLong(),
- AttributeKey.stringKey("name"), item.file.name,
- AttributeKey.stringKey("expected"), item.hash,
- AttributeKey.stringKey("computed"), computedHash,
- )
- )
- }
- }
- response.body.contentLength()
- }
+ val (downloaded, digest) = connection.download(path = urlPath, file = item.file, digestFactory = { sha256() })
+ val digestBytes = digest.digest()
+ val computedHash = BigInteger(1, digestBytes).toString(36) + "-z"
+ if (computedHash != item.hash) {
+ println("actualHash : ${computeHash(item.file)}")
+ println("expectedHash: ${item.hash}")
+ println("computedHash: $computedHash")
+
+ val spanAttributes = Attributes.of(
+ AttributeKey.stringKey("name"), item.file.name,
+ AttributeKey.stringKey("expected"), item.hash,
+ AttributeKey.stringKey("computed"), computedHash,
+ )
+ throw HashMismatchException("hash mismatch ($spanAttributes)")
}
+
if (!skipUnpack) {
spanBuilder("unpack").setAttribute("name", item.name).use {
unpackArchive(item, saveHash)
@@ -125,23 +126,9 @@ private suspend fun download(
return downloaded
}
-private suspend fun onDownloadException(attempt: Int, e: Exception) {
- spanBuilder("Retrying download with exponential back off").use { span ->
- if (e is HashMismatchException) {
- e.eventLogger.invoke(span, attempt)
- }
- else {
- span.addEvent("Attempt failed", Attributes.of(
- AttributeKey.longKey("attemptNumber"), attempt.toLong(),
- AttributeKey.stringKey("error"), e.toString()
- ))
- }
- }
-}
-
internal class CompilePartDownloadFailedError(@JvmField val item: FetchAndUnpackItem, cause: Throwable) : RuntimeException(cause)
-internal class HashMismatchException(message: String, @JvmField val eventLogger: (Span, Int) -> Unit) : IOException(message)
+internal class HashMismatchException(message: String) : IOException(message)
internal fun unpackArchive(item: FetchAndUnpackItem, saveHash: Boolean) {
HashMapZipFile.load(item.file).use { zipFile ->
@@ -170,66 +157,4 @@ internal fun unpackArchive(item: FetchAndUnpackItem, saveHash: Boolean) {
// save actual hash
Files.writeString(item.output.resolve(".hash"), item.hash)
}
-}
-
-private fun writeFile(file: Path, response: Response, bufferPool: DirectFixedSizeByteBufferPool, url: String, digest: MessageDigest) {
- Files.createDirectories(file.parent)
- FileChannel.open(file, OVERWRITE_OPERATION).use { channel ->
- val source = response.body.source()
- val sourceBuffer = bufferPool.allocate()
- object : ZstdDirectBufferDecompressingStreamNoFinalizer(sourceBuffer) {
- public override fun refill(toRefill: ByteBuffer): ByteBuffer {
- toRefill.clear()
- do {
- if (source.read(toRefill) == -1) {
- break
- }
- }
- while (!source.exhausted() && toRefill.hasRemaining())
- toRefill.flip()
- return toRefill
- }
-
- override fun close() {
- try {
- super.close()
- }
- finally {
- bufferPool.release(sourceBuffer)
- }
- }
- }.use { decompressor ->
- var offset = 0L
- val targetBuffer = bufferPool.allocate()
- try {
- // refill is not called on start
- decompressor.refill(sourceBuffer)
- do {
- do {
- // decompressor can consume not the whole source buffer if target buffer size is not enough
- decompressor.read(targetBuffer)
- targetBuffer.flip()
-
- targetBuffer.mark()
- digest.update(targetBuffer)
- targetBuffer.reset()
-
- do {
- offset += channel.write(targetBuffer, offset)
- }
- while (targetBuffer.hasRemaining())
- targetBuffer.clear()
- }
- while (sourceBuffer.hasRemaining())
- }
- while (!source.exhausted())
- }
- catch (e: IOException) {
- throw IOException("Cannot unpack $url", e)
- }
- finally {
- bufferPool.release(targetBuffer)
- }
- }
- }
}
\ No newline at end of file
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/http.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/http.kt
index e44322dee8b9..1007f0e72c08 100644
--- a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/http.kt
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/http.kt
@@ -4,15 +4,12 @@ package org.jetbrains.intellij.build.impl.compilation
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.suspendCancellableCoroutine
import okhttp3.*
-import okhttp3.MediaType.Companion.toMediaType
import okhttp3.internal.closeQuietly
import org.jetbrains.intellij.build.NoMoreRetriesException
import java.io.IOException
import java.util.concurrent.TimeUnit
import kotlin.coroutines.resumeWithException
-internal val MEDIA_TYPE_BINARY = "application/octet-stream".toMediaType()
-
internal suspend fun OkHttpClient.head(url: String, authHeader: String): Int {
return newCall(Request.Builder().url(url).head().header("Authorization", authHeader).build()).executeAsync().use { response ->
if (response.code != 200 && response.code != 404) {
@@ -43,6 +40,11 @@ internal val httpClient: OkHttpClient by lazy {
.connectTimeout(timeout, unit)
.writeTimeout(timeout, unit)
.readTimeout(timeout, unit)
+ .dispatcher(Dispatcher().apply {
+ // we upload/download to the same host - increase `maxRequestsPerHost`
+ //maxRequestsPerHost = Runtime.getRuntime().availableProcessors().coerceIn(5, 16)
+ //... but in the same time it can increase the bill for ALB, so, leave it as is
+ })
.addInterceptor { chain ->
var request = chain.request()
if (request.header("User-Agent").isNullOrBlank()) {
@@ -83,7 +85,7 @@ internal suspend fun Call.executeAsync(): Response {
continuation.invokeOnCancellation {
this.cancel()
}
- this.enqueue(object : Callback {
+ enqueue(object : Callback {
override fun onFailure(call: Call, e: IOException) {
continuation.resumeWithException(e)
}
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/upload.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/upload.kt
index 8f486eb57194..627faac3e6bc 100644
--- a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/upload.kt
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/compilation/upload.kt
@@ -4,47 +4,42 @@
package org.jetbrains.intellij.build.impl.compilation
import com.github.luben.zstd.Zstd
-import com.github.luben.zstd.ZstdDirectBufferCompressingStreamNoFinalizer
-import com.intellij.platform.util.coroutines.forEachConcurrent
+import com.github.luben.zstd.ZstdCompressCtx
+import io.netty.handler.codec.http.HttpHeaderValues
+import io.netty.handler.codec.http.HttpResponseStatus
+import io.netty.handler.codec.http2.Http2StreamChannel
+import io.netty.util.AsciiString
import io.opentelemetry.api.common.AttributeKey
import io.opentelemetry.api.common.Attributes
import io.opentelemetry.api.trace.Span
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.withContext
import kotlinx.serialization.Serializable
-import kotlinx.serialization.json.Json
-import kotlinx.serialization.json.decodeFromStream
-import okhttp3.MediaType.Companion.toMediaType
-import okhttp3.OkHttpClient
-import okhttp3.Request
-import okhttp3.RequestBody
-import okhttp3.RequestBody.Companion.toRequestBody
-import okio.BufferedSink
-import okio.use
+import org.jetbrains.intellij.build.forEachConcurrent
+import org.jetbrains.intellij.build.http2Client.Http2ClientConnection
+import org.jetbrains.intellij.build.http2Client.MAX_BUFFER_SIZE
+import org.jetbrains.intellij.build.http2Client.writeData
+import org.jetbrains.intellij.build.io.unmapBuffer
import org.jetbrains.intellij.build.telemetry.TraceManager.spanBuilder
import org.jetbrains.intellij.build.telemetry.use
-import java.nio.ByteBuffer
+import java.nio.MappedByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
import java.util.*
+import java.util.concurrent.CancellationException
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.atomic.AtomicLong
+import kotlin.math.min
-private val MEDIA_TYPE_JSON = "application/json".toMediaType()
internal val READ_OPERATION = EnumSet.of(StandardOpenOption.READ)
-internal const val MAX_BUFFER_SIZE = 4 * 1014 * 1024
-internal const val ZSTD_LEVEL = 3
internal suspend fun uploadArchives(
reportStatisticValue: (key: String, value: String) -> Unit,
config: CompilationCacheUploadConfiguration,
metadataJson: String,
- httpClient: OkHttpClient,
+ httpConnection: Http2ClientConnection,
items: List<PackAndUploadItem>,
- bufferPool: DirectFixedSizeByteBufferPool,
) {
val uploadedCount = AtomicInteger()
val uploadedBytes = AtomicLong()
@@ -56,20 +51,25 @@ internal suspend fun uploadArchives(
val alreadyUploaded: Set<String> = try {
if (config.checkFiles) {
spanBuilder("fetch info about already uploaded files").use {
- HashSet(getFoundAndMissingFiles(metadataJson, config.serverUrl, httpClient).found)
+ getFoundAndMissingFiles(metadataJson = metadataJson, urlPathPrefix = config.serverUrlPathPrefix, connection = httpConnection).found
}
}
else {
emptySet()
}
}
+ catch (e: CancellationException) {
+ throw e
+ }
catch (e: Throwable) {
Span.current().recordException(e, Attributes.of(AttributeKey.stringKey("message"), "failed to fetch info about already uploaded files, will fallback to HEAD requests"))
fallbackToHeads = true
emptySet()
}
- withContext(Dispatchers.IO) {
+ val sourceBlockSize = MAX_BUFFER_SIZE
+ val urlPathPrefix = "${config.serverUrlPathPrefix}/${config.uploadUrlPathPrefix}"
+ ZstdCompressContextPool().use { zstdCompressContextPool ->
items.forEachConcurrent(uploadParallelism) { item ->
if (alreadyUploaded.contains(item.name)) {
reusedCount.getAndIncrement()
@@ -77,16 +77,18 @@ internal suspend fun uploadArchives(
return@forEachConcurrent
}
+ val urlPath = "$urlPathPrefix/${item.name}/${item.hash!!}.jar"
spanBuilder("upload archive").setAttribute("name", item.name).setAttribute("hash", item.hash!!).use {
val size = Files.size(item.archive)
val isUploaded = uploadFile(
- url = "${config.serverUrl}/${config.uploadPrefix}/${item.name}/${item.hash!!}.jar",
+ urlPath = urlPath,
file = item.archive,
useHead = fallbackToHeads,
span = Span.current(),
- httpClient = httpClient,
- bufferPool = bufferPool,
+ httpSession = httpConnection,
fileSize = size,
+ sourceBlockSize = sourceBlockSize,
+ zstdCompressContextPool = zstdCompressContextPool,
)
if (isUploaded) {
uploadedCount.getAndIncrement()
@@ -125,172 +127,100 @@ internal suspend fun uploadArchives(
reportStatisticValue("compile-parts:total:count", (reusedCount.get() + uploadedCount.get()).toString())
}
-private suspend fun getFoundAndMissingFiles(metadataJson: String, serverUrl: String, httpClient: OkHttpClient): CheckFilesResponse {
- httpClient.newCall(Request.Builder()
- .url("$serverUrl/check-files")
- .post(metadataJson.toRequestBody(MEDIA_TYPE_JSON))
- .build()).executeAsync().useSuccessful {
- return Json.decodeFromStream(it.body.byteStream())
- }
+private suspend fun getFoundAndMissingFiles(metadataJson: String, urlPathPrefix: String, connection: Http2ClientConnection): CheckFilesResponse {
+ return connection.post(path = "$urlPathPrefix/check-files", data = metadataJson, contentType = HttpHeaderValues.APPLICATION_JSON)
}
// Using ZSTD dictionary doesn't make the difference, even slightly worse (default compression level 3).
// That's because in our case, we compress a relatively large archive of class files.
private suspend fun uploadFile(
- url: String,
+ urlPath: String,
file: Path,
useHead: Boolean,
span: Span,
- httpClient: OkHttpClient,
- bufferPool: DirectFixedSizeByteBufferPool,
+ httpSession: Http2ClientConnection,
fileSize: Long,
+ sourceBlockSize: Int,
+ zstdCompressContextPool: ZstdCompressContextPool,
): Boolean {
if (useHead) {
- val request = Request.Builder().url(url).head().build()
- val code = httpClient.newCall(request).executeAsync().use {
- it.code
+ val status = httpSession.head(urlPath)
+ if (status == HttpResponseStatus.OK) {
+ span.addEvent("already exist on server, nothing to upload", Attributes.of(AttributeKey.stringKey("urlPath"), urlPath))
+ return false
}
-
- when {
- code == 200 -> {
- span.addEvent("already exist on server, nothing to upload", Attributes.of(AttributeKey.stringKey("url"), url))
- return false
- }
- code != 404 -> {
- span.addEvent("responded with unexpected", Attributes.of(
- AttributeKey.longKey("code"), code.toLong(),
- AttributeKey.stringKey("url"), url,
- ))
- }
+ else if (status != HttpResponseStatus.NOT_FOUND) {
+ span.addEvent(
+ "responded with unexpected",
+ Attributes.of(
+ AttributeKey.stringKey("status"), status.toString(),
+ AttributeKey.stringKey("urlPath"), urlPath
+ ),
+ )
}
}
- if (Zstd.compressBound(fileSize) <= MAX_BUFFER_SIZE) {
- compressSmallFile(file = file, fileSize = fileSize, bufferPool = bufferPool, url = url)
- }
- else {
- val request = Request.Builder()
- .url(url)
- .put(object : RequestBody() {
- override fun contentType() = MEDIA_TYPE_BINARY
+ require(fileSize > 0)
- override fun writeTo(sink: BufferedSink) {
- compressFile(file = file, output = sink, bufferPool = bufferPool)
- }
- })
- .build()
-
- httpClient.newCall(request).executeAsync().useSuccessful { }
+ val fileBuffer = FileChannel.open(file, READ_OPERATION).use { channel ->
+ channel.map(FileChannel.MapMode.READ_ONLY, 0, fileSize)
}
+ try {
+ zstdCompressContextPool.withZstd { zstd ->
+ httpSession.put(AsciiString.of(urlPath)) { stream ->
+ compressAndUpload(
+ fileSize = fileSize,
+ fileBuffer = fileBuffer,
+ sourceBlockSize = sourceBlockSize,
+ zstd = zstd,
+ stream = stream,
+ )
+ }
+ }
+ }
+ finally {
+ unmapBuffer(fileBuffer)
+ }
return true
}
-private suspend fun compressSmallFile(file: Path, fileSize: Long, bufferPool: DirectFixedSizeByteBufferPool, url: String) {
- val targetBuffer = bufferPool.allocate()
- try {
- var readOffset = 0L
- val sourceBuffer = bufferPool.allocate()
- try {
- FileChannel.open(file, READ_OPERATION).use { input ->
- do {
- readOffset += input.read(sourceBuffer, readOffset)
- }
- while (readOffset < fileSize)
- }
- sourceBuffer.flip()
+private suspend fun compressAndUpload(
+ fileSize: Long,
+ fileBuffer: MappedByteBuffer,
+ sourceBlockSize: Int,
+ zstd: ZstdCompressCtx,
+ stream: Http2StreamChannel,
+) {
+ var position = 0
+ while (true) {
+ val chunkSize = min(fileSize - position, sourceBlockSize.toLong()).toInt()
+ val targetSize = Zstd.compressBound(chunkSize.toLong()).toInt()
+ val targetNettyBuffer = stream.alloc().directBuffer(targetSize)
+ val targetBuffer = targetNettyBuffer.nioBuffer(0, targetSize)
+ val compressedSize = zstd.compressDirectByteBuffer(
+ targetBuffer, // compress into targetBuffer
+ targetBuffer.position(), // write compressed data starting at offset position()
+ targetSize, // write no more than target block size bytes
+ fileBuffer, // read data to compress from fileBuffer
+        position, // start reading at source offset `position`
+ chunkSize, // read chunk size bytes
+ )
+ assert(compressedSize > 0)
+ targetNettyBuffer.writerIndex(targetNettyBuffer.writerIndex() + compressedSize)
+ assert(targetNettyBuffer.readableBytes() == compressedSize)
- Zstd.compress(targetBuffer, sourceBuffer, ZSTD_LEVEL, false)
- targetBuffer.flip()
- }
- finally {
- bufferPool.release(sourceBuffer)
- }
+ position += chunkSize
- val compressedSize = targetBuffer.remaining()
-
- val request = Request.Builder()
- .url(url)
- .put(object : RequestBody() {
- override fun contentLength() = compressedSize.toLong()
-
- override fun contentType() = MEDIA_TYPE_BINARY
-
- override fun writeTo(sink: BufferedSink) {
- targetBuffer.mark()
- sink.write(targetBuffer)
- targetBuffer.reset()
- }
- })
- .build()
-
- httpClient.newCall(request).executeAsync().useSuccessful { }
- }
- finally {
- bufferPool.release(targetBuffer)
- }
-}
-
-private fun compressFile(file: Path, output: BufferedSink, bufferPool: DirectFixedSizeByteBufferPool) {
- val targetBuffer = bufferPool.allocate()
- CompilationCacheZstdCompressingStream(targetBuffer = targetBuffer, output = output, bufferPool = bufferPool).use { compressor ->
- val sourceBuffer = bufferPool.allocate()
- try {
- var offset = 0L
- FileChannel.open(file, READ_OPERATION).use { input ->
- val fileSize = input.size()
- while (offset < fileSize) {
- val actualBlockSize = (fileSize - offset).toInt()
- if (sourceBuffer.remaining() > actualBlockSize) {
- sourceBuffer.limit(sourceBuffer.position() + actualBlockSize)
- }
-
- var readOffset = offset
- do {
- readOffset += input.read(sourceBuffer, readOffset)
- }
- while (sourceBuffer.hasRemaining())
-
- sourceBuffer.flip()
- compressor.compress(sourceBuffer)
-
- sourceBuffer.clear()
- offset = readOffset
- }
- }
- }
- finally {
- bufferPool.release(sourceBuffer)
- }
- }
-}
-
-private class CompilationCacheZstdCompressingStream(
- private val targetBuffer: ByteBuffer,
- private val output: BufferedSink,
- private val bufferPool: DirectFixedSizeByteBufferPool,
-) : ZstdDirectBufferCompressingStreamNoFinalizer(targetBuffer, ZSTD_LEVEL) {
- override fun flushBuffer(toFlush: ByteBuffer): ByteBuffer {
- toFlush.flip()
- while (toFlush.hasRemaining()) {
- output.write(toFlush)
- }
- toFlush.clear()
- return toFlush
- }
-
- override fun close() {
- try {
- super.close()
- }
- finally {
- bufferPool.release(targetBuffer)
+ val endStream = position >= fileSize
+ stream.writeData(targetNettyBuffer, endStream)
+ if (endStream) {
+ break
}
}
}
@Serializable
private data class CheckFilesResponse(
-  @JvmField val found: List<String> = emptyList(),
-  @JvmField val missing: List<String> = emptyList(),
-)
\ No newline at end of file
+  @JvmField val found: HashSet<String> = HashSet(),
+)
diff --git a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/sbom/SoftwareBillOfMaterialsImpl.kt b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/sbom/SoftwareBillOfMaterialsImpl.kt
index fbc12f3c1ddc..3c8373a01e68 100644
--- a/platform/build-scripts/src/org/jetbrains/intellij/build/impl/sbom/SoftwareBillOfMaterialsImpl.kt
+++ b/platform/build-scripts/src/org/jetbrains/intellij/build/impl/sbom/SoftwareBillOfMaterialsImpl.kt
@@ -4,10 +4,8 @@
package org.jetbrains.intellij.build.impl.sbom
import com.intellij.openapi.util.SystemInfoRt
-import com.intellij.platform.util.coroutines.forEachConcurrent
import com.intellij.util.io.DigestUtil
import com.intellij.util.io.DigestUtil.sha1Hex
-import com.intellij.util.io.DigestUtil.updateContentHash
import com.intellij.util.io.bytesToHex
import com.intellij.util.io.sha256Hex
import com.jetbrains.plugin.structure.base.utils.exists
diff --git a/platform/build-scripts/tests/nginx-webdav.conf b/platform/build-scripts/tests/nginx-webdav.conf
index 84d61463def0..fceaa3f888a4 100644
--- a/platform/build-scripts/tests/nginx-webdav.conf
+++ b/platform/build-scripts/tests/nginx-webdav.conf
@@ -1,19 +1,35 @@
+daemon off;
worker_processes auto;
events {
- worker_connections 1024;
}
http {
+ log_format main '[$time_local] "$request" $status $body_bytes_sent ';
+ access_log /dev/stdout main;
+ error_log /dev/stderr;
+
+ gzip on;
+ gzip_types text/plain text/css application/x-javascript text/xml application/xml application/xml+rss text/javascript application/json application/javascript text/x-js;
server {
- listen 1900;
+ listen 127.0.0.1:1900 ssl;
+ http2 on;
+ server_name 127.0.0.1;
+ ssl_certificate ./server.crt;
+ ssl_certificate_key ./server.key;
location / {
root /tmp/webdav;
client_max_body_size 0;
create_full_put_path on;
- dav_methods PUT DELETE MKCOL COPY MOVE;
+ dav_methods PUT;
autoindex on;
}
+
+ location /check-files {
+ proxy_http_version 1.1;
+ proxy_set_header Connection "";
+ proxy_pass http://127.0.0.1:8082/;
+ }
}
}
\ No newline at end of file
diff --git a/platform/build-scripts/tests/server.crt b/platform/build-scripts/tests/server.crt
new file mode 100644
index 000000000000..2d210eb7e1e2
--- /dev/null
+++ b/platform/build-scripts/tests/server.crt
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDhjCCAm6gAwIBAgIJAJN8H+8liHX0MA0GCSqGSIb3DQEBCwUAMHgxCzAJBgNV
+BAYTAlhYMQwwCgYDVQQIDANOL0ExDDAKBgNVBAcMA04vQTEgMB4GA1UECgwXU2Vs
+Zi1zaWduZWQgY2VydGlmaWNhdGUxKzApBgNVBAMMIjEyNy4wLjAuMTogU2VsZi1z
+aWduZWQgY2VydGlmaWNhdGUwHhcNMjQwODI2MTEyMzEzWhcNMjYwODI2MTEyMzEz
+WjB4MQswCQYDVQQGEwJYWDEMMAoGA1UECAwDTi9BMQwwCgYDVQQHDANOL0ExIDAe
+BgNVBAoMF1NlbGYtc2lnbmVkIGNlcnRpZmljYXRlMSswKQYDVQQDDCIxMjcuMC4w
+LjE6IFNlbGYtc2lnbmVkIGNlcnRpZmljYXRlMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAvRyBeGto8nsEMkr14NK+g9hIyFdiA+j+m1fQ4D/y+zI+N+7Z
+2NgVTK5TJlxoZkmjbfqZ4Dmv9nqL/OQ8LF3Jw+gkInEhX6lb/NX+IbywOSzMpkuu
+pIzGN0UWhhfb3oF3YfD1HTSrpEcpOP8VeUGRt6XNNK2XgcrFAuQVoSNQktdCOJuF
+VC20dYBT2ngk02uW+6Vs0/q4rMpMGalRTOel1aOLJrlCocITe/iUUOwoTugcCaro
+T+7hKofSrNLI+vwN9s8H/YGj2QyssfhBqHOsah080Be5jkRP7TolrIlIY72XDe9b
+VA/vfHoJ1ry6n4aHNouHU1BC8Kyy416/wPCZJwIDAQABoxMwETAPBgNVHREECDAG
+hwR/AAABMA0GCSqGSIb3DQEBCwUAA4IBAQBZ+EbXZyIvUGhSdxqbRpUSUX+gVu4h
+QlgVNlk0w7s+SnSzru3MLXYV0KW/nbLmW1EUe2zaAbWHPS85VvsZszYlBtOzqUh3
+6uyEXmGr9bq4laMo7mK6pDGxsI72Xl4X3Vm/gUw547z5+gh4/ggzer+lQrOPgLRJ
+vhXwCSD26wB2qx4pgQAV0oGtC8vcm7hSkP2FJH1HveOvivdwDQfeo5+pc8YJvsUG
+pZK/2QqZY2QkQ4iv1rreaxWv3p7W2EVkioyjxbgCZBglxTC8RwPwH4Kxhs+8tXzi
+E2AUE5ryQCwGbQw6dElJvIFY9TpgPPEeCu8l/JzJxxkkR1TNK58rH8rN
+-----END CERTIFICATE-----
\ No newline at end of file
diff --git a/platform/build-scripts/tests/server.key b/platform/build-scripts/tests/server.key
new file mode 100644
index 000000000000..04f53d79ac90
--- /dev/null
+++ b/platform/build-scripts/tests/server.key
@@ -0,0 +1,29 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC9HIF4a2jyewQy
+SvXg0r6D2EjIV2ID6P6bV9DgP/L7Mj437tnY2BVMrlMmXGhmSaNt+pngOa/2eov8
+5DwsXcnD6CQicSFfqVv81f4hvLA5LMymS66kjMY3RRaGF9vegXdh8PUdNKukRyk4
+/xV5QZG3pc00rZeBysUC5BWhI1CS10I4m4VULbR1gFPaeCTTa5b7pWzT+risykwZ
+qVFM56XVo4smuUKhwhN7+JRQ7ChO6BwJquhP7uEqh9Ks0sj6/A32zwf9gaPZDKyx
++EGoc6xqHTzQF7mORE/tOiWsiUhjvZcN71tUD+98egnWvLqfhoc2i4dTUELwrLLj
+Xr/A8JknAgMBAAECggEALSaEJtsGKHaEbvmEsNPAFrxpzCNIzIQxXadewFukSKMb
+RcFqE6Krmy43vf3sExfbxCND38wGHgPuLkfTsggGZxaiofJ+tFc8FiaFUUq6jDwM
+9Fs3bCQMIyAEm6lQnlQsy5569ykfHc67odcNKnEkOEOGteAIPz3JQcJxA5Lp5tTC
+HmyZOcrbFaDSpn1h/HXsq3OIP98TCT2KYOhFuWy0JQTr9uvZDuq8lqoZ53fMNjj/
+0RwesJdHpeRb7GnWW66GQrOmMCFZkKqWOLUK8tcwCYfo9w5GV9BC8sSUx3RF4f1H
+6oO8v3caScOChrznnx7b5KnRkS7O0UUxuTt1+h1HOQKBgQD5fBDSfm+YvMJyAP0Z
+bmIdqqchMyW6xlF1tfhpfLkgFj1ufpIE/G8q+ZUVfah+TMhPUAyAc3AznYHCiHVe
+suu1QJ7tIuBkRfWZgdLo8XdREhpKnm8xAUXOs43t6/CxmPTao+ylN0WGmUHVgQae
+OHcpftNN3zLio0rNm1xOlmdDSwKBgQDCDNA0pxzOvELHHFCMG+N7Sjdy/HK2h1LW
+UQ/FvP/qx/iPJ0fyW0LZkLF2Y9nc7FxVqiA/5W2sy6MQ3ZuOzpobVobeb8SC3fBm
+Q6TSCIKd69E7vLCb9BMfnYPYqW2WP3fEYp8A3MXMyjDJxZSharNeP7/YSSk85VfE
+ruq/ege8FQKBgBnslGrrDHmYk7P4+lPcLoHaq8c9Y1xHI0vR/uAnP61f4j5LFK1D
+9eFHUgCLsCh/ngjvznzCghQ697LZLykJ+og5EMqfZyXER0MORHZEMRvRf73lPLSg
+5zoVWlgwvjAWLstRYVPBrI3R+w9OevuR7n/3V8mtucHnKey3ih34bv6FAoGABtxH
+HCVwWkrDnaB9pIZz4277SOBt+dAM+LDC+v20motZWU5NN99MHL8F1yaulCXzGcA7
+BadJ2lsUt8rt7f2V6zOC7yhKbUoFbsgjcp2EaKrmqdMA93KInoyGFcnfqvkxdcr6
+ziAACj53vRp0J8TK9KESWkYz5AhDsxtwBzb8QQUCgYAawMvTQRDPGwMLtoSBEyDt
+j7ytvxzWPdMhf9/d+xT6wtyfVHnc1CSWermfmN3BXTHTBskTeAXVA8l2sjM5lMOG
+obekKJy31dNz+4ysu1ZiwspxS22iNLgii6L2Qy7u6I16Kb2ZHhmVMwSVcTABzNxS
+//ymT1boXSvNKgTWwe30qg==
+-----END PRIVATE KEY-----
+
diff --git a/platform/build-scripts/tests/testSrc/org/jetbrains/intellij/build/CompilationCacheTest.kt b/platform/build-scripts/tests/testSrc/org/jetbrains/intellij/build/CompilationCacheTest.kt
index 0f2a06b65375..dd653c369cd6 100644
--- a/platform/build-scripts/tests/testSrc/org/jetbrains/intellij/build/CompilationCacheTest.kt
+++ b/platform/build-scripts/tests/testSrc/org/jetbrains/intellij/build/CompilationCacheTest.kt
@@ -1,10 +1,11 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.intellij.build
+import com.intellij.testFramework.utils.io.deleteRecursively
+import com.intellij.util.SystemProperties
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runBlocking
import org.jetbrains.intellij.build.impl.compilation.fetchAndUnpackCompiledClasses
-import org.jetbrains.intellij.build.io.deleteDir
import org.jetbrains.intellij.build.telemetry.TraceManager
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.Assumptions.assumeTrue
@@ -25,7 +26,7 @@ class CompilationCacheTest {
@Test
fun testUnpack() = runBlocking(Dispatchers.Default) {
- val metadataFile = Path.of("/Volumes/data/Documents/idea/out/compilation-archive/metadata.json")
+ val metadataFile = Path.of(SystemProperties.getUserHome(), "projects/idea/out/compilation-archive/metadata.json")
assumeTrue(Files.exists(metadataFile))
// do not use Junit TempDir - it is very slow
@@ -41,10 +42,7 @@ class CompilationCacheTest {
)
}
finally {
- Files.list(outDir).parallel().use { stream ->
- stream.forEach(::deleteDir)
- }
- Files.delete(outDir)
+ outDir.deleteRecursively()
}
}
}
\ No newline at end of file