mirror of https://gitflic.ru/project/openide/openide.git
synced 2026-02-05 08:06:56 +07:00

commit b872ab9363 (parent 3ae016532a), committed by intellij-monorepo-bot

    use immutable key value store in classloader

    GitOrigin-RevId: 512166737d7b21f69dc1c42acf4f30f70290a5ff

Changed files include .idea/modules.xml (generated); the full diff follows.
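For orientation: in this commit the build scripts start writing an immutable key-value (IKV) index into jars, and the classloader-side zip readers resolve entries through that index instead of the earlier Murmur3-based __packageIndex__ metadata. The sketch below is illustrative only - it is not the real IKV binary format and not the IkvIndexBuilder API used in the hunks that follow; the name ImmutableIndex and the sorted-hash binary-search lookup are assumptions chosen to show the general idea (hash each entry name once at build time, store the hashes immutably, look them up without any mutation at run time).

import java.util.Arrays

// Hypothetical sketch of an immutable key-value index for archive entries.
// Keys are reduced to 64-bit hashes at build time, stored sorted, and looked up
// with binary search at run time; nothing is mutated after construction.
class ImmutableIndex private constructor(
  private val hashes: LongArray,   // sorted key hashes
  private val offsets: IntArray,   // data offset of each entry in the archive
  private val sizes: IntArray      // uncompressed size of each entry
) {
  companion object {
    fun build(entries: List<Triple<Long, Int, Int>>): ImmutableIndex {
      val sorted = entries.sortedBy { it.first }
      return ImmutableIndex(
        LongArray(sorted.size) { sorted[it].first },
        IntArray(sorted.size) { sorted[it].second },
        IntArray(sorted.size) { sorted[it].third })
    }
  }

  // Returns (offset, size) for the hash, or null if the key is absent.
  // A real implementation must also verify the stored key bytes, because a
  // false positive would surface as a "wrong class name" error on class define.
  fun lookup(keyHash: Long): Pair<Int, Int>? {
    val i = Arrays.binarySearch(hashes, keyHash)
    return if (i >= 0) offsets[i] to sizes[i] else null
  }
}

The actual commit delegates this work to IkvIndexBuilder/IkvIndexEntry with Xxh3 hashing, as visible in the ZipArchiveOutputStream hunks below.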
@@ -797,8 +797,10 @@
<module fileurl="file://$PROJECT_DIR$/platform/util/base/intellij.platform.util.base.iml" filepath="$PROJECT_DIR$/platform/util/base/intellij.platform.util.base.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util-class-loader/intellij.platform.util.classLoader.iml" filepath="$PROJECT_DIR$/platform/util-class-loader/intellij.platform.util.classLoader.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util-ex/intellij.platform.util.ex.iml" filepath="$PROJECT_DIR$/platform/util-ex/intellij.platform.util.ex.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util/ikv-benchmark/intellij.platform.util.ikvBenchmark.iml" filepath="$PROJECT_DIR$/platform/util/ikv-benchmark/intellij.platform.util.ikvBenchmark.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util/immutable-key-value-store/intellij.platform.util.immutableKeyValueStore.iml" filepath="$PROJECT_DIR$/platform/util/immutable-key-value-store/intellij.platform.util.immutableKeyValueStore.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util/immutable-key-value-store/benchmark/intellij.platform.util.immutableKeyValueStore.benchmark.iml" filepath="$PROJECT_DIR$/platform/util/immutable-key-value-store/benchmark/intellij.platform.util.immutableKeyValueStore.benchmark.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util-rt/intellij.platform.util.rt.iml" filepath="$PROJECT_DIR$/platform/util-rt/intellij.platform.util.rt.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util/rt-java8/intellij.platform.util.rt.java8.iml" filepath="$PROJECT_DIR$/platform/util/rt-java8/intellij.platform.util.rt.java8.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util/testSrc/intellij.platform.util.tests.iml" filepath="$PROJECT_DIR$/platform/util/testSrc/intellij.platform.util.tests.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util/text-matching/intellij.platform.util.text.matching.iml" filepath="$PROJECT_DIR$/platform/util/text-matching/intellij.platform.util.text.matching.iml" />
<module fileurl="file://$PROJECT_DIR$/platform/util/intellij.platform.util.ui.iml" filepath="$PROJECT_DIR$/platform/util/intellij.platform.util.ui.iml" />
@@ -1,4 +1,4 @@
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.cucumber;

import com.intellij.TestCaseLoader;
@@ -40,7 +40,6 @@ public final class CucumberMain {
UrlClassLoader loader = UrlClassLoader.build().files(files).parent(original.getParent())
.useCache()
.usePersistentClasspathIndexForLocalClassDirectories()
.autoAssignUrlsWithProtectionDomain()
.get();
Thread.currentThread().setContextClassLoader(loader);
exitStatus = (Integer)loader.loadClass(CucumberMain.class.getName())
@@ -46,7 +46,6 @@
<orderEntry type="module" module-name="intellij.platform.testFramework" scope="TEST" />
<orderEntry type="module" module-name="intellij.platform.bootstrap" scope="TEST" />
<orderEntry type="library" name="fastutil-min" level="project" />
<orderEntry type="module" module-name="intellij.platform.util.rt" />
<orderEntry type="library" name="ASM" level="project" />
<orderEntry type="library" name="opentelemetry" level="project" />
<orderEntry type="library" name="blockmap" level="project" />
@@ -57,5 +56,8 @@
<orderEntry type="library" name="Log4J" level="project" />
<orderEntry type="library" scope="RUNTIME" name="bouncy-castle-provider" level="project" />
<orderEntry type="library" scope="TEST" name="JUnit5" level="project" />
<orderEntry type="module" module-name="intellij.platform.util.immutableKeyValueStore" />
<orderEntry type="module" module-name="intellij.platform.util.rt.java8" />
<orderEntry type="module" module-name="intellij.platform.util.rt" scope="TEST" />
</component>
</module>
@@ -2,8 +2,10 @@
@file:Suppress("ReplaceGetOrSet")
package org.jetbrains.intellij.build.io

import com.intellij.util.io.Murmur3_32Hash
import it.unimi.dsi.fastutil.ints.IntArrayList
import org.jetbrains.ikv.IkvIndexBuilder
import org.jetbrains.ikv.IkvIndexEntry
import org.jetbrains.ikv.UniversalHash
import org.jetbrains.xxh3.Xxh3
import java.io.IOException
import java.nio.ByteBuffer
import java.nio.ByteOrder
@@ -14,6 +16,9 @@ import java.util.zip.ZipEntry

internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
private val withOptimizedMetadataEnabled: Boolean) : AutoCloseable {
private var classPackages: LongArray? = null
private var resourcePackages: LongArray? = null

private var finished = false
private var entryCount = 0

@@ -21,15 +26,13 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
// 1 MB should be enough for end of central directory record
private val buffer = ByteBuffer.allocateDirect(1024 * 1024).order(ByteOrder.LITTLE_ENDIAN)

private val sizes = IntArrayList()
private val names = ArrayList<ByteArray>()
private val dataOffsets = IntArrayList()
private val indexWriter = IkvIndexBuilder(hash = IndexEntryHash())

private var channelPosition = 0L

private val fileChannel = channel as? FileChannel

fun addDirEntry(name: ByteArray) {
fun addDirEntry(name: String) {
if (finished) {
throw IOException("Stream has already been finished")
}
@@ -37,6 +40,10 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
val offset = channelPosition
entryCount++

assert(!name.endsWith('/'))
val key = name.toByteArray()
val nameInArchive = key + '/'.code.toByte()

buffer.clear()
buffer.putInt(0x04034b50)
// Version needed to extract (minimum)
@@ -56,15 +63,15 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
// Uncompressed size
buffer.putInt(0)
// File name length
buffer.putShort((name.size and 0xffff).toShort())
buffer.putShort((nameInArchive.size and 0xffff).toShort())
// Extra field length
buffer.putShort(0)
buffer.put(name)
buffer.put(nameInArchive)

buffer.flip()
writeBuffer(buffer)

writeCentralFileHeader(0, 0, ZipEntry.STORED, 0, name, offset, dataOffset = 0)
writeCentralFileHeader(0, 0, ZipEntry.STORED, 0, nameInArchive, offset, dataOffset = -1, normalName = key)
}

fun writeRawEntry(header: ByteBuffer, content: ByteBuffer, name: ByteArray, size: Int, compressedSize: Int, method: Int, crc: Long) {
@@ -133,20 +140,66 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
dataOffset = dataOffset)
}

private fun writeCustomMetadata(): Int {
val optimizedMetadataOffset = channelPosition.toInt()
// write one by one to channel to avoid buffer overflow
writeIntArray(sizes.toIntArray())
writeIntArray(dataOffsets.toIntArray())
writeIntArray(computeTableIndexes(names))
return optimizedMetadataOffset
private inline fun writeData(task: (ByteBuffer) -> Unit) {
buffer.clear()
task(buffer)
buffer.flip()
writeBuffer(buffer)
}

private fun writeIntArray(value: IntArray) {
buffer.clear()
buffer.asIntBuffer().put(value)
buffer.limit(value.size * Int.SIZE_BYTES)
writeBuffer(buffer)
private fun writeIndex(): Int {
// write one by one to channel to avoid buffer overflow
val entries = indexWriter.write {
writeBuffer(it)
}
val indexDataEnd = channelPosition.toInt()

// write package class and resource hashes
writeData { buffer ->
val classPackages = classPackages
val resourcePackages = resourcePackages
if (classPackages == null && resourcePackages == null) {
buffer.putInt(0)
buffer.putInt(0)
}
else {
buffer.putInt(classPackages!!.size)
buffer.putInt(resourcePackages!!.size)
useAsLongBuffer(buffer) {
it.put(classPackages)
it.put(resourcePackages)
}
}
}

// write fingerprints
writeData { buffer ->
useAsLongBuffer(buffer) { longBuffer ->
// bloom filter is not an option - false positive leads to error like "wrong class name" on class define
for (entry in entries) {
longBuffer.put(entry.keyHash)
}
}
}

// write names
writeData { buffer ->
val shortBuffer = buffer.asShortBuffer()
for (entry in entries) {
shortBuffer.put(entry.key.size.toShort())
}
buffer.position(buffer.position() + (shortBuffer.position() * Short.SIZE_BYTES))
}

for (list in entries.asSequence().chunked(1024)) {
writeData { buffer ->
for (indexEntry in list) {
buffer.put(indexEntry.key)
}
}
}

return indexDataEnd
}

fun finish() {
@@ -154,7 +207,12 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
throw IOException("This archive has already been finished")
}

val optimizedMetadataOffset = if (withOptimizedMetadataEnabled) writeCustomMetadata() else -1
val indexOffset = if (withOptimizedMetadataEnabled && entryCount != 0) {
writeIndex()
}
else {
-1
}

val centralDirectoryOffset = channelPosition
// write central directory file header
@@ -183,16 +241,16 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
if (withOptimizedMetadataEnabled) {
buffer.putShort(1 + 4 + 4)
// version
buffer.put(1)
buffer.putInt(sizes.size)
buffer.putInt(optimizedMetadataOffset)
buffer.put(2)
buffer.putInt(entryCount)
buffer.putInt(indexOffset)
}
else {
buffer.putShort(0)
}
}
else {
writeZip64End(centralDirectoryLength, centralDirectoryOffset, optimizedMetadataOffset)
writeZip64End(centralDirectoryLength, centralDirectoryOffset, indexOffset)
}
buffer.flip()
writeBuffer(buffer)
@@ -302,7 +360,31 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
}
}

private fun writeCentralFileHeader(size: Int, compressedSize: Int, method: Int, crc: Long, name: ByteArray, offset: Long, dataOffset: Int) {
private class IndexEntry(@JvmField val key: ByteArray,
override val offset: Int,
override val size: Int,
val keyHash: Long) : IkvIndexEntry {
override fun equals(other: Any?) = key.contentEquals((other as? IndexEntry)?.key)

override fun toString(): String {
return "IndexEntryHash(key=${key.toString(Charsets.UTF_8)}, keyHash=$keyHash)"
}

override fun hashCode() = keyHash.toInt()
}

private class IndexEntryHash : UniversalHash<IndexEntry> {
override fun universalHash(key: IndexEntry, index: Long) = Xxh3.seededHash(key.key, index)
}

private fun writeCentralFileHeader(size: Int,
compressedSize: Int,
method: Int,
crc: Long,
name: ByteArray,
offset: Long,
dataOffset: Int,
normalName: ByteArray = name) {
var buffer = metadataBuffer
if (buffer.remaining() < (46 + name.size)) {
metadataBuffer = ByteBuffer.allocateDirect(buffer.capacity() * 2).order(ByteOrder.LITTLE_ENDIAN)
@@ -323,9 +405,7 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
// uncompressed size
buffer.putInt(headerOffset + 24, size)

sizes.add(size)
dataOffsets.add(dataOffset)
names.add(name)
indexWriter.add(IndexEntry(key = normalName, offset = dataOffset, size = size, keyHash = Xxh3.hash(normalName)))

// file name length
buffer.putShort(headerOffset + 28, (name.size and 0xffff).toShort())
@@ -335,32 +415,10 @@ internal class ZipArchiveOutputStream(private val channel: WritableByteChannel,
buffer.position(headerOffset + 46)
buffer.put(name)
}
}

private fun computeTableIndexes(names: List<ByteArray>): IntArray {
val indexes = IntArray(names.size)
val tableSize = names.size * 2
val indexToName = arrayOfNulls<ByteArray>(tableSize)
@Suppress("ReplaceManualRangeWithIndicesCalls")
for (entryIndex in 0 until names.size) {
val name = names.get(entryIndex)
val nameHash = Murmur3_32Hash.MURMUR3_32.hashBytes(name, 0, name.size - (if (name.last() == '/'.code.toByte()) 1 else 0))
var index = Math.floorMod(nameHash, tableSize)
while (true) {
val found = indexToName[index]
if (found == null) {
indexes[entryIndex] = index
indexToName[index] = name
break
}
else if (name.contentEquals(found)) {
indexes[entryIndex] = index
break
}
else if (++index == tableSize) {
index = 0
}
}
fun setPackageIndex(classPackages: LongArray, resourcePackages: LongArray) {
assert(this.classPackages == null && this.resourcePackages == null)
this.classPackages = classPackages
this.resourcePackages = resourcePackages
}
return indexes
}
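The IndexEntryHash above plugs a seeded hash (Xxh3.seededHash) into the index builder's UniversalHash interface. The sketch below shows the general pattern such an interface enables - re-hashing the same keys under different seeds until a collision-free placement is found. It is a toy illustration: SeededHash, stringHash, and placeAll are hypothetical names, and the hash stand-in is not Xxh3; it is not the algorithm IkvIndexBuilder actually uses.

// Toy universal-hash interface; the seed lets the builder retry placements.
fun interface SeededHash<K> {
  fun hash(key: K, seed: Long): Long
}

// Assumed stand-in for a real seeded hash such as Xxh3.seededHash.
val stringHash = SeededHash<String> { key, seed -> (key.hashCode().toLong() * 31) xor seed }

// Place keys into a fixed-size table, bumping the seed on any collision.
fun <K> placeAll(keys: List<K>, tableSize: Int, hash: SeededHash<K>): Pair<Long, IntArray> {
  outer@ for (seed in 0L until 1_000L) {
    val slots = IntArray(keys.size)
    val used = HashSet<Int>()
    for ((i, key) in keys.withIndex()) {
      val slot = Math.floorMod(hash.hash(key, seed), tableSize.toLong()).toInt()
      if (!used.add(slot)) continue@outer  // collision: try the next seed
      slots[i] = slot
    }
    return seed to slots
  }
  error("no collision-free seed found; increase tableSize")
}

fun main() {
  val (seed, slots) = placeAll(listOf("a/B.class", "a/C.class", "d/E.class"), tableSize = 8, hash = stringHash)
  println("seed=$seed slots=${slots.toList()}")
}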
@@ -40,6 +40,10 @@ internal class ZipFileWriter(channel: WritableByteChannel, private val deflater:
private val bufferAllocator = ByteBufferAllocator()
private val deflateBufferAllocator = if (deflater == null) null else ByteBufferAllocator()

fun setPackageIndex(classPackages: LongArray, resourcePackages: LongArray) {
resultStream.setPackageIndex(classPackages, resourcePackages)
}

@Suppress("DuplicatedCode")
fun file(nameString: String, file: Path) {
var isCompressed = deflater != null && !nameString.endsWith(".png")
@@ -264,7 +268,7 @@
}

fun dir(name: String) {
resultStream.addDirEntry((if (name.endsWith('/')) name else "$name/").toByteArray())
resultStream.addDirEntry(name)
}

override fun close() {
@@ -5,6 +5,7 @@ import java.lang.invoke.MethodHandles
import java.lang.invoke.MethodType
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.LongBuffer

// not thread-safe, intended only for single thread for one time use
class ByteBufferAllocator : AutoCloseable {
@@ -47,4 +48,10 @@ internal fun unmapBuffer(buffer: ByteBuffer) {

private fun roundUpInt(x: Int, @Suppress("SameParameterValue") blockSizePowerOf2: Int): Int {
return x + blockSizePowerOf2 - 1 and -blockSizePowerOf2
}

internal inline fun useAsLongBuffer(buffer: ByteBuffer, task: (LongBuffer) -> Unit) {
val longBuffer = buffer.asLongBuffer()
task(longBuffer)
buffer.position(buffer.position() + (longBuffer.position() * Long.SIZE_BYTES))
}
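A quick illustration of why useAsLongBuffer advances the backing buffer's position: a view created by asLongBuffer() keeps its own independent position, so writes through the view do not move the original ByteBuffer. The snippet below is a self-contained demonstration of that behaviour using only the JDK; it does not depend on the build scripts.

import java.nio.ByteBuffer
import java.nio.ByteOrder

fun main() {
  val buffer = ByteBuffer.allocate(64).order(ByteOrder.LITTLE_ENDIAN)
  val longView = buffer.asLongBuffer()
  longView.put(longArrayOf(1L, 2L, 3L))

  // The view advanced, but the backing buffer did not.
  println(longView.position())  // 3
  println(buffer.position())    // 0

  // This is exactly the adjustment useAsLongBuffer performs after the task runs.
  buffer.position(buffer.position() + longView.position() * Long.SIZE_BYTES)
  println(buffer.position())    // 24
}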
@@ -1,8 +1,7 @@
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.intellij.build.io

import com.intellij.util.lang.ImmutableZipFile
import org.jetbrains.intellij.build.tasks.PACKAGE_INDEX_NAME
import com.intellij.util.lang.HashMapZipFile
import org.jetbrains.intellij.build.tasks.PackageIndexBuilder
import java.nio.file.*
import java.util.*
@@ -128,9 +127,9 @@ internal fun compressDir(startDir: Path, archiver: ZipArchiver, excludes: List<P
internal fun copyZipRaw(sourceFile: Path,
packageIndexBuilder: PackageIndexBuilder,
zipCreator: ZipFileWriter) {
ImmutableZipFile.load(sourceFile).use { sourceZipFile ->
HashMapZipFile.load(sourceFile).use { sourceZipFile ->
for (entry in sourceZipFile.entries) {
if (entry.isDirectory || entry.name == PACKAGE_INDEX_NAME) {
if (entry.isDirectory) {
continue
}

@@ -1,13 +1,13 @@
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.intellij.build.tasks

import com.intellij.util.io.Murmur3_32Hash
import it.unimi.dsi.fastutil.ints.IntOpenHashSet
import it.unimi.dsi.fastutil.longs.LongOpenHashSet
import org.jetbrains.intellij.build.io.ZipFileWriter
import org.jetbrains.xxh3.Xx3UnencodedString

internal class PackageIndexBuilder {
val classPackageHashSet = IntOpenHashSet()
val resourcePackageHashSet = IntOpenHashSet()
val classPackageHashSet = LongOpenHashSet()
val resourcePackageHashSet = LongOpenHashSet()

private val dirsToCreate = HashSet<String>()

@@ -18,51 +18,35 @@ internal class PackageIndexBuilder {
fun _getDirsToCreate(): Set<String> = dirsToCreate

fun addFile(name: String) {
val i = name.lastIndexOf('/')
val packageNameHash = if (i == -1) 0 else Xx3UnencodedString.hashUnencodedStringRange(name, 0, i)
if (name.endsWith(".class")) {
classPackageHashSet.add(getPackageNameHash(name))
classPackageHashSet.add(packageNameHash)
}
else {
resourcePackageHashSet.add(getPackageNameHash(name))
resourcePackageHashSet.add(packageNameHash)
computeDirsToCreate(name)
}
}

fun writeDirs(zipCreator: ZipFileWriter) {
if (dirsToCreate.isEmpty()) {
return
}

val list = dirsToCreate.toMutableList()
list.sort()
for (name in list) {
// name in our ImmutableZipEntry doesn't have ending slash
zipCreator.dir(name)
}
}

fun writePackageIndex(zipCreator: ZipFileWriter) {
fun writeDirsAndPackageIndex(zipCreator: ZipFileWriter) {
assert(!wasWritten)
wasWritten = true

// name in our ImmutableZipEntry doesn't have ending slash
dirsToCreate.sorted().forEach(zipCreator::dir)

if (!resourcePackageHashSet.isEmpty()) {
// add empty package if top-level directory will be requested
resourcePackageHashSet.add(0)
}

zipCreator.uncompressedData(PACKAGE_INDEX_NAME,
(2 * Int.SIZE_BYTES) + ((classPackageHashSet.size + resourcePackageHashSet.size) * Int.SIZE_BYTES)) {
val classPackages = classPackageHashSet.toIntArray()
val resourcePackages = resourcePackageHashSet.toIntArray()
// same content for same data
classPackages.sort()
resourcePackages.sort()
it.putInt(classPackages.size)
it.putInt(resourcePackages.size)
val intBuffer = it.asIntBuffer()
intBuffer.put(classPackages)
intBuffer.put(resourcePackages)
it.position(it.position() + (intBuffer.position() * Int.SIZE_BYTES))
}
val classPackages = classPackageHashSet.toLongArray()
val resourcePackages = resourcePackageHashSet.toLongArray()
// same content for same data
classPackages.sort()
resourcePackages.sort()
zipCreator.setPackageIndex(classPackages, resourcePackages)
}

// leave only directories where some non-class files are located (as it can be requested in runtime, e.g. stubs, fileTemplates)
@@ -78,7 +62,7 @@ internal class PackageIndexBuilder {

var dirName = name.substring(0, slashIndex)
while (dirsToCreate.add(dirName)) {
resourcePackageHashSet.add(Murmur3_32Hash.MURMUR3_32.hashString(dirName, 0, dirName.length))
resourcePackageHashSet.add(Xx3UnencodedString.hashUnencodedString(dirName))

slashIndex = dirName.lastIndexOf('/')
if (slashIndex == -1) {
@@ -88,12 +72,4 @@ internal class PackageIndexBuilder {
dirName = name.substring(0, slashIndex)
}
}
}

private fun getPackageNameHash(name: String): Int {
val i = name.lastIndexOf('/')
if (i == -1) {
return 0
}
return Murmur3_32Hash.MURMUR3_32.hashString(name, 0, i)
}
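To make the package-index bookkeeping above concrete, here is a tiny, self-contained sketch of what addFile tracks: the hash of the package part of each entry name goes into either a class set or a resource set. It deliberately uses a plain hashCode()-based stand-in and HashSet<Long> instead of the real Xx3UnencodedString hashing and fastutil LongOpenHashSet, so it only illustrates the bookkeeping, not the exact hash values written by this commit; PackageIndexSketch and packageHash are hypothetical names.

// Illustrative stand-in for the real package hashing (assumption: any 64-bit hash works here).
private fun packageHash(name: String): Long {
  val i = name.lastIndexOf('/')
  return if (i == -1) 0L else name.substring(0, i).hashCode().toLong()
}

class PackageIndexSketch {
  val classPackages = HashSet<Long>()
  val resourcePackages = HashSet<Long>()

  fun addFile(name: String) {
    val hash = packageHash(name)
    if (name.endsWith(".class")) classPackages.add(hash) else resourcePackages.add(hash)
  }
}

fun main() {
  val index = PackageIndexSketch()
  index.addFile("com/example/Foo.class")             // package "com/example" goes to classPackages
  index.addFile("com/example/messages.properties")   // same package, recorded as a resource package
  index.addFile("README.md")                         // no package -> hash 0
  println(index.classPackages.size)                  // 1
  println(index.resourcePackages.size)               // 2 (package hash plus 0 for the top level)
}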
@@ -3,6 +3,7 @@
|
||||
|
||||
package org.jetbrains.intellij.build.tasks
|
||||
|
||||
import com.intellij.util.lang.HashMapZipFile
|
||||
import org.apache.commons.compress.archivers.zip.Zip64Mode
|
||||
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry
|
||||
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream
|
||||
@@ -157,6 +158,16 @@ fun crossPlatformZip(macDistDir: Path,
|
||||
}
|
||||
}
|
||||
|
||||
fun consumeDataByPrefix(file: Path, prefixWithEndingSlash: String, consumer: BiConsumer<String, ByteArray>) {
|
||||
HashMapZipFile.load(file).use { zip ->
|
||||
for (entry in zip.entries) {
|
||||
if (entry.name.startsWith(prefixWithEndingSlash)) {
|
||||
consumer.accept(entry.name, entry.getData(zip))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
typealias EntryCustomizer = (entry: ZipArchiveEntry, file: Path, relativeFile: Path) -> Unit
|
||||
|
||||
private val fsUnixMode: EntryCustomizer = { entry, file, relativeFile ->
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
@file:Suppress("ReplaceJavaStaticMethodWithKotlinAnalog")
|
||||
package org.jetbrains.intellij.build.tasks
|
||||
|
||||
import com.intellij.util.lang.HashMapZipFile
|
||||
import com.intellij.util.lang.ImmutableZipEntry
|
||||
import com.intellij.util.lang.ImmutableZipFile
|
||||
import io.opentelemetry.api.common.AttributeKey
|
||||
import io.opentelemetry.context.Context
|
||||
import org.jetbrains.intellij.build.io.ZipArchiver
|
||||
@@ -160,7 +160,7 @@ fun buildJar(targetFile: Path, sources: List<Source>, dryRun: Boolean = false) {
|
||||
}
|
||||
else -> {
|
||||
val file = (source as ZipSource).file
|
||||
ImmutableZipFile.load(file).use { zipFile ->
|
||||
HashMapZipFile.load(file).use { zipFile ->
|
||||
val entries = getFilteredEntries(targetFile, file, zipFile, uniqueNames, includeManifest = sources.size == 1,
|
||||
forbidNativeFiles)
|
||||
writeEntries(entries.iterator(), zipCreator, zipFile, packageIndexBuilder)
|
||||
@@ -170,15 +170,45 @@ fun buildJar(targetFile: Path, sources: List<Source>, dryRun: Boolean = false) {
|
||||
|
||||
source.sizeConsumer?.accept((outChannel.position() - positionBefore).toInt())
|
||||
}
|
||||
packageIndexBuilder.writeDirs(zipCreator)
|
||||
packageIndexBuilder.writePackageIndex(zipCreator)
|
||||
packageIndexBuilder.writeDirsAndPackageIndex(zipCreator)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun getIgnoredNames(): Set<String> {
|
||||
val set = HashSet<String>()
|
||||
// compilation cache on TC
|
||||
set.add(".hash")
|
||||
set.add("pom.xml")
|
||||
set.add("about.html")
|
||||
set.add("module-info.class")
|
||||
set.add("META-INF/services/javax.xml.parsers.SAXParserFactory")
|
||||
set.add("META-INF/services/javax.xml.stream.XMLEventFactory")
|
||||
set.add("META-INF/services/javax.xml.parsers.DocumentBuilderFactory")
|
||||
set.add("META-INF/services/javax.xml.datatype.DatatypeFactory")
|
||||
set.add("native-image")
|
||||
set.add("native")
|
||||
set.add("licenses")
|
||||
@Suppress("SpellCheckingInspection")
|
||||
set.add(".gitkeep")
|
||||
for (originalName in listOf("NOTICE", "README", "LICENSE", "DEPENDENCIES", "CHANGES", "THIRD_PARTY_LICENSES", "COPYING")) {
|
||||
for (name in listOf(originalName, originalName.lowercase())) {
|
||||
set.add(name)
|
||||
set.add("$name.txt")
|
||||
set.add("$name.md")
|
||||
set.add("META-INF/$name")
|
||||
set.add("META-INF/$name.txt")
|
||||
set.add("META-INF/$name.md")
|
||||
}
|
||||
}
|
||||
return set
|
||||
}
|
||||
|
||||
private val ignoredNames = java.util.Set.copyOf(getIgnoredNames())
|
||||
|
||||
private fun getFilteredEntries(targetFile: Path,
|
||||
sourceFile: Path,
|
||||
zipFile: ImmutableZipFile,
|
||||
zipFile: HashMapZipFile,
|
||||
uniqueNames: MutableSet<String>,
|
||||
includeManifest: Boolean,
|
||||
forbidNativeFiles: Boolean): Sequence<ImmutableZipEntry> {
|
||||
@@ -190,39 +220,14 @@ private fun getFilteredEntries(targetFile: Path,
|
||||
throw IllegalStateException("Library with native files must be packed separately (sourceFile=$sourceFile, targetFile=$targetFile, fileName=${it.name})")
|
||||
}
|
||||
|
||||
@Suppress("SpellCheckingInspection")
|
||||
!ignoredNames.contains(name) &&
|
||||
uniqueNames.add(name) &&
|
||||
!name.endsWith(".kotlin_metadata") &&
|
||||
(includeManifest || name != "META-INF/MANIFEST.MF") &&
|
||||
name != PACKAGE_INDEX_NAME &&
|
||||
name != "license" && !name.startsWith("license/") &&
|
||||
name != "META-INF/services/javax.xml.parsers.SAXParserFactory" &&
|
||||
name != "META-INF/services/javax.xml.stream.XMLEventFactory" &&
|
||||
name != "META-INF/services/javax.xml.parsers.DocumentBuilderFactory" &&
|
||||
name != "META-INF/services/javax.xml.datatype.DatatypeFactory" &&
|
||||
name != "native-image" && !name.startsWith("native-image/") &&
|
||||
name != "native" && !name.startsWith("native/") &&
|
||||
name != "licenses" && !name.startsWith("licenses/") &&
|
||||
name != ".gitkeep" &&
|
||||
name != "META-INF/CHANGES" &&
|
||||
name != "META-INF/DEPENDENCIES" &&
|
||||
name != "META-INF/LICENSE" &&
|
||||
name != "META-INF/LICENSE.txt" &&
|
||||
name != "META-INF/README.txt" &&
|
||||
name != "META-INF/README.md" &&
|
||||
name != "META-INF/NOTICE" &&
|
||||
name != "META-INF/NOTICE.txt" &&
|
||||
name != "LICENSE" &&
|
||||
name != "LICENSE.md" &&
|
||||
name != "module-info.class" &&
|
||||
name != "license.txt" &&
|
||||
name != "LICENSE.txt" &&
|
||||
name != "COPYING.txt" &&
|
||||
name != "about.html" &&
|
||||
name != "pom.xml" &&
|
||||
name != "THIRD_PARTY_LICENSES.txt" &&
|
||||
name != "NOTICE.txt" &&
|
||||
name != "NOTICE.md" &&
|
||||
!name.startsWith("license/") &&
|
||||
!name.startsWith("native-image/") &&
|
||||
!name.startsWith("native/") &&
|
||||
!name.startsWith("licenses/") &&
|
||||
(requiresMavenFiles(targetFileName) || (name != "META-INF/maven" && !name.startsWith("META-INF/maven/"))) &&
|
||||
!name.startsWith("META-INF/INDEX.LIST") &&
|
||||
(!name.startsWith("META-INF/") || (!name.endsWith(".DSA") && !name.endsWith(".SF") && !name.endsWith(".RSA")))
|
||||
@@ -258,6 +263,8 @@ private val commonModuleExcludes = java.util.List.of(
|
||||
FileSystems.getDefault().getPathMatcher("glob:**/icon-robots.txt"),
|
||||
FileSystems.getDefault().getPathMatcher("glob:icon-robots.txt"),
|
||||
FileSystems.getDefault().getPathMatcher("glob:.unmodified"),
|
||||
// compilation cache on TC
|
||||
FileSystems.getDefault().getPathMatcher("glob:.hash"),
|
||||
FileSystems.getDefault().getPathMatcher("glob:classpath.index"),
|
||||
)
|
||||
|
||||
|
||||
@@ -3,11 +3,11 @@
|
||||
|
||||
package org.jetbrains.intellij.build.tasks
|
||||
|
||||
import com.intellij.util.lang.HashMapZipFile
|
||||
import com.intellij.util.lang.ImmutableZipEntry
|
||||
import com.intellij.util.lang.ImmutableZipFile
|
||||
import io.opentelemetry.api.common.AttributeKey
|
||||
import io.opentelemetry.context.Context
|
||||
import it.unimi.dsi.fastutil.ints.IntSet
|
||||
import it.unimi.dsi.fastutil.longs.LongSet
|
||||
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap
|
||||
import org.jetbrains.intellij.build.io.ZipFileWriter
|
||||
import org.jetbrains.intellij.build.io.copyZipRaw
|
||||
@@ -19,8 +19,6 @@ import java.nio.file.Path
|
||||
import java.nio.file.StandardCopyOption
|
||||
import java.util.*
|
||||
|
||||
internal const val PACKAGE_INDEX_NAME = "__packageIndex__"
|
||||
|
||||
@Suppress("ReplaceJavaStaticMethodWithKotlinAnalog")
|
||||
private val excludedLibJars = java.util.Set.of("testFramework.core.jar", "testFramework.jar", "testFramework-java.jar")
|
||||
|
||||
@@ -87,8 +85,7 @@ fun generateClasspath(homeDir: Path, mainJarName: String, antLibDir: Path?): Lis
|
||||
Files.delete(productJar)
|
||||
}
|
||||
|
||||
packageIndexBuilder.writeDirs(zipCreator)
|
||||
packageIndexBuilder.writePackageIndex(zipCreator)
|
||||
packageIndexBuilder.writeDirsAndPackageIndex(zipCreator)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -157,7 +154,7 @@ internal fun readClassLoadingLog(classLoadingLog: InputStream, rootDir: Path, ma
|
||||
return sourceToNames
|
||||
}
|
||||
|
||||
data class PackageIndexEntry(val path: Path, val classPackageIndex: IntSet, val resourcePackageIndex: IntSet)
|
||||
data class PackageIndexEntry(val path: Path, val classPackageIndex: LongSet, val resourcePackageIndex: LongSet)
|
||||
|
||||
fun reorderJar(jarFile: Path, orderedNames: List<String>, resultJarFile: Path): PackageIndexEntry {
|
||||
val orderedNameToIndex = Object2IntOpenHashMap<String>(orderedNames.size)
|
||||
@@ -170,16 +167,9 @@ fun reorderJar(jarFile: Path, orderedNames: List<String>, resultJarFile: Path):
|
||||
|
||||
val packageIndexBuilder = PackageIndexBuilder()
|
||||
|
||||
ImmutableZipFile.load(jarFile).use { zipFile ->
|
||||
HashMapZipFile.load(jarFile).use { zipFile ->
|
||||
// ignore existing package index on reorder - a new one will be computed even if it is the same, do not optimize for simplicity
|
||||
val entries = zipFile.entries.toMutableList()
|
||||
// package index in the end
|
||||
for (i in (entries.size - 1) downTo 0) {
|
||||
if (entries.get(i).name == PACKAGE_INDEX_NAME) {
|
||||
entries.removeAt(i)
|
||||
break
|
||||
}
|
||||
}
|
||||
entries.sortWith(Comparator { o1, o2 ->
|
||||
val o2p = o2.name
|
||||
if ("META-INF/plugin.xml" == o2p) {
|
||||
@@ -204,8 +194,7 @@ fun reorderJar(jarFile: Path, orderedNames: List<String>, resultJarFile: Path):
|
||||
|
||||
writeNewZip(tempJarFile) { zipCreator ->
|
||||
writeEntries(entries.iterator(), zipCreator, zipFile, packageIndexBuilder)
|
||||
packageIndexBuilder.writeDirs(zipCreator)
|
||||
packageIndexBuilder.writePackageIndex(zipCreator)
|
||||
packageIndexBuilder.writeDirsAndPackageIndex(zipCreator)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -224,7 +213,7 @@ fun reorderJar(jarFile: Path, orderedNames: List<String>, resultJarFile: Path):
|
||||
|
||||
internal fun writeEntries(entries: Iterator<ImmutableZipEntry>,
|
||||
zipCreator: ZipFileWriter,
|
||||
sourceZipFile: ImmutableZipFile,
|
||||
sourceZipFile: HashMapZipFile,
|
||||
packageIndexBuilder: PackageIndexBuilder?) {
|
||||
for (entry in entries) {
|
||||
if (entry.isDirectory) {
|
||||
@@ -239,7 +228,7 @@ internal fun writeEntries(entries: Iterator<ImmutableZipEntry>,
|
||||
zipCreator.uncompressedData(name, data)
|
||||
}
|
||||
finally {
|
||||
entry.releaseBuffer(data)
|
||||
sourceZipFile.releaseBuffer(data)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
package org.jetbrains.intellij.build.tasks
|
||||
|
||||
import com.intellij.util.lang.ImmutableZipFile
|
||||
import com.intellij.util.lang.HashMapZipFile
|
||||
import org.jetbrains.intellij.build.io.*
|
||||
import org.jetbrains.org.objectweb.asm.ClassReader
|
||||
import org.jetbrains.org.objectweb.asm.ClassVisitor
|
||||
@@ -104,8 +104,7 @@ private fun updatePackageIndex(sourceFile: Path, targetFile: Path) {
|
||||
writeNewZip(targetFile) { zipCreator ->
|
||||
val packageIndexBuilder = PackageIndexBuilder()
|
||||
copyZipRaw(sourceFile, packageIndexBuilder, zipCreator)
|
||||
packageIndexBuilder.writeDirs(zipCreator)
|
||||
packageIndexBuilder.writePackageIndex(zipCreator)
|
||||
packageIndexBuilder.writeDirsAndPackageIndex(zipCreator)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -113,7 +112,7 @@ private fun updatePackageIndex(sourceFile: Path, targetFile: Path) {
|
||||
// so we check validity of the produced class files here
|
||||
private fun checkClassFilesValidity(jarFile: Path) {
|
||||
tracer.spanBuilder("check class files validity").setAttribute("file", jarFile.toString()).startSpan().use {
|
||||
ImmutableZipFile.load(jarFile).use { file ->
|
||||
HashMapZipFile.load(jarFile).use { file ->
|
||||
for (entry in file.entries) {
|
||||
if (!entry.isDirectory && entry.name.endsWith(".class")) {
|
||||
entry.getInputStream(file).use {
|
||||
|
||||
@@ -4,7 +4,9 @@ package org.jetbrains.intellij.build.io
|
||||
import com.intellij.openapi.util.SystemInfoRt
|
||||
import com.intellij.testFramework.rules.InMemoryFsExtension
|
||||
import com.intellij.util.io.write
|
||||
import com.intellij.util.lang.HashMapZipFile
|
||||
import com.intellij.util.lang.ImmutableZipFile
|
||||
import com.intellij.util.lang.ZipFile
|
||||
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.assertj.core.configuration.ConfigurationProvider
|
||||
@@ -15,6 +17,8 @@ import org.junit.jupiter.api.Assumptions
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.extension.RegisterExtension
|
||||
import org.junit.jupiter.api.io.TempDir
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.ByteOrder
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.util.concurrent.ForkJoinTask
|
||||
@@ -27,31 +31,65 @@ class ZipTest {
|
||||
// not used in every test because we want to check the real FS behaviour
|
||||
val fs = InMemoryFsExtension()
|
||||
|
||||
private val secret = byteArrayOf(0xb8.toByte(), 0xfe.toByte(), 0x6c.toByte(), 0x39.toByte(), 0x23.toByte(), 0xa4.toByte(), 0x4b.toByte(),
|
||||
0xbe.toByte(), 0x7c.toByte(), 0x01.toByte(), 0x81.toByte(), 0x2c.toByte(), 0xf7.toByte(), 0x21.toByte(),
|
||||
0xad.toByte(), 0x1c.toByte(), 0xde.toByte(), 0xd4.toByte(), 0x6d.toByte(), 0xe9.toByte(), 0x83.toByte(),
|
||||
0x90.toByte(), 0x97.toByte(), 0xdb.toByte(), 0x72.toByte(), 0x40.toByte(), 0xa4.toByte(), 0xa4.toByte(),
|
||||
0xb7.toByte(), 0xb3.toByte(), 0x67.toByte(), 0x1f.toByte(), 0xcb.toByte(), 0x79.toByte(), 0xe6.toByte(),
|
||||
0x4e.toByte(), 0xcc.toByte(), 0xc0.toByte(), 0xe5.toByte(), 0x78.toByte(), 0x82.toByte(), 0x5a.toByte(),
|
||||
0xd0.toByte(), 0x7d.toByte(), 0xcc.toByte(), 0xff.toByte(), 0x72.toByte(), 0x21.toByte(), 0xb8.toByte(),
|
||||
0x08.toByte(), 0x46.toByte(), 0x74.toByte(), 0xf7.toByte(), 0x43.toByte(), 0x24.toByte(), 0x8e.toByte(),
|
||||
0xe0.toByte(), 0x35.toByte(), 0x90.toByte(), 0xe6.toByte(), 0x81.toByte(), 0x3a.toByte(), 0x26.toByte(),
|
||||
0x4c.toByte(), 0x3c.toByte(), 0x28.toByte(), 0x52.toByte(), 0xbb.toByte(), 0x91.toByte(), 0xc3.toByte(),
|
||||
0x00.toByte(), 0xcb.toByte(), 0x88.toByte(), 0xd0.toByte(), 0x65.toByte(), 0x8b.toByte(), 0x1b.toByte(),
|
||||
0x53.toByte(), 0x2e.toByte(), 0xa3.toByte(), 0x71.toByte(), 0x64.toByte(), 0x48.toByte(), 0x97.toByte(),
|
||||
0xa2.toByte(), 0x0d.toByte(), 0xf9.toByte(), 0x4e.toByte(), 0x38.toByte(), 0x19.toByte(), 0xef.toByte(),
|
||||
0x46.toByte(), 0xa9.toByte(), 0xde.toByte(), 0xac.toByte(), 0xd8.toByte(), 0xa8.toByte(), 0xfa.toByte(),
|
||||
0x76.toByte(), 0x3f.toByte(), 0xe3.toByte(), 0x9c.toByte(), 0x34.toByte(), 0x3f.toByte(), 0xf9.toByte(),
|
||||
0xdc.toByte(), 0xbb.toByte(), 0xc7.toByte(), 0xc7.toByte(), 0x0b.toByte(), 0x4f.toByte(), 0x1d.toByte(),
|
||||
0x8a.toByte(), 0x51.toByte(), 0xe0.toByte(), 0x4b.toByte(), 0xcd.toByte(), 0xb4.toByte(), 0x59.toByte(),
|
||||
0x31.toByte(), 0xc8.toByte(), 0x9f.toByte(), 0x7e.toByte(), 0xc9.toByte(), 0xd9.toByte(), 0x78.toByte(),
|
||||
0x73.toByte(), 0x64.toByte(), 0xea.toByte(), 0xc5.toByte(), 0xac.toByte(), 0x83.toByte(), 0x34.toByte(),
|
||||
0xd3.toByte(), 0xeb.toByte(), 0xc3.toByte(), 0xc5.toByte(), 0x81.toByte(), 0xa0.toByte(), 0xff.toByte(),
|
||||
0xfa.toByte(), 0x13.toByte(), 0x63.toByte(), 0xeb.toByte(), 0x17.toByte(), 0x0d.toByte(), 0xdd.toByte(),
|
||||
0x51.toByte(), 0xb7.toByte(), 0xf0.toByte(), 0xda.toByte(), 0x49.toByte(), 0xd3.toByte(), 0x16.toByte(),
|
||||
0x55.toByte(), 0x26.toByte(), 0x29.toByte(), 0xd4.toByte(), 0x68.toByte(), 0x9e.toByte(), 0x2b.toByte(),
|
||||
0x16.toByte(), 0xbe.toByte(), 0x58.toByte(), 0x7d.toByte(), 0x47.toByte(), 0xa1.toByte(), 0xfc.toByte(),
|
||||
0x8f.toByte(), 0xf8.toByte(), 0xb8.toByte(), 0xd1.toByte(), 0x7a.toByte(), 0xd0.toByte(), 0x31.toByte(),
|
||||
0xce.toByte(), 0x45.toByte(), 0xcb.toByte(), 0x3a.toByte(), 0x8f.toByte(), 0x95.toByte(), 0x16.toByte(),
|
||||
0x04.toByte(), 0x28.toByte(), 0xaf.toByte(), 0xd7.toByte(), 0xfb.toByte(), 0xca.toByte(), 0xbb.toByte(),
|
||||
0x4b.toByte(), 0x40.toByte(), 0x7e.toByte())
|
||||
|
||||
@Test
|
||||
fun `interrupt thread`(@TempDir tempDir: Path) {
|
||||
val buffer = ByteBuffer.wrap(secret).order(ByteOrder.LITTLE_ENDIAN).asIntBuffer()
|
||||
val a = IntArray(buffer.limit())
|
||||
buffer.get(12)
|
||||
|
||||
val (list, archiveFile) = createLargeArchive(128, tempDir)
|
||||
val zipFile = ImmutableZipFile.load(archiveFile)
|
||||
val tasks = mutableListOf<ForkJoinTask<*>>()
|
||||
// force init of AssertJ to avoid ClosedByInterruptException on reading FileLoader index
|
||||
ConfigurationProvider.CONFIGURATION_PROVIDER
|
||||
for (i in 0..100) {
|
||||
tasks.add(ForkJoinTask.adapt(Runnable {
|
||||
val ioThread = runInThread {
|
||||
while (!Thread.currentThread().isInterrupted()) {
|
||||
for (name in list) {
|
||||
assertThat(zipFile.getEntry(name)).isNotNull()
|
||||
checkZip(archiveFile) { zipFile ->
|
||||
val tasks = mutableListOf<ForkJoinTask<*>>()
|
||||
// force init of AssertJ to avoid ClosedByInterruptException on reading FileLoader index
|
||||
ConfigurationProvider.CONFIGURATION_PROVIDER
|
||||
for (i in 0..100) {
|
||||
tasks.add(ForkJoinTask.adapt(Runnable {
|
||||
val ioThread = runInThread {
|
||||
while (!Thread.currentThread().isInterrupted()) {
|
||||
for (name in list) {
|
||||
assertThat(zipFile.getResource(name)).isNotNull()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// once in a while, the IO thread is stopped
|
||||
Thread.sleep(50)
|
||||
ioThread.interrupt()
|
||||
Thread.sleep(10)
|
||||
ioThread.join()
|
||||
}))
|
||||
// once in a while, the IO thread is stopped
|
||||
Thread.sleep(50)
|
||||
ioThread.interrupt()
|
||||
Thread.sleep(10)
|
||||
ioThread.join()
|
||||
}))
|
||||
}
|
||||
ForkJoinTask.invokeAll(tasks)
|
||||
}
|
||||
ForkJoinTask.invokeAll(tasks)
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -59,13 +97,10 @@ class ZipTest {
|
||||
Assumptions.assumeTrue(SystemInfoRt.isUnix)
|
||||
|
||||
val (list, archiveFile) = createLargeArchive(Short.MAX_VALUE * 2 + 20, fs.root)
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
zipFile.getEntry("qweqw")
|
||||
|
||||
checkZip(archiveFile) { zipFile ->
|
||||
for (name in list) {
|
||||
assertThat(zipFile.getEntry(name)).isNotNull()
|
||||
assertThat(zipFile.getResource(name)).isNotNull()
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -102,9 +137,9 @@ class ZipTest {
|
||||
val archiveFile = tempDir.resolve("archive.zip")
|
||||
zip(archiveFile, mapOf(dir to "test"), compress = false)
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
checkZip(archiveFile) { zipFile ->
|
||||
for (name in list) {
|
||||
assertThat(zipFile.getEntry("test/$name")).isNotNull()
|
||||
assertThat(zipFile.getResource("test/$name")).isNotNull()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -137,14 +172,32 @@ class ZipTest {
|
||||
fs.getPathMatcher("glob:**/icon-robots.txt"),
|
||||
))))
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
for (name in list) {
|
||||
assertThat(zipFile.getEntry("test/$name")).isNull()
|
||||
checkZip(archiveFile) { zipFile ->
|
||||
if (zipFile is ImmutableZipFile) {
|
||||
assertThat(zipFile.getOrComputeNames()).containsExactly(
|
||||
"entry-item663137163-10",
|
||||
"entry-item972016666-0",
|
||||
"entry-item1791766502-3",
|
||||
"entry-item1705343313-9",
|
||||
"entry-item-942605861-5",
|
||||
"entry-item1578011503-7",
|
||||
"entry-item949746295-2",
|
||||
"entry-item-245744780-1",
|
||||
"do-not-ignore-me",
|
||||
"icon-robots.txt",
|
||||
"entry-item-2145949183-8",
|
||||
"entry-item-1326272896-6",
|
||||
"entry-item828400960-4"
|
||||
)
|
||||
}
|
||||
assertThat(zipFile.getEntry("do-not-ignore-me")).isNotNull()
|
||||
assertThat(zipFile.getEntry("test-relative-ignore")).isNull()
|
||||
assertThat(zipFile.getEntry("some/nested/dir/icon-robots.txt")).isNull()
|
||||
assertThat(zipFile.getEntry("unknown")).isNull()
|
||||
|
||||
for (name in list) {
|
||||
assertThat(zipFile.getResource("test/$name")).isNull()
|
||||
}
|
||||
assertThat(zipFile.getResource("do-not-ignore-me")).isNotNull()
|
||||
assertThat(zipFile.getResource("test-relative-ignore")).isNull()
|
||||
assertThat(zipFile.getResource("some/nested/dir/icon-robots.txt")).isNull()
|
||||
assertThat(zipFile.getResource("unknown")).isNull()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -158,9 +211,9 @@ class ZipTest {
|
||||
val archiveFile = tempDir.resolve("archive.zip")
|
||||
zip(archiveFile, mapOf(dir to ""), compress = true)
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
HashMapZipFile.load(archiveFile).use { zipFile ->
|
||||
for (name in zipFile.entries) {
|
||||
val entry = zipFile.getEntry("samples/nested_dir/__init__.py")
|
||||
val entry = zipFile.getRawEntry("samples/nested_dir/__init__.py")
|
||||
assertThat(entry).isNotNull()
|
||||
assertThat(entry!!.isCompressed()).isFalse()
|
||||
assertThat(String(entry.getData(zipFile), Charsets.UTF_8)).isEqualTo("\n")
|
||||
@@ -197,12 +250,10 @@ class ZipTest {
|
||||
val archiveFile = tempDir.resolve("archive.zip")
|
||||
zip(archiveFile, mapOf(dir to ""), compress = true)
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
for (name in zipFile.entries) {
|
||||
val entry = zipFile.getEntry("file")
|
||||
assertThat(entry).isNotNull()
|
||||
assertThat(entry!!.isCompressed()).isTrue()
|
||||
}
|
||||
HashMapZipFile.load(archiveFile).use { zipFile ->
|
||||
val entry = zipFile.getRawEntry("file")
|
||||
assertThat(entry).isNotNull()
|
||||
assertThat(entry!!.isCompressed()).isTrue()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -218,11 +269,9 @@ class ZipTest {
|
||||
val archiveFile = tempDir.resolve("archive.zip")
|
||||
zip(archiveFile, mapOf(dir to ""), compress = false)
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
for (name in zipFile.entries) {
|
||||
val entry = zipFile.getEntry("largeFile1")
|
||||
assertThat(entry).isNotNull()
|
||||
}
|
||||
checkZip(archiveFile) { zipFile ->
|
||||
val entry = zipFile.getResource("largeFile1")
|
||||
assertThat(entry).isNotNull()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -240,11 +289,9 @@ class ZipTest {
|
||||
val archiveFile = tempDir.resolve("archive.zip")
|
||||
zip(archiveFile, mapOf(dir to ""), compress = true)
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
for (name in zipFile.entries) {
|
||||
val entry = zipFile.getEntry("largeFile1")
|
||||
assertThat(entry).isNotNull()
|
||||
}
|
||||
checkZip(archiveFile) { zipFile ->
|
||||
val entry = zipFile.getResource("largeFile1")
|
||||
assertThat(entry).isNotNull()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -262,11 +309,19 @@ class ZipTest {
|
||||
val archiveFile = tempDir.resolve("archive.zip")
|
||||
zip(archiveFile, mapOf(dir to ""), compress = true)
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
for (name in zipFile.entries) {
|
||||
val entry = zipFile.getEntry("largeFile1")
|
||||
assertThat(entry).isNotNull()
|
||||
}
|
||||
checkZip(archiveFile) { zipFile ->
|
||||
val entry = zipFile.getResource("largeFile1")
|
||||
assertThat(entry).isNotNull()
|
||||
}
|
||||
}
|
||||
|
||||
// check both IKV- and non-IKV variants of immutable zip file
private fun checkZip(file: Path, checker: (ZipFile) -> Unit) {
HashMapZipFile.load(file).use { zipFile ->
checker(zipFile)
}
ImmutableZipFile.load(file).use { zipFile ->
checker(zipFile)
}
}
}
@@ -4,8 +4,8 @@ package org.jetbrains.intellij.build.tasks
|
||||
|
||||
import com.intellij.testFramework.PlatformTestUtil
|
||||
import com.intellij.testFramework.rules.InMemoryFsExtension
|
||||
import com.intellij.util.io.Murmur3_32Hash
|
||||
import com.intellij.util.io.inputStream
|
||||
import com.intellij.util.lang.ImmutableZipFile
|
||||
import io.opentelemetry.api.common.AttributeKey
|
||||
import io.opentelemetry.api.common.Attributes
|
||||
import io.opentelemetry.api.trace.Span
|
||||
@@ -14,6 +14,7 @@ import org.assertj.core.api.Assertions.assertThat
|
||||
import org.jetbrains.intellij.build.io.RW_CREATE_NEW
|
||||
import org.jetbrains.intellij.build.io.ZipFileWriter
|
||||
import org.jetbrains.intellij.build.io.zip
|
||||
import org.jetbrains.xxh3.Xx3UnencodedString
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.extension.RegisterExtension
|
||||
import org.junit.jupiter.api.io.TempDir
|
||||
@@ -24,8 +25,6 @@ import java.util.concurrent.ForkJoinTask
|
||||
import java.util.zip.ZipEntry
|
||||
import kotlin.random.Random
|
||||
|
||||
// XxHash3.hashUnencodedChars32 cannot be used because ClasspathCache located in the module where only JDK 8 is supported.
|
||||
|
||||
private val testDataPath: Path
|
||||
get() = Path.of(PlatformTestUtil.getPlatformTestDataPath(), "plugins/reorderJars")
|
||||
|
||||
@@ -43,10 +42,10 @@ class ReorderJarsTest {
|
||||
val file = fs.root.resolve("f")
|
||||
Files.createDirectories(file.parent)
|
||||
FileChannel.open(file, RW_CREATE_NEW).use {
|
||||
packageIndexBuilder.writePackageIndex(ZipFileWriter(it, deflater = null))
|
||||
packageIndexBuilder.writeDirsAndPackageIndex(ZipFileWriter(it, deflater = null))
|
||||
}
|
||||
assertThat(packageIndexBuilder.resourcePackageHashSet)
|
||||
.containsExactlyInAnyOrder(0, Murmur3_32Hash.MURMUR3_32.hashString("tsMeteorStubs", 0, "tsMeteorStubs".length))
|
||||
.containsExactlyInAnyOrder(0, Xx3UnencodedString.hashUnencodedString("tsMeteorStubs"))
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -69,7 +68,6 @@ class ReorderJarsTest {
|
||||
doReorderJars(mapOf(archiveFile to emptyList()), archiveFile.parent, archiveFile.parent)
|
||||
ZipFile(Files.newByteChannel(archiveFile)).use { zipFile ->
|
||||
assertThat(zipFile.entriesInPhysicalOrder.asSequence().map { it.name }.sorted().joinToString(separator = "\n")).isEqualTo("""
|
||||
__packageIndex__
|
||||
anotherDir/
|
||||
anotherDir/resource2.txt
|
||||
dir2/
|
||||
@@ -77,6 +75,10 @@ class ReorderJarsTest {
|
||||
dir2/dir3/resource.txt
|
||||
""".trimIndent())
|
||||
}
|
||||
|
||||
ImmutableZipFile.load(archiveFile).use { zipFile ->
|
||||
assertThat(zipFile.getResource("anotherDir")).isNotNull()
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -118,7 +120,6 @@ class ReorderJarsTest {
|
||||
assertThat(file.name).isEqualTo("zkm.jar")
|
||||
ZipFile(file).use { zipFile ->
|
||||
val entries: List<ZipEntry> = zipFile.entries.toList()
|
||||
assertThat(entries.last().name).isEqualTo(PACKAGE_INDEX_NAME)
|
||||
assertThat(entries.first().name).isEqualTo("META-INF/plugin.xml")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -340,11 +340,13 @@ public final class JavaSdkImpl extends JavaSdk {
|
||||
static VirtualFile internalJdkAnnotationsPath(@NotNull List<? super String> pathsChecked, boolean refresh) {
|
||||
Path javaPluginClassesRootPath = PathManager.getJarForClass(JavaSdkImpl.class);
|
||||
LOG.assertTrue(javaPluginClassesRootPath != null);
|
||||
javaPluginClassesRootPath = javaPluginClassesRootPath.toAbsolutePath();
|
||||
VirtualFile root;
|
||||
VirtualFileManager vfm = VirtualFileManager.getInstance();
|
||||
LocalFileSystem lfs = LocalFileSystem.getInstance();
|
||||
String pathInResources = "resources/jdkAnnotations.jar";
|
||||
if (Files.isRegularFile(javaPluginClassesRootPath)) {
|
||||
Path annotationsJarPath = javaPluginClassesRootPath.resolveSibling("jdkAnnotations.jar").toAbsolutePath();
|
||||
Path annotationsJarPath = javaPluginClassesRootPath.resolveSibling(pathInResources);
|
||||
String annotationsJarPathString = FileUtil.toSystemIndependentName(annotationsJarPath.toString());
|
||||
String url = "jar://" + annotationsJarPathString + "!/";
|
||||
root = refresh ? vfm.refreshAndFindFileByUrl(url) : vfm.findFileByUrl(url);
|
||||
@@ -364,7 +366,7 @@ public final class JavaSdkImpl extends JavaSdk {
|
||||
}
|
||||
}
|
||||
if (root == null) {
|
||||
String url = "jar://" + FileUtil.toSystemIndependentName(PathManager.getHomePath()) + "/lib/jdkAnnotations.jar!/";
|
||||
String url = "jar://" + FileUtil.toSystemIndependentName(PathManager.getHomePath()) + "/lib/" + pathInResources + "!/";
|
||||
root = refresh ? vfm.refreshAndFindFileByUrl(url) : vfm.findFileByUrl(url);
|
||||
pathsChecked.add(url);
|
||||
}
|
||||
|
||||
@@ -19,7 +19,6 @@ import java.net.ConnectException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.nio.file.*;
|
||||
import java.security.ProtectionDomain;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Stream;
|
||||
@@ -55,7 +54,7 @@ public final class BootstrapClassLoaderUtil {
|
||||
if (isDevServer()) {
|
||||
ClassLoader classLoader = BootstrapClassLoaderUtil.class.getClassLoader();
|
||||
if (!(classLoader instanceof PathClassLoader)) {
|
||||
//noinspection SpellCheckingInspection,UseOfSystemOutOrSystemErr
|
||||
//noinspection UseOfSystemOutOrSystemErr
|
||||
System.err.println("Please run with VM option -Djava.system.class.loader=com.intellij.util.lang.PathClassLoader");
|
||||
System.exit(1);
|
||||
}
|
||||
@@ -128,8 +127,8 @@ public final class BootstrapClassLoaderUtil {
|
||||
}
|
||||
else {
|
||||
if (useUnifiedClassloader) {
|
||||
//noinspection UseOfSystemOutOrSystemErr,SpellCheckingInspection
|
||||
System.err.println("You should run JVM with -Djava.system.class.loader=com.intellij.util.lang.PathClassLoader");
|
||||
//noinspection UseOfSystemOutOrSystemErr
|
||||
System.err.println("You must run JVM with -Djava.system.class.loader=com.intellij.util.lang.PathClassLoader");
|
||||
}
|
||||
newClassLoader = new PathClassLoader(createNonUnifiedClassloaderBuilder(classpath));
|
||||
}
|
||||
@@ -163,7 +162,6 @@ public final class BootstrapClassLoaderUtil {
|
||||
return UrlClassLoader.build()
|
||||
.files(filterClassPath(classpath))
|
||||
.usePersistentClasspathIndexForLocalClassDirectories()
|
||||
.autoAssignUrlsWithProtectionDomain()
|
||||
.parent(ClassLoader.getPlatformClassLoader())
|
||||
.useCache()
|
||||
.allowBootstrapResources(Boolean.parseBoolean(System.getProperty(PROPERTY_ALLOW_BOOTSTRAP_RESOURCES, "true")));
|
||||
@@ -378,17 +376,13 @@ public final class BootstrapClassLoaderUtil {
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isApplicable(String className,
|
||||
ClassLoader loader,
|
||||
@Nullable ProtectionDomain protectionDomain) {
|
||||
return impl.isApplicable(className, loader, protectionDomain);
|
||||
public boolean isApplicable(String className, ClassLoader loader) {
|
||||
return impl.isApplicable(className, loader, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] transform(ClassLoader loader,
|
||||
String className,
|
||||
@Nullable ProtectionDomain protectionDomain, byte[] classBytes) {
|
||||
return impl.transform(loader, className, protectionDomain, classBytes);
|
||||
public byte[] transform(ClassLoader loader, String className, byte[] classBytes) {
|
||||
return impl.transform(loader, className, null, classBytes);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,17 +24,16 @@ public final class StartupActionScriptManager {
|
||||
@ApiStatus.Internal
|
||||
public static synchronized void executeActionScript() throws IOException {
|
||||
Path scriptFile = getActionScriptFile();
|
||||
if (Files.isRegularFile(scriptFile)) {
|
||||
try {
|
||||
List<ActionCommand> commands = loadActionScript(scriptFile);
|
||||
for (ActionCommand command : commands) {
|
||||
command.execute();
|
||||
}
|
||||
}
|
||||
finally {
|
||||
Files.deleteIfExists(scriptFile); // deleting a file should not cause an exception
|
||||
try {
|
||||
List<ActionCommand> commands = loadActionScript(scriptFile);
|
||||
for (ActionCommand command : commands) {
|
||||
command.execute();
|
||||
}
|
||||
}
|
||||
finally {
|
||||
// deleting a file should not cause an exception
|
||||
Files.deleteIfExists(scriptFile);
|
||||
}
|
||||
}
|
||||
|
||||
@ApiStatus.Internal
|
||||
|
||||
@@ -103,7 +103,9 @@ public final class Main {
|
||||
Thread.currentThread().setContextClassLoader(newClassLoader);
|
||||
|
||||
startupTimings.put("MainRunner search", System.nanoTime());
|
||||
Class<?> mainClass = newClassLoader.loadClassInsideSelf(MAIN_RUNNER_CLASS_NAME, true);
|
||||
|
||||
Class<?> mainClass = newClassLoader.loadClassInsideSelf(MAIN_RUNNER_CLASS_NAME, "com/intellij/idea/StartupUtil.class",
|
||||
-635775336887217634L, true);
|
||||
if (mainClass == null) {
|
||||
throw new ClassNotFoundException(MAIN_RUNNER_CLASS_NAME);
|
||||
}
|
||||
@@ -117,7 +119,13 @@ public final class Main {
|
||||
@SuppressWarnings("HardCodedStringLiteral")
|
||||
private static void installPluginUpdates() {
|
||||
try {
|
||||
StartupActionScriptManager.executeActionScript();
|
||||
// referencing StartupActionScriptManager.ACTION_SCRIPT_FILE is ok - string constant will be inlined
|
||||
Path scriptFile = Path.of(PathManager.getPluginTempPath(), StartupActionScriptManager.ACTION_SCRIPT_FILE);
|
||||
if (Files.isRegularFile(scriptFile)) {
|
||||
// load StartupActionScriptManager and all others related class (ObjectInputStream and so on loaded as part of class define)
|
||||
// only if there is action script to execute
|
||||
StartupActionScriptManager.executeActionScript();
|
||||
}
|
||||
}
|
||||
catch (IOException e) {
|
||||
showMessage("Plugin Installation Error",
|
||||
@@ -140,13 +148,13 @@ public final class Main {
|
||||
return isLightEdit;
|
||||
}
|
||||
|
||||
public static void setFlags(String @NotNull [] args) {
|
||||
private static void setFlags(String @NotNull [] args) {
|
||||
isHeadless = isHeadless(args);
|
||||
isCommandLine = isHeadless || (args.length > 0 && GUI_COMMANDS.contains(args[0]));
|
||||
if (isHeadless) {
|
||||
System.setProperty(AWT_HEADLESS, Boolean.TRUE.toString());
|
||||
}
|
||||
isLightEdit = "LightEdit".equals(System.getProperty(PLATFORM_PREFIX_PROPERTY)) || !isCommandLine && isFileAfterOptions(args);
|
||||
isLightEdit = "LightEdit".equals(System.getProperty(PLATFORM_PREFIX_PROPERTY)) || (!isCommandLine && isFileAfterOptions(args));
|
||||
}
|
||||
|
||||
private static boolean isFileAfterOptions(String @NotNull [] args) {
|
||||
|
||||
@@ -3,11 +3,9 @@ package com.intellij.util.lang;

import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.nio.file.Path;
import java.security.ProtectionDomain;
import java.util.function.Function;

@ApiStatus.Internal
@@ -23,14 +21,13 @@ public final class PathClassLoader extends UrlClassLoader {
}

public interface BytecodeTransformer {
default boolean isApplicable(String className, ClassLoader loader, @Nullable ProtectionDomain protectionDomain) {
default boolean isApplicable(String className, ClassLoader loader) {
return true;
}

byte[] transform(ClassLoader loader, String className, @Nullable ProtectionDomain protectionDomain, byte[] classBytes);
byte[] transform(ClassLoader loader, String className, byte[] classBytes);
}

@SuppressWarnings("unused")
public static Function<Path, ResourceFile> getResourceFileFactory() {
return RESOURCE_FILE_FACTORY;
}
@@ -51,8 +48,8 @@ public final class PathClassLoader extends UrlClassLoader {
}

@Override
public boolean isByteBufferSupported(@NotNull String name, @Nullable ProtectionDomain protectionDomain) {
return transformer == null || !transformer.isApplicable(name, this, protectionDomain);
public boolean isByteBufferSupported(@NotNull String name) {
return transformer == null || !transformer.isApplicable(name, this);
}

@Override
@@ -61,15 +58,15 @@ public final class PathClassLoader extends UrlClassLoader {
}

@Override
public Class<?> consumeClassData(@NotNull String name, byte[] data, Loader loader, @Nullable ProtectionDomain protectionDomain)
public Class<?> consumeClassData(@NotNull String name, byte[] data, Loader loader)
throws IOException {
BytecodeTransformer transformer = this.transformer;
if (transformer != null && transformer.isApplicable(name, this, protectionDomain)) {
byte[] transformedData = transformer.transform(this, name, protectionDomain, data);
if (transformer != null && transformer.isApplicable(name, this)) {
byte[] transformedData = transformer.transform(this, name, data);
if (transformedData != null) {
return super.consumeClassData(name, transformedData, loader, protectionDomain);
return super.consumeClassData(name, transformedData, loader);
}
}
return super.consumeClassData(name, data, loader, protectionDomain);
return super.consumeClassData(name, data, loader);
}
}

@@ -2,17 +2,15 @@
package com.intellij.util.lang;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.util.lang.ZipFile.ZipResource;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.nio.file.Path;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
@@ -21,12 +19,10 @@ import java.util.jar.JarFile;
import java.util.jar.Manifest;

@SuppressWarnings("SuspiciousPackagePrivateAccess")
public final class ZipResourceFile implements ResourceFile {
private static final int MANIFEST_HASH_CODE = 0x4099_fd89; // = Murmur3_32Hash.MURMUR3_32.hashString(JarFile.MANIFEST_NAME)
final class ZipResourceFile implements ResourceFile {
private final ZipFile zipFile;

private final ImmutableZipFile zipFile;

public ZipResourceFile(@NotNull Path file) {
ZipResourceFile(@NotNull Path file) {
ZipFilePool pool = ZipFilePool.POOL;
try {
if (pool == null) {
@@ -34,7 +30,7 @@ public final class ZipResourceFile implements ResourceFile {
}
else {
Object zipFile = pool.loadZipFile(file);
this.zipFile = (ImmutableZipFile)zipFile;
this.zipFile = (ZipFile)zipFile;
}
}
catch (IOException e) {
@@ -46,57 +42,61 @@ public final class ZipResourceFile implements ResourceFile {
public void processResources(@NotNull String dir,
@NotNull Predicate<? super String> nameFilter,
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException {
int minNameLength = dir.length() + 2;
for (ImmutableZipEntry entry : zipFile.getEntries()) {
String name = entry.getName();
if (name.length() >= minNameLength && name.startsWith(dir) && name.charAt(dir.length()) == '/' && nameFilter.test(name)) {
try (InputStream stream = entry.getInputStream(zipFile)) {
consumer.accept(name, stream);
}
}
}
zipFile.processResources(dir, nameFilter, consumer);
}

@Override
public @Nullable Attributes loadManifestAttributes() throws IOException {
ImmutableZipEntry entry = zipFile.getEntry(JarFile.MANIFEST_NAME, MANIFEST_HASH_CODE);
return entry == null ? null : new Manifest(new ByteArrayInputStream(entry.getData(zipFile))).getMainAttributes();
InputStream stream = zipFile.getInputStream(JarFile.MANIFEST_NAME);
if (stream == null) {
return null;
}

try {
return new Manifest(stream).getMainAttributes();
}
finally {
stream.close();
}
}

@Override
public @NotNull ClasspathCache.IndexRegistrar buildClassPathCacheData() throws IOException {
public @NotNull ClasspathCache.IndexRegistrar buildClassPathCacheData() {
// name hash is not added - doesn't make sense as fast lookup by name is supported by ImmutableZipFile
ImmutableZipEntry packageIndex = zipFile.getEntry("__packageIndex__");
if (packageIndex == null) {
if (zipFile instanceof ImmutableZipFile) {
ImmutableZipFile file = (ImmutableZipFile)zipFile;
return new ClasspathCache.IndexRegistrar() {
@Override
public int classPackageCount() {
return file.classPackages.length;
}

@Override
public int resourcePackageCount() {
return file.resourcePackages.length;
}

@Override
public long[] classPackages() {
return file.classPackages;
}

@Override
public long[] resourcePackages() {
return file.resourcePackages;
}
};
}
else {
return computePackageIndex();
}

ByteBuffer buffer = packageIndex.getByteBuffer(zipFile);
buffer.order(ByteOrder.LITTLE_ENDIAN);
int[] classPackages = new int[buffer.getInt()];
int[] resourcePackages = new int[buffer.getInt()];
IntBuffer intBuffer = buffer.asIntBuffer();
intBuffer.get(classPackages);
intBuffer.get(resourcePackages);
return (classMap, resourceMap, loader) -> {
ClasspathCache.addResourceEntries(classPackages, classMap, loader);
ClasspathCache.addResourceEntries(resourcePackages, resourceMap, loader);
};
}

private @NotNull ClasspathCache.LoaderDataBuilder computePackageIndex() {
ClasspathCache.LoaderDataBuilder builder = new ClasspathCache.LoaderDataBuilder(false);
for (ImmutableZipEntry entry : zipFile.getRawNameSet()) {
if (entry == null) {
continue;
}

String name = entry.name;
if (name.endsWith(ClassPath.CLASS_EXTENSION)) {
builder.addClassPackageFromName(name);
}
else {
builder.addResourcePackageFromName(name);
ClasspathCache.LoaderDataBuilder builder = new ClasspathCache.LoaderDataBuilder();
for (ImmutableZipEntry entry : ((HashMapZipFile)zipFile).getRawNameSet()) {
if (entry != null) {
builder.addPackageFromName(entry.name);
}
}
return builder;
@@ -105,60 +105,60 @@ public final class ZipResourceFile implements ResourceFile {
@Override
public @Nullable Class<?> findClass(String fileName, String className, JarLoader jarLoader, ClassPath.ClassDataConsumer classConsumer)
throws IOException {
ImmutableZipEntry entry = zipFile.getEntry(fileName);
if (entry == null) {
return null;
}
if (classConsumer.isByteBufferSupported(className)) {
ByteBuffer buffer = zipFile.getByteBuffer(fileName);
if (buffer == null) {
return null;
}

if (classConsumer.isByteBufferSupported(className, null)) {
ByteBuffer buffer = entry.getByteBuffer(zipFile);
try {
return classConsumer.consumeClassData(className, buffer, jarLoader, null);
return classConsumer.consumeClassData(className, buffer, jarLoader);
}
finally {
entry.releaseBuffer(buffer);
zipFile.releaseBuffer(buffer);
}
}
else {
return classConsumer.consumeClassData(className, entry.getData(zipFile), jarLoader, null);
byte[] data = zipFile.getData(fileName);
if (data == null) {
return null;
}
return classConsumer.consumeClassData(className, data, jarLoader);
}
}

@Override
public @Nullable Resource getResource(@NotNull String name, @NotNull JarLoader jarLoader) throws IOException {
ImmutableZipEntry entry = zipFile.getEntry(name);
if (entry == null) {
return null;
}
return new ZipFileResource(jarLoader, entry, zipFile);
public @Nullable Resource getResource(@NotNull String name, @NotNull JarLoader jarLoader) {
ZipResource entry = zipFile.getResource(name);
return entry == null ? null : new ZipFileResource(jarLoader, entry, name);
}

private static final class ZipFileResource implements Resource {
private final URL baseUrl;
private URL url;
private final ImmutableZipEntry entry;
private final ImmutableZipFile file;
private final String name;
private final ZipResource entry;
private @Nullable("if mimicJarUrlConnection equals to false") final Path path;

private ZipFileResource(@NotNull JarLoader jarLoader, @NotNull ImmutableZipEntry entry, @NotNull ImmutableZipFile file) {
private ZipFileResource(@NotNull JarLoader jarLoader, @NotNull ZipResource entry, @NotNull String name) {
this.baseUrl = jarLoader.url;
this.entry = entry;
this.file = file;
this.path = jarLoader.configuration.mimicJarUrlConnection ? jarLoader.path : null;
this.name = name;
this.path = jarLoader.configuration.mimicJarUrlConnection ? jarLoader.getPath() : null;
}

@Override
public String toString() {
return "ZipFileResource(name=" + entry.getName() + ", file=" + file + ')';
return "ZipFileResource(name=" + entry + ")";
}

@Override
public @NotNull URL getURL() {
URL result = url;
if (result == null) {
URLStreamHandler handler = new MyJarUrlStreamHandler(entry, file, path);
URLStreamHandler handler = new MyJarUrlStreamHandler(entry, path);
try {
result = new URL(baseUrl, entry.getName(), handler);
result = new URL(baseUrl, name, handler);
}
catch (MalformedURLException e) {
throw new RuntimeException(e);
@@ -170,56 +170,50 @@ public final class ZipResourceFile implements ResourceFile {

@Override
public @NotNull InputStream getInputStream() throws IOException {
return entry.getInputStream(file);
return entry.getInputStream();
}

@Override
public byte @NotNull [] getBytes() throws IOException {
return entry.getData(file);
return entry.getData();
}
}

private static final class MyJarUrlStreamHandler extends URLStreamHandler {
private @NotNull final ImmutableZipEntry entry;
private @NotNull final ImmutableZipFile file;
private @NotNull final ZipResource entry;
private @Nullable final Path path;

private MyJarUrlStreamHandler(@NotNull ImmutableZipEntry entry, @NotNull ImmutableZipFile file, @Nullable Path path) {
private MyJarUrlStreamHandler(@NotNull ZipResource entry, @Nullable Path path) {
this.entry = entry;
this.file = file;
this.path = path;
}

@Override
protected URLConnection openConnection(URL url) throws MalformedURLException {
return path == null ? new MyUrlConnection(url, entry, file) : new MyJarUrlConnection(url, entry, file, path);
return path == null ? new MyUrlConnection(url, entry) : new MyJarUrlConnection(url, entry, path);
}
}

private static final class MyUrlConnection extends URLConnection {
private final ImmutableZipEntry entry;
private final ImmutableZipFile file;
private final ZipResource entry;
private byte[] data;

MyUrlConnection(@NotNull URL url,
@NotNull ImmutableZipEntry entry,
@NotNull ImmutableZipFile file) {
MyUrlConnection(@NotNull URL url, @NotNull ZipResource entry) {
super(url);
this.entry = entry;
this.file = file;
}

private byte[] getData() throws IOException {
byte[] result = data;
if (result == null) {
result = entry.getData(file);
result = entry.getData();
data = result;
}
return result;
}

@Override
public void connect() throws IOException {
public void connect() {
}

@Override
@@ -229,42 +223,37 @@ public final class ZipResourceFile implements ResourceFile {

@Override
public InputStream getInputStream() throws IOException {
return entry.getInputStream(file);
return entry.getInputStream();
}

@Override
public int getContentLength() {
return entry.uncompressedSize;
return entry.getUncompressedSize();
}
}

private static final class MyJarUrlConnection extends JarURLConnection {
private final ImmutableZipEntry entry;
private final ImmutableZipFile file;
private final ZipResource entry;
private final Path path;
private byte[] data;

MyJarUrlConnection(@NotNull URL url,
@NotNull ImmutableZipEntry entry,
@NotNull ImmutableZipFile file,
@NotNull Path path) throws MalformedURLException {
MyJarUrlConnection(@NotNull URL url, @NotNull ZipResource entry, @NotNull Path path) throws MalformedURLException {
super(url);
this.entry = entry;
this.file = file;
this.path = path;
}

private byte[] getData() throws IOException {
byte[] result = data;
if (result == null) {
result = entry.getData(file);
result = entry.getData();
data = result;
}
return result;
}

@Override
public void connect() throws IOException {
public void connect() {
}

@Override
@@ -274,12 +263,12 @@ public final class ZipResourceFile implements ResourceFile {

@Override
public InputStream getInputStream() throws IOException {
return entry.getInputStream(file);
return entry.getInputStream();
}

@Override
public int getContentLength() {
return entry.uncompressedSize;
return entry.getUncompressedSize();
}

@Override

@@ -33,11 +33,13 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="kotlin-stdlib-jdk8" level="project" />
<orderEntry type="library" name="jps-build-script-dependencies-bootstrap" level="project" />
<orderEntry type="module" module-name="intellij.platform.util.zip" scope="RUNTIME" />
<orderEntry type="module" module-name="intellij.platform.util.immutableKeyValueStore" scope="RUNTIME" />
<orderEntry type="module" module-name="intellij.platform.buildScripts" />
<orderEntry type="module" module-name="intellij.idea.community.build" scope="RUNTIME" />
<orderEntry type="library" name="kotlinx-serialization-core" level="project" />
<orderEntry type="library" name="kotlinx-serialization-json" level="project" />
<orderEntry type="module" module-name="intellij.idea.community.build.tasks" />
<orderEntry type="library" name="jps-build-script-dependencies-bootstrap" level="project" />
</component>
</module>
@@ -19,6 +19,9 @@ final class DevBuildServerBootstrap {
List<String> jarUrls = new ArrayList<>();
Path classDir = Path.of(System.getenv("CLASSES_DIR"));
jarUrls.add(classDir.resolve("intellij.platform.devBuildServer").toString());
jarUrls.add(classDir.resolve("intellij.platform.util.rt.java8").toString());
jarUrls.add(classDir.resolve("intellij.platform.util.zip").toString());
jarUrls.add(classDir.resolve("intellij.platform.util.immutableKeyValueStore").toString());
jarUrls.add(classDir.resolve("intellij.platform.buildScripts").toString());
jarUrls.add(classDir.resolve("intellij.platform.buildScripts.downloader").toString());
jarUrls.add(classDir.resolve("intellij.idea.community.build").toString());

@@ -1,8 +1,11 @@
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
@file:Suppress("ReplaceGetOrSet")

package org.jetbrains.intellij.build.devServer

import com.intellij.openapi.application.PathManager
import com.intellij.openapi.util.io.FileUtil
import com.sun.net.httpserver.HttpContext
import com.sun.net.httpserver.HttpExchange
import com.sun.net.httpserver.HttpServer
import org.apache.log4j.ConsoleAppender
@@ -23,6 +26,7 @@ import java.util.concurrent.Semaphore
import kotlin.io.path.createDirectories
import kotlin.system.exitProcess

@Suppress("GrazieInspection")
val skippedPluginModules = hashSetOf(
"intellij.cwm.plugin", // quiche downloading should be implemented as a maven lib
)
@@ -99,61 +103,65 @@ class DevIdeaBuildServer {

private fun HttpExchange.getPlatformPrefix() = parseQuery(this.requestURI).get("platformPrefix")?.first() ?: "idea"

private fun HttpServer.createBuildEndpoint(buildServer: BuildServer) = createContext("/build") { exchange ->
val platformPrefix = exchange.getPlatformPrefix()
private fun createBuildEndpoint(httpServer: HttpServer, buildServer: BuildServer): HttpContext? {
return httpServer.createContext("/build") { exchange ->
val platformPrefix = exchange.getPlatformPrefix()

var statusMessage: String
var statusCode = HttpURLConnection.HTTP_OK
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.UNDEFINED
var statusMessage: String
var statusCode = HttpURLConnection.HTTP_OK
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.UNDEFINED

try {
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.IN_PROGRESS
buildQueueLock.acquire()
try {
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.IN_PROGRESS
buildQueueLock.acquire()

exchange.responseHeaders.add("Content-Type", "text/plain")
val ideBuilder = buildServer.checkOrCreateIdeBuilder(platformPrefix)
statusMessage = ideBuilder.pluginBuilder.buildChanged()
LOG.info(statusMessage)
}
catch (e: ConfigurationException) {
statusCode = HttpURLConnection.HTTP_BAD_REQUEST
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.FAILED
statusMessage = e.message!!
}
catch (e: Throwable) {
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.FAILED
exchange.sendResponseHeaders(HttpURLConnection.HTTP_UNAVAILABLE, -1)
LOG.error("Cannot handle build request", e)
return@createContext
}
finally {
buildQueueLock.release()
}
exchange.responseHeaders.add("Content-Type", "text/plain")
val ideBuilder = buildServer.checkOrCreateIdeBuilder(platformPrefix)
statusMessage = ideBuilder.pluginBuilder.buildChanged()
LOG.info(statusMessage)
}
catch (e: ConfigurationException) {
statusCode = HttpURLConnection.HTTP_BAD_REQUEST
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.FAILED
statusMessage = e.message!!
}
catch (e: Throwable) {
productBuildStatus[platformPrefix] = DevIdeaBuildServerStatus.FAILED
exchange.sendResponseHeaders(HttpURLConnection.HTTP_UNAVAILABLE, -1)
LOG.error("Cannot handle build request", e)
return@createContext
}
finally {
buildQueueLock.release()
}

productBuildStatus[platformPrefix] =
if (statusCode == HttpURLConnection.HTTP_OK) DevIdeaBuildServerStatus.OK
else DevIdeaBuildServerStatus.FAILED
productBuildStatus[platformPrefix] =
if (statusCode == HttpURLConnection.HTTP_OK) DevIdeaBuildServerStatus.OK
else DevIdeaBuildServerStatus.FAILED

val response = statusMessage.encodeToByteArray()
exchange.sendResponseHeaders(statusCode, response.size.toLong())
exchange.responseBody.apply {
write(response)
flush()
close()
val response = statusMessage.encodeToByteArray()
exchange.sendResponseHeaders(statusCode, response.size.toLong())
exchange.responseBody.apply {
this.write(response)
this.flush()
this.close()
}
}
}

private fun HttpServer.createStatusEndpoint() = createContext("/status") { exchange ->
val platformPrefix = exchange.getPlatformPrefix()
val buildStatus = productBuildStatus.getOrDefault(platformPrefix, DevIdeaBuildServerStatus.UNDEFINED)
private fun createStatusEndpoint(httpServer: HttpServer): HttpContext? {
return httpServer.createContext("/status") { exchange ->
val platformPrefix = exchange.getPlatformPrefix()
val buildStatus = productBuildStatus.getOrDefault(platformPrefix, DevIdeaBuildServerStatus.UNDEFINED)

exchange.responseHeaders.add("Content-Type", "text/plain")
val response = buildStatus.toString().encodeToByteArray()
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, response.size.toLong())
exchange.responseBody.apply {
write(response)
flush()
close()
exchange.responseHeaders.add("Content-Type", "text/plain")
val response = buildStatus.toString().encodeToByteArray()
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, response.size.toLong())
exchange.responseBody.apply {
this.write(response)
this.flush()
this.close()
}
}
}

@@ -161,8 +169,8 @@ class DevIdeaBuildServer {
val httpServer = HttpServer.create()
httpServer.bind(InetSocketAddress(InetAddress.getLoopbackAddress(), SERVER_PORT), 2)

httpServer.createBuildEndpoint(buildServer)
httpServer.createStatusEndpoint()
createBuildEndpoint(httpServer, buildServer)
createStatusEndpoint(httpServer)

// Serve requests in parallel. Though, there is no guarantee, that 2 requests will be for different endpoints
httpServer.executor = Executors.newFixedThreadPool(2)

@@ -20,7 +20,7 @@ import java.nio.file.Files
import java.nio.file.Path
import java.util.concurrent.TimeUnit

const val UNMODIFIED_MARK_FILE_NAME = ".unmodified"
internal const val UNMODIFIED_MARK_FILE_NAME = ".unmodified"

class IdeBuilder(val pluginBuilder: PluginBuilder,
homePath: Path,
@@ -65,7 +65,7 @@ internal fun initialBuild(productConfiguration: ProductConfiguration, homePath:
val runDir = createRunDirForProduct(homePath, platformPrefix)

val buildContext = BuildContext.createContext(getCommunityHomePath(homePath).toString(), homePath.toString(), productProperties,
ProprietaryBuildTools.DUMMY, createBuildOptions(homePath))
ProprietaryBuildTools.DUMMY, createBuildOptions(runDir))
val pluginsDir = runDir.resolve("plugins")

val mainModuleToNonTrivialPlugin = HashMap<String, BuildItem>(bundledMainModuleNames.size)
@@ -121,28 +121,44 @@ internal fun initialBuild(productConfiguration: ProductConfiguration, homePath:

private fun createLibClassPath(context: BuildContext, homePath: Path): String {
val platformLayout = DistributionJARsBuilder.createPlatformLayout(emptySet(), context)
val isPackagedLib = System.getProperty("dev.server.pack.lib") == "true"
val projectStructureMapping = DistributionJARsBuilder.processLibDirectoryLayout(ModuleOutputPatcher(),
platformLayout,
context,
false).fork().join()
isPackagedLib).fork().join()
// for some reasons maybe duplicated paths - use set
val classPath = LinkedHashSet<String>()
for (entry in projectStructureMapping) {
when (entry) {
is ModuleOutputEntry -> {
classPath.add(context.getModuleOutputDir(context.findRequiredModule(entry.moduleName)).toString())
}
is LibraryFileEntry -> {
classPath.add(entry.libraryFile.toString())
}
else -> throw UnsupportedOperationException("Entry $entry is not supported")
}
if (isPackagedLib) {
projectStructureMapping.mapTo(classPath) { it.path.toString() }
}
else {
for (entry in projectStructureMapping) {
when (entry) {
is ModuleOutputEntry -> {
if (isPackagedLib) {
classPath.add(entry.path.toString())
}
else {
classPath.add(context.getModuleOutputDir(context.findRequiredModule(entry.moduleName)).toString())
}
}
is LibraryFileEntry -> {
if (isPackagedLib) {
classPath.add(entry.path.toString())
}
else {
classPath.add(entry.libraryFile.toString())
}
}
else -> throw UnsupportedOperationException("Entry $entry is not supported")
}
}

for (libName in platformLayout.projectLibrariesToUnpack.values()) {
val library = context.project.libraryCollection.findLibrary(libName) ?: throw IllegalStateException("Cannot find library $libName")
library.getRootUrls(JpsOrderRootType.COMPILED).mapTo(classPath) {
JpsPathUtil.urlToPath(it)
for (libName in platformLayout.projectLibrariesToUnpack.values()) {
val library = context.project.libraryCollection.findLibrary(libName) ?: throw IllegalStateException("Cannot find library $libName")
library.getRootUrls(JpsOrderRootType.COMPILED).mapTo(classPath) {
JpsPathUtil.urlToPath(it)
}
}
}

@@ -195,13 +211,13 @@ private fun getCommunityHomePath(homePath: Path): Path {
return if (Files.isDirectory(communityDotIdea)) communityDotIdea.parent else homePath
}

private fun createBuildOptions(homePath: Path): BuildOptions {
private fun createBuildOptions(runDir: Path): BuildOptions {
val buildOptions = BuildOptions()
buildOptions.useCompiledClassesFromProjectOutput = true
buildOptions.targetOS = BuildOptions.OS_NONE
buildOptions.cleanOutputFolder = false
buildOptions.skipDependencySetup = true
buildOptions.outputRootPath = homePath.resolve("out/dev-server").toString()
buildOptions.outputRootPath = runDir.toString()
buildOptions.buildStepsToSkip.add(BuildOptions.PREBUILD_SHARED_INDEXES)
return buildOptions
}
@@ -88,7 +88,7 @@ final class JavaPluginLayout {
withModuleLibrary("jshell-frontend", "intellij.java.execution.impl", "jshell-frontend.jar")
withModuleLibrary("sa-jdwp", "intellij.java.debugger.impl", "sa-jdwp.jar")

withResourceArchive("../jdkAnnotations", "lib/jdkAnnotations.jar")
withResourceArchive("../jdkAnnotations", "lib/resources/jdkAnnotations.jar")

addition.delegate = delegate
addition()

@@ -81,4 +81,8 @@ abstract class BaseLayout {
final void withProjectLibraryUnpackedIntoJar(String libraryName, String jarName) {
projectLibrariesToUnpack.putValue(jarName, libraryName)
}

void excludeFromModule(String moduleName, String excludedPattern) {
moduleExcludes.putValue(moduleName, excludedPattern)
}
}
@@ -74,7 +74,7 @@ class BaseLayoutSpec {
* to exclude 'foo' directory
*/
void excludeFromModule(String moduleName, String excludedPattern) {
layout.moduleExcludes.putValue(moduleName, excludedPattern)
layout.excludeFromModule(moduleName, excludedPattern)
}

/**

@@ -66,6 +66,7 @@ final class BuildHelper {
final MethodHandle buildMacZip

final MethodHandle crossPlatformArchive
final MethodHandle consumeDataByPrefix

private final BiFunction<SpanBuilder, Supplier<?>, ForkJoinTask<?>> createTask
private final BiConsumer<SpanBuilder, Runnable> spanImpl
@@ -124,8 +125,9 @@ final class BuildHelper {
setAppInfo = lookup.findStatic(helperClassLoader.loadClass("org.jetbrains.intellij.build.tasks.AsmKt"), "injectAppInfo",
MethodType.methodType(byte[].class, path, string))

buildKeymapPlugins = lookup.findStatic(helperClassLoader.loadClass("org.jetbrains.intellij.build.tasks.KeymapPluginKt"), "buildKeymapPlugins",
MethodType.methodType(ForkJoinTask.class, string, path, path))
buildKeymapPlugins =
lookup.findStatic(helperClassLoader.loadClass("org.jetbrains.intellij.build.tasks.KeymapPluginKt"), "buildKeymapPlugins",
MethodType.methodType(ForkJoinTask.class, string, path, path))

Map<String, ?> expose = (Map<String, ?>)(lookup.findStatic(helperClassLoader.loadClass("org.jetbrains.intellij.build.tasks.MainKt"),
"expose",
@@ -178,10 +180,10 @@ final class BuildHelper {
list, int.class))

prepareMacZip = lookup.findStatic(helperClassLoader.loadClass("org.jetbrains.intellij.build.tasks.SignKt"),
"prepareMacZip",
MethodType.methodType(aVoid,
path, path, byte[].class,
path, string))
"prepareMacZip",
MethodType.methodType(aVoid,
path, path, byte[].class,
path, string))

crossPlatformArchive = lookup.findStatic(helperClassLoader.loadClass("org.jetbrains.intellij.build.tasks.ArchiveKt"),
"crossPlatformZip",
@@ -193,6 +195,10 @@ final class BuildHelper {
Collection.class,
Map.class,
path))

consumeDataByPrefix = lookup.findStatic(helperClassLoader.loadClass("org.jetbrains.intellij.build.tasks.ArchiveKt"),
"consumeDataByPrefix",
MethodType.methodType(aVoid, path, string, BiConsumer.class))
}

@NotNull

@@ -31,8 +31,8 @@ import java.lang.invoke.MethodHandle
import java.nio.file.Path
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.function.BiConsumer
import java.util.function.BiFunction
import java.util.function.Function
import java.util.function.IntConsumer
import java.util.function.Predicate
import java.util.regex.Matcher
@@ -99,7 +99,8 @@ final class JarPackager {
packager.addLibrary(library, targetFile, getLibraryFiles(library, copiedFiles, true))
}

Map<JpsLibrary, List<Path>> libraryToMerge = packager.packLibraries(actualModuleJars, outputDir, layout, copiedFiles)
Map<String, List> extraLibSources = new HashMap<>()
Map<JpsLibrary, List<Path>> libraryToMerge = packager.packLibraries(actualModuleJars, outputDir, layout, copiedFiles, extraLibSources)

boolean isRootDir = context.paths.distAllDir == outputDir.parent
if (isRootDir) {
@@ -164,6 +165,11 @@ final class JarPackager {
libSources = null
}

List extra = extraLibSources.get(entry.key)
if (extra != null) {
sourceList.addAll(extra)
}

packager.jarDescriptors.add(packager.packModuleOutputAndUnpackedProjectLibraries(entry.value,
jarPath,
jarFile,
@@ -238,40 +244,6 @@ final class JarPackager {
return sources
}

private static void processModuleLibs(Map<String, List<String>> actualModuleJars,
BaseLayout layout,
BuildContext context,
BiConsumer<JpsLibrary, String> consumer) {
// include all module libraries from the plugin modules added to IDE classpath to layout
actualModuleJars.entrySet()
.stream()
.filter { !it.key.contains("/") }
.flatMap { it.value.stream() }
.filter { !layout.modulesWithExcludedModuleLibraries.contains(it) }
.forEach { moduleName ->
Collection<String> excluded = layout.excludedModuleLibraries.get(moduleName)
for (JpsDependencyElement element : context.findRequiredModule(moduleName).dependenciesList.dependencies) {
if (!(element instanceof JpsLibraryDependency)) {
continue
}

JpsLibraryDependency libraryDependency = (JpsLibraryDependency)element
if (!(libraryDependency.libraryReference?.parentReference?.resolve() instanceof JpsModule)) {
continue
}

if (JpsJavaExtensionService.instance.getDependencyExtension(element)?.scope
?.isIncludedIn(JpsJavaClasspathKind.PRODUCTION_RUNTIME) ?: false) {
JpsLibrary library = libraryDependency.library
String libraryName = LayoutBuilder.getLibraryName(library)
if (!excluded.contains(libraryName) &&
!layout.includedModuleLibraries.any { it.libraryName == libraryName }) {
consumer.accept(library, moduleName)
}
}
}
}
}

static Path getSearchableOptionsDir(BuildContext buildContext) {
return buildContext.paths.tempDir.resolve("searchableOptionsResult")
@@ -343,7 +315,8 @@ final class JarPackager {
private Map<JpsLibrary, List<Path>> packLibraries(Map<String, List<String>> jarToModuleNames,
Path outputDir,
BaseLayout layout,
Map<Path, JpsLibrary> copiedFiles) {
Map<Path, JpsLibrary> copiedFiles,
Map<String, List> extraLibSources) {
Map<JpsLibrary, List<Path>> toMerge = new HashMap<JpsLibrary, List<Path>>()
Predicate<String> isLibraryMergeable = buildHelper.isLibraryMergeable

@@ -393,40 +366,87 @@ final class JarPackager {
}
}

processModuleLibs(jarToModuleNames, layout, context, new BiConsumer<JpsLibrary, String>() {
@Override
void accept(JpsLibrary library, String moduleName) {
String libName = library.name
List<Path> files = getLibraryFiles(library, copiedFiles, true)
for (Map.Entry<String, List<String>> entry : jarToModuleNames.entrySet()) {
if (entry.key.contains("/")) {
continue
}

if (libName == "async-profiler-windows") {
// custom name, removeVersionFromJar doesn't support strings like `2.1-ea-4`
addLibrary(library, outputDir.resolve("async-profiler-windows.jar"), files)
return
for (String moduleName : entry.value) {
if (layout.modulesWithExcludedModuleLibraries.contains(moduleName)) {
continue
}

boolean isJpsModule = moduleName.endsWith(".jps")
for (int i = files.size() - 1; i >= 0; i--) {
Path file = files.get(i)
String fileName = file.fileName.toString()
if (isJpsModule) {
files.remove(i)
addLibrary(library, outputDir.resolve(fileName), List.of(file))
Collection<String> excluded = layout.excludedModuleLibraries.get(moduleName)
for (JpsDependencyElement element : context.findRequiredModule(moduleName).dependenciesList.dependencies) {
if (!(element instanceof JpsLibraryDependency)) {
continue
}
else {
//noinspection SpellCheckingInspection
if (fileName.endsWith("-rt.jar") || fileName.startsWith("jps-") || fileName.contains("-agent") ||
fileName == "yjp-controller-api-redist.jar") {
files.remove(i)
addLibrary(library, outputDir.resolve(removeVersionFromJar(fileName)), List.of(file))

JpsLibraryDependency libraryDependency = (JpsLibraryDependency)element
JpsCompositeElement parent = libraryDependency.libraryReference?.parentReference?.resolve()
if (!(parent instanceof JpsModule)) {
continue
}

if (!(JpsJavaExtensionService.instance.getDependencyExtension(element)?.scope
?.isIncludedIn(JpsJavaClasspathKind.PRODUCTION_RUNTIME) ?: false)) {
continue
}

JpsLibrary library = libraryDependency.library
String libraryName = LayoutBuilder.getLibraryName(library)
if (!excluded.contains(libraryName) &&
!layout.includedModuleLibraries.any { it.libraryName == libraryName }) {
String libName = library.name
List<Path> files = getLibraryFiles(library, copiedFiles, true)

if (libName == "async-profiler-windows") {
// custom name, removeVersionFromJar doesn't support strings like `2.1-ea-4`
addLibrary(library, outputDir.resolve("async-profiler-windows.jar"), files)
continue
}

boolean isJpsModule = moduleName.endsWith(".jps")
for (int i = files.size() - 1; i >= 0; i--) {
Path file = files.get(i)
String fileName = file.fileName.toString()
if (isJpsModule) {
files.remove(i)
addLibrary(library, outputDir.resolve(fileName), List.of(file))
}
else {
//noinspection SpellCheckingInspection
if (fileName.endsWith("-rt.jar") || fileName.startsWith("jps-") || fileName.contains("-agent") ||
fileName == "yjp-controller-api-redist.jar") {
files.remove(i)
addLibrary(library, outputDir.resolve(removeVersionFromJar(fileName)), List.of(file))
}
}
}
if (!files.isEmpty()) {
BiFunction<Path, IntConsumer, ?> createZipSource = buildHelper.createZipSource
Path targetFile = outputDir.resolve(entry.key)

List sources = extraLibSources.computeIfAbsent(entry.key, new Function<String, List>() {
@Override
List apply(String s) {
return new ArrayList()
}
})

for (Path file : files) {
sources.add(createZipSource.apply(file, new IntConsumer() {
@Override
void accept(int size) {
projectStructureMapping.add(new ModuleLibraryFileEntry(targetFile, moduleName, file, size))
}
}))
}
}
}
}
if (!files.isEmpty()) {
toMerge.put(library, files)
}
}
})
}

return toMerge
}

@@ -1,8 +1,4 @@
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.




// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.intellij.build.impl

import groovy.transform.CompileStatic

@@ -182,10 +182,11 @@ final class PlatformModules {
}

jar(UTIL_JAR, List.of(
"intellij.platform.util.rt",
"intellij.platform.util.rt.java8",
"intellij.platform.util.zip",
"intellij.platform.util.classLoader",
"intellij.platform.bootstrap",
"intellij.platform.util.rt",
"intellij.platform.util",
"intellij.platform.util.text.matching",
"intellij.platform.util.base",

@@ -2,12 +2,11 @@
package org.jetbrains.intellij.build.kotlin

import com.intellij.util.io.Decompressor
import com.intellij.util.lang.ImmutableZipEntry
import com.intellij.util.lang.ImmutableZipFile
import groovy.transform.CompileStatic
import org.jetbrains.intellij.build.BuildContext
import org.jetbrains.intellij.build.BuildTasks
import org.jetbrains.intellij.build.ProductProperties
import org.jetbrains.intellij.build.impl.BuildHelper
import org.jetbrains.intellij.build.impl.ModuleOutputPatcher
import org.jetbrains.intellij.build.impl.PluginLayout
import org.jetbrains.intellij.build.impl.ProjectLibraryData
@@ -15,7 +14,6 @@ import org.jetbrains.jps.model.library.JpsLibrary
import org.jetbrains.jps.model.library.JpsOrderRootType
import org.jetbrains.jps.model.library.JpsRepositoryLibraryType

import java.nio.ByteBuffer
import java.nio.file.Files
import java.nio.file.Path
import java.util.function.BiConsumer
@@ -221,22 +219,13 @@ final class KotlinPluginBuilder {
throw new IllegalStateException("$kotlincKotlinCompiler is expected to have only one jar")
}

String prefixWithEndingSlash = "META-INF/extensions/"
ImmutableZipFile.load(jars[0].toPath()).withCloseable { zip ->
for (ImmutableZipEntry entry : zip.entries) {
if (entry.name.startsWith(prefixWithEndingSlash)) {
ByteBuffer buffer = entry.getByteBuffer(zip)
try {
byte[] bytes = new byte[buffer.remaining()]
buffer.get(bytes)
patcher.patchModuleOutput(MAIN_KOTLIN_PLUGIN_MODULE, entry.name, bytes)
}
finally {
entry.releaseBuffer(buffer)
}
BuildHelper.getInstance(context).consumeDataByPrefix
.invokeWithArguments(jars[0].toPath(), "META-INF/extensions/", new BiConsumer<String, byte[]>() {
@Override
void accept(String name, byte[] data) {
patcher.patchModuleOutput(MAIN_KOTLIN_PLUGIN_MODULE, name, data)
}
}
}
})
}
})


@@ -27,5 +27,7 @@
<orderEntry type="module" module-name="intellij.platform.util.xmlDom" scope="PROVIDED" />
<orderEntry type="library" name="okhttp" level="project" />
<orderEntry type="module" module-name="intellij.idea.community.build.tasks" />
<orderEntry type="module" module-name="intellij.platform.util.immutableKeyValueStore" />
<orderEntry type="module" module-name="intellij.platform.util.rt.java8" />
</component>
</module>
@@ -9,9 +9,9 @@ import com.intellij.util.LineSeparator
import com.intellij.util.containers.CollectionFactory
import com.intellij.util.containers.ContainerUtil
import com.intellij.util.diff.Diff
import com.intellij.util.io.Murmur3_32Hash
import com.intellij.util.io.directoryStreamIfExists
import com.intellij.util.io.systemIndependentPath
import com.intellij.util.lang.Murmur3_32Hash
import com.intellij.util.readXmlAsModel
import org.jetbrains.jps.model.JpsSimpleElement
import org.jetbrains.jps.model.java.JavaResourceRootType

@@ -1,4 +1,4 @@
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.intellij.build

import gnu.trove.THashSet
@@ -21,6 +21,7 @@ class LibraryLicensesTester(private val project: JpsProject, private val license
project.modules.filter { it.name !in nonPublicModules
&& !it.name.contains("guiTests")
&& !it.name.startsWith("fleet")
&& it.name != "intellij.platform.util.immutableKeyValueStore.benchmark"
&& !it.name.contains("integrationTests", ignoreCase = true)}.forEach { module ->
JpsJavaExtensionService.dependencies(module).includedIn(JpsJavaClasspathKind.PRODUCTION_RUNTIME).libraries.forEach {
libraries[it] = module

@@ -1,5 +1,5 @@
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
@file:Suppress("ReplaceNegatedIsEmptyWithIsNotEmpty", "ReplaceGetOrSet")
@file:Suppress("ReplaceNegatedIsEmptyWithIsNotEmpty", "ReplaceGetOrSet", "ReplacePutWithAssignment")
package com.intellij.ide.plugins

import com.intellij.diagnostic.PluginException
@@ -20,7 +20,6 @@ import java.util.function.BiPredicate
import java.util.function.Function

private val DEFAULT_CLASSLOADER_CONFIGURATION = UrlClassLoader.build().useCache()
private val EMPTY_DESCRIPTOR_ARRAY = emptyArray<IdeaPluginDescriptorImpl>()

@ApiStatus.Internal
class ClassLoaderConfigurator(
@@ -109,8 +108,7 @@ class ClassLoaderConfigurator(
}

val mimicJarUrlConnection = !module.isBundled && module.vendor != "JetBrains"
val pluginClassPath = ClassPath(files, Collections.emptySet(), DEFAULT_CLASSLOADER_CONFIGURATION, resourceFileFactory,
mimicJarUrlConnection)
val pluginClassPath = ClassPath(files, DEFAULT_CLASSLOADER_CONFIGURATION, resourceFileFactory, mimicJarUrlConnection)
val mainInfo = MainInfo(classPath = pluginClassPath, files = files, libDirectories = libDirectories)
val existing = mainToClassPath.put(module.pluginId, mainInfo)
if (existing != null) {

@@ -11,6 +11,7 @@ import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.util.ShutDownTracker;
import com.intellij.util.SmartList;
import com.intellij.util.lang.ClassPath;
import com.intellij.util.lang.ClasspathCache;
import com.intellij.util.lang.Resource;
import com.intellij.util.lang.UrlClassLoader;
import com.intellij.util.ui.EDT;
@@ -143,7 +144,7 @@ public final class PluginClassLoader extends UrlClassLoader implements PluginAwa
@NotNull PluginDescriptor pluginDescriptor,
@Nullable Path pluginRoot,
@NotNull ClassLoader coreLoader) {
super(builder, null, isParallelCapable, false);
super(builder, null, isParallelCapable);

instanceId = instanceIdProducer.incrementAndGet();

@@ -244,13 +245,17 @@ public final class PluginClassLoader extends UrlClassLoader implements PluginAwa
return coreLoader.loadClass(name);
}

String fileNameWithoutExtension = name.replace('.', '/');
String fileName = fileNameWithoutExtension + ClasspathCache.CLASS_EXTENSION;
long packageNameHash = ClasspathCache.getPackageNameHash(fileNameWithoutExtension, fileNameWithoutExtension.lastIndexOf('/'));

long startTime = StartUpMeasurer.measuringPluginStartupCosts ? StartUpMeasurer.getCurrentTime() : -1;
Class<?> c;
PluginException error = null;
try {
String consistencyError = resolveScopeManager.isDefinitelyAlienClass(name, packagePrefix, forceLoadFromSubPluginClassloader);
if (consistencyError == null) {
c = loadClassInsideSelf(name, forceLoadFromSubPluginClassloader);
c = loadClassInsideSelf(name, fileName, packageNameHash, forceLoadFromSubPluginClassloader);
}
else {
if (!consistencyError.isEmpty()) {
@@ -278,7 +283,7 @@ public final class PluginClassLoader extends UrlClassLoader implements PluginAwa
}
continue;
}
c = pluginClassLoader.loadClassInsideSelf(name, false);
c = pluginClassLoader.loadClassInsideSelf(name, fileName, packageNameHash, false);
}
catch (IOException e) {
throw new ClassNotFoundException(name, e);
@@ -289,7 +294,7 @@ public final class PluginClassLoader extends UrlClassLoader implements PluginAwa
}
else if (classloader instanceof UrlClassLoader) {
try {
c = ((UrlClassLoader)classloader).loadClassInsideSelf(name, false);
c = ((UrlClassLoader)classloader).loadClassInsideSelf(name, fileName, packageNameHash, false);
}
catch (IOException e) {
throw new ClassNotFoundException(name, e);
@@ -384,7 +389,10 @@ public final class PluginClassLoader extends UrlClassLoader implements PluginAwa
}

@Override
public @Nullable Class<?> loadClassInsideSelf(@NotNull String name, boolean forceLoadFromSubPluginClassloader) throws IOException {
public @Nullable Class<?> loadClassInsideSelf(String name,
String fileName,
long packageNameHash,
boolean forceLoadFromSubPluginClassloader) throws IOException {
synchronized (getClassLoadingLock(name)) {
Class<?> c = findLoadedClass(name);
if (c != null && c.getClassLoader() == this) {
@@ -393,7 +401,7 @@ public final class PluginClassLoader extends UrlClassLoader implements PluginAwa

Writer logStream = PluginClassLoader.logStream;
try {
c = classPath.findClass(name, classDataConsumer);
c = classPath.findClass(name, fileName, packageNameHash, classDataConsumer);
}
catch (LinkageError e) {
if (logStream != null) {
@@ -470,7 +478,7 @@ public final class PluginClassLoader extends UrlClassLoader implements PluginAwa
}
}
}
return result;
return null;
}

@Override

@@ -242,7 +242,7 @@ class JdkCommandLineSetup(private val request: TargetEnvironmentRequest) {
var dynamicMainClass = false

// copies agent .jar files to the beginning of the classpath to load agent classes faster
if (vmParameters.isUrlClassloader()) {
if (isUrlClassloader(vmParameters)) {
if (request !is LocalTargetEnvironmentRequest) {
throw CantRunException(LangCoreBundle.message("error.message.cannot.run.application.with.urlclasspath.on.the.remote.target"))
}
@@ -480,7 +480,7 @@ class JdkCommandLineSetup(private val request: TargetEnvironmentRequest) {
classpath.add(requestUploadIntoTarget(JavaLanguageRuntimeType.CLASS_PATH_VOLUME, it))
}
}
if (vmParameters.isUrlClassloader()) {
if (isUrlClassloader(vmParameters)) {
if (request !is LocalTargetEnvironmentRequest) {
throw CantRunException(LangCoreBundle.message("error.message.cannot.run.application.with.urlclasspath.on.the.remote.target"))
}
@@ -708,8 +708,8 @@ class JdkCommandLineSetup(private val request: TargetEnvironmentRequest) {
return this.hasParameter("-cp") || this.hasParameter("-classpath") || this.hasParameter("--class-path")
}

private fun ParametersList.isUrlClassloader(): Boolean {
return UrlClassLoader::class.java.name == this.getPropertyValue("java.system.class.loader")
private fun isUrlClassloader(parametersList: ParametersList): Boolean {
return (parametersList.getPropertyValue("java.system.class.loader") ?: "").startsWith("com.intellij.util.lang.")
}

private fun ParametersList.isExplicitModulePath(): Boolean {

@@ -13,14 +13,12 @@ import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.ClearableLazyValue;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.objectTree.ThrowableInterner;
import com.intellij.project.ProjectKt;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.UriUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.io.URLUtil;
import org.apache.velocity.runtime.ParserPool;
@@ -49,12 +47,12 @@ class FileTemplatesLoader implements Disposable {
private static final String DESCRIPTION_FILE_EXTENSION = "html";
private static final String DESCRIPTION_EXTENSION_SUFFIX = "." + DESCRIPTION_FILE_EXTENSION;

private static final Map<String, String> MANAGER_TO_DIR = ContainerUtil.newHashMap(
Pair.create(FileTemplateManager.DEFAULT_TEMPLATES_CATEGORY, ""),
Pair.create(FileTemplateManager.INTERNAL_TEMPLATES_CATEGORY, "internal"),
Pair.create(FileTemplateManager.INCLUDES_TEMPLATES_CATEGORY, "includes"),
Pair.create(FileTemplateManager.CODE_TEMPLATES_CATEGORY, "code"),
Pair.create(FileTemplateManager.J2EE_TEMPLATES_CATEGORY, "j2ee")
private static final Map<String, String> MANAGER_TO_DIR = Map.of(
FileTemplateManager.DEFAULT_TEMPLATES_CATEGORY, "",
FileTemplateManager.INTERNAL_TEMPLATES_CATEGORY, "internal",
FileTemplateManager.INCLUDES_TEMPLATES_CATEGORY, "includes",
FileTemplateManager.CODE_TEMPLATES_CATEGORY, "code",
FileTemplateManager.J2EE_TEMPLATES_CATEGORY, "j2ee"
);

private final ClearableLazyValue<LoadedConfiguration> myManagers;
@@ -176,10 +174,10 @@ class FileTemplatesLoader implements Disposable {
private static @NotNull FileTemplateLoadResult loadDefaultTemplates(@NotNull List<String> prefixes) {
FileTemplateLoadResult result = new FileTemplateLoadResult(new MultiMap<>());
Set<URL> processedUrls = new HashSet<>();
Set<ClassLoader> processedLoaders = new HashSet<>();
Set<ClassLoader> processedLoaders = Collections.newSetFromMap(new IdentityHashMap<>());
for (IdeaPluginDescriptorImpl plugin : PluginManagerCore.getPluginSet().enabledPlugins) {
ClassLoader loader = plugin.getClassLoader();
if (loader instanceof PluginAwareClassLoader && ((PluginAwareClassLoader)loader).getFiles().isEmpty() ||
if (((loader instanceof PluginAwareClassLoader) && ((PluginAwareClassLoader)loader).getFiles().isEmpty()) ||
!processedLoaders.add(loader)) {
// test or development mode, when IDEA_CORE's loader contains all the classpath
continue;

@@ -17,9 +17,8 @@ import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.Strings;
import com.intellij.util.CollectConsumer;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ResourceUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.lang.UrlClassLoader;
import kotlin.Pair;
import org.jdom.Element;
import org.jdom.JDOMException;
@@ -33,9 +32,6 @@ import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -135,47 +131,16 @@ public final class SearchableOptionsRegistrarImpl extends SearchableOptionsRegis
|
||||
identifierTable = processor.getIdentifierTable();
|
||||
}
|
||||
|
||||
static void processSearchableOptions(@NotNull Predicate<? super String> fileNameFilter, @NotNull BiConsumer<? super String, ? super Element> consumer) {
|
||||
static void processSearchableOptions(@NotNull Predicate<String> fileNameFilter, @NotNull BiConsumer<String, Element> consumer) {
|
||||
Set<ClassLoader> visited = Collections.newSetFromMap(new IdentityHashMap<>());
|
||||
MethodType methodType = MethodType.methodType(void.class, String.class, Predicate.class, BiConsumer.class);
|
||||
MethodHandles.Lookup lookup = MethodHandles.lookup();
|
||||
|
||||
Map<Class<?>, MethodHandle> handleCache = new HashMap<>();
|
||||
|
||||
for (IdeaPluginDescriptor plugin : PluginManagerCore.getLoadedPlugins()) {
|
||||
ClassLoader classLoader = plugin.getClassLoader();
|
||||
if (!visited.add(classLoader)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
MethodHandle methodHandle;
|
||||
Class<?> loaderClass = classLoader.getClass();
|
||||
if (loaderClass.isAnonymousClass() || loaderClass.isMemberClass()) {
|
||||
loaderClass = loaderClass.getSuperclass();
|
||||
}
|
||||
|
||||
try {
|
||||
methodHandle = handleCache.computeIfAbsent(loaderClass, aClass -> {
|
||||
try {
|
||||
return lookup.findVirtual(aClass, "processResources", methodType);
|
||||
}
|
||||
catch (NoSuchMethodException | IllegalAccessException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (RuntimeException e) {
|
||||
if (e.getCause() instanceof NoSuchMethodException) {
|
||||
LOG.error(loaderClass + " is not supported", e);
|
||||
}
|
||||
else {
|
||||
LOG.error(e);
|
||||
}
|
||||
for (IdeaPluginDescriptor plugin : PluginManagerCore.getPluginSet().getRawListOfEnabledModules()) {
|
||||
ClassLoader classLoader = plugin.getPluginClassLoader();
|
||||
if (!(classLoader instanceof UrlClassLoader) || !visited.add(classLoader)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
methodHandle.invoke(classLoader, "search", fileNameFilter, (BiConsumer<String, InputStream>)(name, stream) -> {
|
||||
((UrlClassLoader)classLoader).processResources("search", fileNameFilter, (name, stream) -> {
|
||||
try {
|
||||
consumer.accept(name, JDOMUtil.load(stream));
|
||||
}
|
||||
@@ -184,8 +149,8 @@ public final class SearchableOptionsRegistrarImpl extends SearchableOptionsRegis
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (Throwable throwable) {
|
||||
ExceptionUtil.rethrow(throwable);
|
||||
catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
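The rewritten processSearchableOptions drops the MethodHandle machinery: instead of looking up a processResources method reflectively per loader class, it narrows to UrlClassLoader and calls the method directly. A hedged sketch of that dispatch style, with a hypothetical stand-in interface rather than the real platform types:

// Hypothetical stand-in; the real capability lives on UrlClassLoader in the platform.
interface ResourceProcessingLoader {
  void processResources(String dir,
                        java.util.function.Predicate<? super String> nameFilter,
                        java.util.function.BiConsumer<? super String, ? super java.io.InputStream> consumer)
    throws java.io.IOException;
}

final class DirectDispatchDemo {
  static void process(ClassLoader loader) throws java.io.IOException {
    // Direct instanceof + cast replaces the per-loader-class MethodHandle cache;
    // loaders without the capability are simply skipped.
    if (loader instanceof ResourceProcessingLoader) {
      ((ResourceProcessingLoader)loader).processResources("search",
                                                          name -> name.endsWith(".xml"),
                                                          (name, stream) -> System.out.println(name));
    }
  }
}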
@@ -242,9 +207,10 @@ public final class SearchableOptionsRegistrarImpl extends SearchableOptionsRegis
@Nullable Collection<Configurable> configurables,
@NotNull String option,
@Nullable Project project) {
if (ContainerUtil.isEmpty(configurables)) {
if (configurables == null || configurables.isEmpty()) {
configurables = null;
}

Collection<Configurable> effectiveConfigurables;
if (configurables == null) {
effectiveConfigurables = new LinkedHashSet<>();
@@ -257,15 +223,17 @@ public final class SearchableOptionsRegistrarImpl extends SearchableOptionsRegis
effectiveConfigurables = configurables;
}

String optionToCheck = StringUtil.toLowerCase(option.trim());
String optionToCheck = Strings.toLowerCase(option.trim());
Set<String> options = getProcessedWordsWithoutStemming(optionToCheck);

Set<Configurable> nameHits = new LinkedHashSet<>();
Set<Configurable> nameFullHits = new LinkedHashSet<>();

for (Configurable each : effectiveConfigurables) {
if (each.getDisplayName() == null) continue;
final String displayName = StringUtil.toLowerCase(each.getDisplayName());
if (each.getDisplayName() == null) {
continue;
}
final String displayName = Strings.toLowerCase(each.getDisplayName());
final List<String> allWords = StringUtil.getWordsIn(displayName);
if (displayName.contains(optionToCheck)) {
nameFullHits.add(each);

@@ -108,8 +108,6 @@ public final class StartupUtil {
StartUpMeasurer.addTimings(startupTimings, "bootstrap");
startupStart = StartUpMeasurer.startActivity("app initialization preparation");

Main.setFlags(args);

CommandLineArgs.parse(args);

LoadingState.setStrictMode();
@@ -218,11 +216,11 @@ public final class StartupUtil {
activity.end();

// plugins cannot be loaded when config import is needed, because plugins may be added after importing
Java11Shim.INSTANCE = new Java11ShimImpl();
if (!configImportNeeded) {
ZipFilePool.POOL = new ZipFilePoolImpl();
PluginManagerCore.scheduleDescriptorLoading();
}
Java11Shim.INSTANCE = new Java11ShimImpl();

forkJoinPool.execute(() -> {
setupSystemLibraries();
@@ -565,7 +563,7 @@ public final class StartupUtil {
if (document != null) {
Agreements.showEndUserAndDataSharingAgreements(document);
}
else if (AppUIUtil.needToShowUsageStatsConsent()){
else if (AppUIUtil.needToShowUsageStatsConsent()) {
Agreements.showDataSharingAgreement();
}
activity.end();

@@ -2,8 +2,8 @@
package com.intellij.idea;

import com.intellij.util.lang.ClassLoadingLocks;
import com.intellij.util.lang.ImmutableZipEntry;
import com.intellij.util.lang.ImmutableZipFile;
import com.intellij.util.lang.ZipFile;
import com.intellij.util.lang.ZipFilePool;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
@@ -21,7 +21,7 @@ public final class ZipFilePoolImpl extends ZipFilePool {
private final ClassLoadingLocks<Path> lock = new ClassLoadingLocks<>();

@Override
public @NotNull ImmutableZipFile loadZipFile(@NotNull Path file) throws IOException {
public @NotNull ZipFile loadZipFile(@NotNull Path file) throws IOException {
MyEntryResolver resolver = pool.get(file);
if (resolver == null) {
// doesn't make sense to use pool for requests from class loader (requested only once per class loader)
@@ -39,7 +39,7 @@ public final class ZipFilePoolImpl extends ZipFilePool {
synchronized (lock.getOrCreateLock(file)) {
resolver = pool.get(file);
if (resolver == null) {
ImmutableZipFile zipFile = ImmutableZipFile.load(file);
ZipFile zipFile = ImmutableZipFile.load(file);
resolver = new MyEntryResolver(zipFile);
pool.put(file, resolver);
}
@@ -49,16 +49,15 @@ public final class ZipFilePoolImpl extends ZipFilePool {
}

private static final class MyEntryResolver implements ZipFilePool.EntryResolver {
private final ImmutableZipFile zipFile;
private final ZipFile zipFile;

MyEntryResolver(ImmutableZipFile zipFile) {
MyEntryResolver(ZipFile zipFile) {
this.zipFile = zipFile;
}

@Override
public @Nullable InputStream loadZipEntry(@NotNull String path) throws IOException {
ImmutableZipEntry entry = zipFile.getEntry(path.charAt(0) == '/' ? path.substring(1) : path);
return entry == null ? null : entry.getInputStream(zipFile);
return zipFile.getInputStream(path.charAt(0) == '/' ? path.substring(1) : path);
}

@Override

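ZipFilePoolImpl keeps one resolver per path and creates it at most once, using a get / per-file lock / re-check / put sequence. The same guarantee can be sketched with plain JDK types; ConcurrentHashMap.computeIfAbsent is assumed here as a stand-in for the pool plus ClassLoadingLocks combination:

import java.nio.file.Path;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative pool: one parsed archive handle per path, created at most once.
final class ArchivePoolSketch<T> {
  private final Map<Path, T> pool = new ConcurrentHashMap<>();

  T get(Path file, java.util.function.Function<Path, T> loader) {
    // computeIfAbsent gives the same "check, then load under a per-key lock" guarantee
    // that the pool above implements manually.
    return pool.computeIfAbsent(file, loader);
  }
}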
@@ -67,5 +67,6 @@
<orderEntry type="module" module-name="intellij.platform.ide.util.netty" scope="TEST" />
<orderEntry type="library" scope="TEST" name="JUnit5" level="project" />
<orderEntry type="module" module-name="intellij.platform.util.zip" scope="TEST" />
<orderEntry type="module" module-name="intellij.platform.util.rt.java8" />
</component>
</module>
@@ -9,8 +9,8 @@ import com.intellij.openapi.util.BuildNumber
import com.intellij.testFramework.assertions.Assertions.assertThat
import com.intellij.testFramework.assertions.Assertions.assertThatThrownBy
import com.intellij.testFramework.rules.InMemoryFsRule
import com.intellij.util.io.Murmur3_32Hash
import com.intellij.util.io.directoryStreamIfExists
import org.jetbrains.xxh3.Xxh3
import org.junit.Rule
import org.junit.Test
import org.junit.rules.TestName
@@ -111,7 +111,8 @@ internal class ClassLoaderConfiguratorTest {
val rootDir = inMemoryFs.fs.getPath("/")

// toUnsignedLong - avoid `-` symbol
val pluginIdSuffix = Integer.toUnsignedLong(Murmur3_32Hash.MURMUR3_32.hashString(javaClass.name + name.methodName)).toString(36)
val pluginIdSuffix = Integer.toUnsignedLong(
Xxh3.hash32(javaClass.name + name.methodName)).toString(36)
val dependencyId = "p_dependency_$pluginIdSuffix"
plugin(rootDir, """
<idea-plugin package="com.bar">

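The test derives a plugin id suffix by hashing the test name and printing the unsigned value in base 36, so the suffix stays short and never contains a minus sign. An illustrative sketch with String.hashCode standing in for Xxh3.hash32:

final class SuffixDemo {
  static String suffix(String seed) {
    int h = seed.hashCode();                              // any stable 32-bit hash works here
    return Long.toString(Integer.toUnsignedLong(h), 36);  // toUnsignedLong avoids a leading '-'
  }

  public static void main(String[] args) {
    System.out.println("p_dependency_" + suffix("ClassLoaderConfiguratorTest.testX"));
  }
}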
@@ -1,29 +0,0 @@
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.search;

import com.intellij.ide.fileTemplates.impl.AllFileTemplatesConfigurable;
import com.intellij.ide.ui.search.ConfigurableHit;
import com.intellij.ide.ui.search.SearchableOptionsRegistrar;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurableGroup;
import com.intellij.openapi.options.ex.ConfigurableExtensionPointUtil;
import com.intellij.testFramework.LightPlatformTestCase;

import javax.swing.event.DocumentEvent;
import java.util.Collections;
import java.util.List;
import java.util.Set;

public class SearchableOptionsTest extends LightPlatformTestCase {
public void testFindCodeTemplates() {
List<ConfigurableGroup> groups = Collections.singletonList(ConfigurableExtensionPointUtil.getConfigurableGroup(getProject(), false));
ConfigurableHit configurables = SearchableOptionsRegistrar.getInstance().getConfigurables(groups, DocumentEvent.EventType.INSERT, null, "method", getProject());
Set<Configurable> configurableSet = configurables.getAll();
for (Configurable configurable : configurableSet) {
if (configurable.getDisplayName().equals(new AllFileTemplatesConfigurable(getProject()).getDisplayName())) {
return;
}
}
fail("File Templates are not found");
}
}
@@ -1,4 +1,4 @@
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.process.impl;

import com.intellij.execution.ExecutionException;
@@ -22,7 +22,10 @@ import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
@@ -390,7 +393,7 @@ public final class ProcessListUtil {
try {
return PathManager.findBinFileWithException(WIN_PROCESS_LIST_HELPER_FILENAME);
}
catch (FileNotFoundException e) {
catch (RuntimeException e) {
LOG.error(e);
return null;
}

@@ -7,7 +7,7 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="intellij.platform.util.rt" />
<orderEntry type="library" name="jetbrains-annotations" level="project" />
<orderEntry type="module" module-name="intellij.platform.util.rt.java8" />
</component>
</module>
@@ -1,7 +1,6 @@
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util;

import com.intellij.openapi.diagnostic.LoggerRt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

@@ -35,7 +34,8 @@ public final class UrlUtilRt {
return url;
}
catch (MalformedURLException e) {
LoggerRt.getInstance(UrlUtilRt.class).error(e);
//noinspection CallToPrintStackTrace
e.printStackTrace();
return null;
}
}

@@ -1,7 +1,7 @@
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.lang;

import com.intellij.util.lang.Loader.Attribute;
import com.intellij.util.lang.JarLoader.Attribute;
import org.jetbrains.annotations.NotNull;

import java.nio.file.Path;
@@ -9,7 +9,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class CachePoolImpl implements UrlClassLoader.CachePool {
final Map<Path, ClasspathCache.LoaderData> loaderIndexCache = new ConcurrentHashMap<>();
final Map<Path, ClasspathCache.IndexRegistrar> loaderIndexCache = new ConcurrentHashMap<>();
private final Map<Path, Map<Attribute, String>> manifestData = new ConcurrentHashMap<>();

Map<Attribute, String> getManifestData(@NotNull Path file) {

@@ -1,7 +1,6 @@
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.lang;

import com.intellij.openapi.diagnostic.LoggerRt;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -16,7 +15,6 @@ import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.security.ProtectionDomain;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
@@ -25,11 +23,10 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.jar.Attributes;

@ApiStatus.Internal
public final class ClassPath {
static final String CLASS_EXTENSION = ".class";

public static final String CLASSPATH_JAR_FILE_NAME_PREFIX = "classpath";

// record loaded class name and source path
@@ -55,7 +52,6 @@ public final class ClassPath {
private final AtomicInteger lastLoaderProcessed = new AtomicInteger();
private final Map<Path, Loader> loaderMap = new HashMap<>();
private final ClasspathCache cache = new ClasspathCache();
private final Set<Path> filesWithProtectionDomain;

// true implies that the .jar file will not be modified in the lifetime of the JarLoader
final boolean lockJars;
@@ -63,7 +59,6 @@ public final class ClassPath {
final boolean isClassPathIndexEnabled;
private final @Nullable CachePoolImpl cachePool;
private final @Nullable Predicate<? super Path> cachingCondition;
final boolean errorOnMissingJar;
static {
// insertion order must be preserved
loadedClasses = recordLoadingInfo ? new ConcurrentLinkedQueue<>() : null;
@@ -79,11 +74,11 @@ public final class ClassPath {
}

interface ClassDataConsumer {
boolean isByteBufferSupported(String name, @Nullable ProtectionDomain protectionDomain);
boolean isByteBufferSupported(String name);

Class<?> consumeClassData(String name, byte[] data, Loader loader, @Nullable ProtectionDomain protectionDomain) throws IOException;
Class<?> consumeClassData(String name, byte[] data, Loader loader) throws IOException;

Class<?> consumeClassData(String name, ByteBuffer data, Loader loader, @Nullable ProtectionDomain protectionDomain) throws IOException;
Class<?> consumeClassData(String name, ByteBuffer data, Loader loader) throws IOException;
}

public @Nullable Function<Path, ResourceFile> getResourceFileFactory() {
@@ -91,7 +86,6 @@ public final class ClassPath {
}

public ClassPath(@NotNull List<Path> files,
@NotNull Set<Path> filesWithProtectionDomain,
@NotNull UrlClassLoader.Builder configuration,
@Nullable Function<Path, ResourceFile> resourceFileFactory,
boolean mimicJarUrlConnection) {
@@ -100,8 +94,6 @@ public final class ClassPath {
cachePool = configuration.cachePool;
cachingCondition = configuration.cachingCondition;
isClassPathIndexEnabled = configuration.isClassPathIndexEnabled;
errorOnMissingJar = configuration.errorOnMissingJar;
this.filesWithProtectionDomain = filesWithProtectionDomain;
this.mimicJarUrlConnection = mimicJarUrlConnection;

this.files = new ArrayList<>(files.size());
@@ -168,13 +160,15 @@ public final class ClassPath {
allUrlsWereProcessed = false;
}

public @Nullable Class<?> findClass(@NotNull String className, @NotNull ClassDataConsumer classDataConsumer) throws IOException {
public @Nullable Class<?> findClass(String className,
String fileName,
long packageNameHash,
ClassDataConsumer classDataConsumer) throws IOException {
long start = classLoading.startTiming();
try {
String fileName = className.replace('.', '/') + CLASS_EXTENSION;
int i;
if (useCache) {
Loader[] loaders = cache.getClassLoadersByName(fileName);
Loader[] loaders = cache.getClassLoadersByPackageNameHash(packageNameHash);
if (loaders != null) {
for (Loader loader : loaders) {
if (loader.containsName(fileName)) {
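findClass now receives a precomputed package-name hash and asks the cache only for loaders that registered that package, instead of re-deriving the package from the file name on every lookup. A simplified sketch of the indexing idea, with String.hashCode standing in for the 64-bit hash used by the real cache:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Loaders are indexed by a hash of the package portion of the resource path,
// so a class lookup touches only loaders that declared that package.
final class PackageIndexSketch {
  private final Map<Integer, List<String>> loadersByPackage = new HashMap<>();

  static int packageHash(String resourcePath) {
    int end = resourcePath.lastIndexOf('/');
    return end <= 0 ? 0 : resourcePath.substring(0, end).hashCode();
  }

  void register(String resourcePath, String loaderName) {
    loadersByPackage.computeIfAbsent(packageHash(resourcePath), k -> new ArrayList<>()).add(loaderName);
  }

  List<String> candidates(String className) {
    String fileName = className.replace('.', '/') + ".class";
    return loadersByPackage.getOrDefault(packageHash(fileName), List.of());
  }
}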
@@ -224,7 +218,7 @@ public final class ClassPath {
return null;
}
if (loadedClasses != null) {
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(fileName, loader.path));
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(fileName, loader.getPath()));
}
return result;
}
@@ -241,7 +235,7 @@ public final class ClassPath {
Resource resource = loader.getResource(resourceName);
if (resource != null) {
if (loadedClasses != null) {
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(resourceName, loader.path));
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(resourceName, loader.getPath()));
}
return resource;
}
@@ -268,7 +262,7 @@ public final class ClassPath {
Resource resource = loader.getResource(resourceName);
if (resource != null) {
if (loadedClasses != null) {
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(resourceName, loader.path));
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(resourceName, loader.getPath()));
}
return resource;
}
@@ -299,7 +293,7 @@ public final class ClassPath {
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException {
if (useCache && allUrlsWereProcessed) {
// getLoadersByName compute package name by name, so, add ending slash
Loader[] loaders = cache.getLoadersByName(dir + '/');
Loader[] loaders = cache.getLoadersByResourcePackageDir(dir);
if (loaders != null) {
for (Loader loader : loaders) {
loader.processResources(dir, fileNameFilter, consumer);
@@ -338,9 +332,10 @@ public final class ClassPath {
try {
Loader loader = createLoader(path);
if (loader != null) {
if (useCache) {
initLoaderCache(path, loader);
if (useCache && files.isEmpty()) {
allUrlsWereProcessed = true;
}

loaders.add(loader);
// volatile write
loaderMap.put(path, loader);
@@ -348,7 +343,8 @@ public final class ClassPath {
}
}
catch (IOException e) {
LoggerRt.getInstance(ClassPath.class).info("path: " + path, e);
//noinspection CallToPrintStackTrace
e.printStackTrace();
}
}

@@ -358,7 +354,7 @@ public final class ClassPath {
public @NotNull List<Path> getBaseUrls() {
List<Path> result = new ArrayList<>();
for (Loader loader : loaders) {
result.add(loader.path);
result.add(loader.getPath());
}
return result;
}
@@ -373,76 +369,70 @@ public final class ClassPath {
}

if (fileAttributes.isDirectory()) {
return new FileLoader(file, isClassPathIndexEnabled);
return useCache ? createCachingFileLoader(file) : new FileLoader(file, null, null, isClassPathIndexEnabled);
}
else if (!fileAttributes.isRegularFile()) {
return null;
}

JarLoader loader;
if (filesWithProtectionDomain.contains(file)) {
loader = new SecureJarLoader(file, this);
}
else {
ResourceFile zipFile;
if (resourceFileFactory == null) {
zipFile = new JdkZipResourceFile(file, lockJars, false);
ResourceFile zipFile = resourceFileFactory == null ? new JdkZipResourceFile(file, lockJars) : resourceFileFactory.apply(file);
JarLoader loader = new JarLoader(file, this, zipFile);
if (useCache) {
ClasspathCache.IndexRegistrar data = cachePool == null ? null : cachePool.loaderIndexCache.get(file);
if (data == null) {
data = zipFile.buildClassPathCacheData();
if (cachePool != null && cachingCondition != null && cachingCondition.test(file)) {
cachePool.loaderIndexCache.put(file, data);
}
}
else {
zipFile = resourceFileFactory.apply(file);
}
loader = new JarLoader(file, this, zipFile);
cache.applyLoaderData(data, loader);
}

String filePath = file.toString();
if (filePath.startsWith(CLASSPATH_JAR_FILE_NAME_PREFIX, filePath.lastIndexOf(File.separatorChar) + 1)) {
String[] referencedJars = loadManifestClasspath(loader);
if (referencedJars != null) {
long startReferenced = logLoadingInfo ? System.nanoTime() : 0;
List<Path> urls = new ArrayList<>(referencedJars.length);
for (String referencedJar : referencedJars) {
try {
urls.add(Paths.get(UrlClassLoader.urlToFilePath(referencedJar)));
}
catch (Exception e) {
LoggerRt.getInstance(ClassPath.class).warn("file: " + file + " / " + referencedJar, e);
}
}
addFiles(urls);
if (logLoadingInfo) {
//noinspection UseOfSystemOutOrSystemErr
System.out.println("Loaded all " + referencedJars.length + " files " + (System.nanoTime() - startReferenced) / 1000000 + "ms");
}
}
addFromManifestClassPathIfNeeded(file, zipFile, loader);
}
return loader;
}

private void initLoaderCache(@NotNull Path file, @NotNull Loader loader) throws IOException {
ClasspathCache.IndexRegistrar data = cachePool == null ? null : cachePool.loaderIndexCache.get(file);
if (data == null) {
data = loader.buildData();
if (cachePool != null && cachingCondition != null && cachingCondition.test(file)) {
ClasspathCache.LoaderData loaderData =
data instanceof ClasspathCache.LoaderData ? (ClasspathCache.LoaderData)data : ((ClasspathCache.LoaderDataBuilder)data).build();
cachePool.loaderIndexCache.put(file, loaderData);
data = loaderData;
private void addFromManifestClassPathIfNeeded(@NotNull Path file, ResourceFile zipFile, JarLoader loader) {
String[] referencedJars = loadManifestClasspath(loader, zipFile);
if (referencedJars != null) {
long startReferenced = logLoadingInfo ? System.nanoTime() : 0;
List<Path> urls = new ArrayList<>(referencedJars.length);
for (String referencedJar : referencedJars) {
try {
urls.add(Paths.get(UrlClassLoader.urlToFilePath(referencedJar)));
}
catch (Exception e) {
//noinspection UseOfSystemOutOrSystemErr
System.err.println("file: " + file + " / " + referencedJar + " " + e);
}
}
addFiles(urls);
if (logLoadingInfo) {
//noinspection UseOfSystemOutOrSystemErr
System.out.println("Loaded all " + referencedJars.length + " files " + (System.nanoTime() - startReferenced) / 1000000 + "ms");
}
}
cache.applyLoaderData(data, loader);
}

if (files.isEmpty()) {
allUrlsWereProcessed = true;
private @NotNull FileLoader createCachingFileLoader(@NotNull Path file) {
ClasspathCache.IndexRegistrar data = cachePool == null ? null : cachePool.loaderIndexCache.get(file);
BiConsumer<ClasspathCache.IndexRegistrar, Loader> consumer;
if (data == null) {
consumer = (registrar, loader) -> {
if (cachePool != null && cachingCondition != null && cachingCondition.test(file)) {
cachePool.loaderIndexCache.put(file, registrar);
}
cache.applyLoaderData(registrar, loader);
};
return new FileLoader(file, null, consumer, isClassPathIndexEnabled);
}
}

Map<Loader.Attribute, String> getManifestData(@NotNull Path file) {
return useCache && cachePool != null ? cachePool.getManifestData(file) : null;
}

void cacheManifestData(@NotNull Path file, @NotNull Map<Loader.Attribute, String> manifestAttributes) {
if (useCache && cachePool != null && cachingCondition != null && cachingCondition.test(file)) {
cachePool.cacheManifestData(file, manifestAttributes);
else {
FileLoader loader = new FileLoader(file, data.getNameFilter(), null, isClassPathIndexEnabled);
cache.applyLoaderData(data, loader);
return loader;
}
}

@@ -554,9 +544,17 @@ public final class ClassPath {
}
}

private static String @Nullable [] loadManifestClasspath(@NotNull JarLoader loader) {
private String @Nullable [] loadManifestClasspath(@NotNull JarLoader loader, @NotNull ResourceFile zipFile) {
try {
String classPath = loader.getClassPathManifestAttribute();
Map<JarLoader.Attribute, String> result = useCache && cachePool != null ? cachePool.getManifestData(loader.getPath()) : null;
if (result == null) {
Attributes manifestAttributes = zipFile.loadManifestAttributes();
result = manifestAttributes == null ? Collections.emptyMap() : JarLoader.getAttributes(manifestAttributes);
if (useCache && cachePool != null && cachingCondition != null && cachingCondition.test(loader.getPath())) {
cachePool.cacheManifestData(loader.getPath(), result);
}
}
String classPath = result.get(JarLoader.Attribute.CLASS_PATH);
if (classPath != null) {
String[] urls = classPath.split(" ");
if (urls.length > 0 && urls[0].startsWith("file:")) {
@@ -579,18 +577,15 @@ public final class ClassPath {
}

@Override
public boolean isByteBufferSupported(String name, @Nullable ProtectionDomain protectionDomain) {
return classDataConsumer.isByteBufferSupported(name, protectionDomain);
public boolean isByteBufferSupported(String name) {
return classDataConsumer.isByteBufferSupported(name);
}

@Override
public Class<?> consumeClassData(String name,
byte[] data,
Loader loader,
@Nullable ProtectionDomain protectionDomain) throws IOException {
public Class<?> consumeClassData(String name, byte[] data, Loader loader) throws IOException {
long start = startTiming();
try {
return classDataConsumer.consumeClassData(name, data, loader, protectionDomain);
return classDataConsumer.consumeClassData(name, data, loader);
}
finally {
record(start);
@@ -598,13 +593,10 @@ public final class ClassPath {
}

@Override
public Class<?> consumeClassData(String name,
ByteBuffer data,
Loader loader,
@Nullable ProtectionDomain protectionDomain) throws IOException {
public Class<?> consumeClassData(String name, ByteBuffer data, Loader loader) throws IOException {
long start = startTiming();
try {
return classDataConsumer.consumeClassData(name, data, loader, protectionDomain);
return classDataConsumer.consumeClassData(name, data, loader);
}
finally {
record(start);
@@ -657,10 +649,10 @@ public final class ClassPath {
long totalTime = timeCounter.addAndGet(time);
int totalRequests = requestCounter.incrementAndGet();
if (logLoadingInfo) {
if (time > 3000000L) {
if (time > 3_000_000L) {
System.out.println(TimeUnit.NANOSECONDS.toMillis(time) + " ms for " + resourceName);
}
if (totalRequests % 10000 == 0) {
if (totalRequests % 10_000 == 0) {
System.out.println(ClassPath.class.getClassLoader() + ", requests: " + totalRequests +
", time:" + TimeUnit.NANOSECONDS.toMillis(totalTime) + "ms");
}

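loadManifestClasspath now reads the jar manifest once, caches the parsed attributes in the CachePool when allowed, and only then looks at the Class-Path value. A small sketch of the underlying manifest read, using only java.util.jar (the caching layer is omitted):

import java.io.IOException;
import java.nio.file.Path;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;

final class ManifestClassPathSketch {
  // Returns the space-separated Class-Path entries of a jar, or null if absent.
  static String[] referencedJars(Path jar) throws IOException {
    try (JarFile jarFile = new JarFile(jar.toFile())) {
      Manifest manifest = jarFile.getManifest();
      if (manifest == null) {
        return null;
      }
      String classPath = manifest.getMainAttributes().getValue(Attributes.Name.CLASS_PATH);
      return classPath == null ? null : classPath.split(" ");
    }
  }
}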
@@ -1,172 +1,165 @@
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.lang;

import com.intellij.util.BloomFilterBase;
import com.intellij.util.containers.IntObjectHashMap;
import com.intellij.util.io.Murmur3_32Hash;
import com.intellij.util.lang.fastutil.StrippedIntOpenHashSet;
import com.intellij.util.lang.fastutil.StrippedLongOpenHashSet;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.xxh3.Xx3UnencodedString;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.function.IntFunction;
import java.util.function.LongFunction;
import java.util.function.LongPredicate;
import java.util.function.Predicate;

@ApiStatus.Internal
public final class ClasspathCache {
private static final double PROBABILITY = 0.005d;
private static final IntObjectHashMap.ArrayProducer<Loader[][]> ARRAY_FACTORY = size -> new Loader[size][];
public static final String CLASS_EXTENSION = ".class";

private volatile IntObjectHashMap<Loader[]> classPackageCache = new IntObjectHashMap<>(ARRAY_FACTORY);
private volatile IntObjectHashMap<Loader[]> resourcePackageCache = new IntObjectHashMap<>(ARRAY_FACTORY);
private static final IntFunction<Loader[][]> ARRAY_FACTORY = size -> new Loader[size][];

private StrippedLongToObjectMap<Loader[]> classPackageCache;
private StrippedLongToObjectMap<Loader[]> resourcePackageCache;

private static final LongFunction<Loader[]> NULL = value -> null;
private volatile LongFunction<Loader[]> classPackageCacheGetter = NULL;
private volatile LongFunction<Loader[]> resourcePackageCacheGetter = NULL;

public interface IndexRegistrar {
void registerPackageIndex(IntObjectHashMap<Loader[]> classMap, IntObjectHashMap<Loader[]> resourceMap, Loader loader);
}

public static final class LoaderData implements IndexRegistrar {
private final int[] resourcePackageHashes;
private final int[] classPackageHashes;
private final NameFilter nameFilter;

LoaderData(int[] resourcePackageHashes, int[] classPackageHashes, NameFilter nameFilter) {
this.resourcePackageHashes = resourcePackageHashes;
this.classPackageHashes = classPackageHashes;
this.nameFilter = nameFilter;
default Predicate<String> getNameFilter() {
return null;
}

int sizeInBytes() {
return Integer.BYTES * 2 +
classPackageHashes.length * Integer.BYTES +
resourcePackageHashes.length * Integer.BYTES +
nameFilter.sizeInBytes();
}
int classPackageCount();

void save(@NotNull ByteBuffer buffer) throws IOException {
buffer.putInt(classPackageHashes.length);
buffer.putInt(resourcePackageHashes.length);
IntBuffer intBuffer = buffer.asIntBuffer();
intBuffer.put(classPackageHashes);
intBuffer.put(resourcePackageHashes);
buffer.position(buffer.position() + intBuffer.position() * Integer.BYTES);
nameFilter.save(buffer);
}
int resourcePackageCount();

@Override
public void registerPackageIndex(IntObjectHashMap<Loader[]> classMap, IntObjectHashMap<Loader[]> resourceMap, Loader loader) {
addResourceEntries(classPackageHashes, classMap, loader);
addResourceEntries(resourcePackageHashes, resourceMap, loader);
long[] classPackages();

loader.setNameFilter(nameFilter);
long[] resourcePackages();

default @Nullable LongPredicate getKeyFilter(boolean forClasses) {
return null;
}
}

static final class LoaderDataBuilder implements IndexRegistrar {
private final StrippedLongOpenHashSet usedNameFingerprints;
private final StrippedIntOpenHashSet resourcePackageHashes = new StrippedIntOpenHashSet();
private final StrippedIntOpenHashSet classPackageHashes = new StrippedIntOpenHashSet();
final StrippedLongSet classPackageHashes = new StrippedLongSet();
final StrippedLongSet resourcePackageHashes = new StrippedLongSet();

LoaderDataBuilder(boolean isNameFilterRequired) {
usedNameFingerprints = isNameFilterRequired ? new StrippedLongOpenHashSet() : null;
@Override
public int classPackageCount() {
return classPackageHashes.size();
}

void andClassName(@NotNull String name) {
usedNameFingerprints.add(NameFilter.toNameFingerprint(name, name.length()));
@Override
public int resourcePackageCount() {
return resourcePackageHashes.size();
}

void addResourceName(@NotNull String name, int end) {
usedNameFingerprints.add(NameFilter.toNameFingerprint(name, end));
@Override
public long[] classPackages() {
return classPackageHashes.keys;
}

void addResourcePackageFromName(@NotNull String path) {
resourcePackageHashes.add(getPackageNameHash(path, path.lastIndexOf('/')));
@Override
public long[] resourcePackages() {
return resourcePackageHashes.keys;
}

@Override
public LongPredicate getKeyFilter(boolean forClasses) {
return new LongPredicate() {
boolean addZero = forClasses ? classPackageHashes.hasNull() : resourcePackageHashes.hasNull();

@Override
public boolean test(long it) {
if (it == 0) {
if (!addZero) {
return false;
}

addZero = false;
}
return true;
}
};
}

void addResourcePackage(@NotNull String path) {
resourcePackageHashes.add(getPackageNameHash(path, path.length()));
}

void addClassPackageFromName(@NotNull String path) {
classPackageHashes.add(getPackageNameHash(path, path.lastIndexOf('/')));
void addPackageFromName(@NotNull String path) {
StrippedLongSet set = path.endsWith(CLASS_EXTENSION) ? classPackageHashes : resourcePackageHashes;
set.add(getPackageNameHash(path, path.lastIndexOf('/')));
}

void addClassPackage(@NotNull String path) {
classPackageHashes.add(getPackageNameHash(path, path.length()));
}

@NotNull LoaderData build() {
return new ClasspathCache.LoaderData(resourcePackageHashes.toArray(), classPackageHashes.toArray(), createNameFilter());
}

private @NotNull NameFilter createNameFilter() {
NameFilter nameFilter = new NameFilter(usedNameFingerprints.size(), PROBABILITY);
StrippedLongOpenHashSet.SetIterator iterator = usedNameFingerprints.iterator();
while (iterator.hasNext()) {
nameFilter.addNameFingerprint(iterator.nextLong());
}
return nameFilter;
}

@Override
public void registerPackageIndex(IntObjectHashMap<Loader[]> classMap, IntObjectHashMap<Loader[]> resourceMap, Loader loader) {
StrippedIntOpenHashSet.SetIterator classIterator = classPackageHashes.iterator();
while (classIterator.hasNext()) {
addResourceEntry(classIterator.nextInt(), classMap, loader);
}

StrippedIntOpenHashSet.SetIterator resourceIterator = resourcePackageHashes.iterator();
while (resourceIterator.hasNext()) {
addResourceEntry(resourceIterator.nextInt(), resourceMap, loader);
}

if (usedNameFingerprints != null) {
loader.setNameFilter(createNameFilter());
}
}
}

void clearCache() {
classPackageCache = new IntObjectHashMap<>(ARRAY_FACTORY);
resourcePackageCache = new IntObjectHashMap<>(ARRAY_FACTORY);
classPackageCacheGetter = NULL;
resourcePackageCacheGetter = NULL;
classPackageCache = null;
resourcePackageCache = null;
}

// executed as part of synchronized getLoaderSlowPath - no concurrent write
// executed as part of synchronized getLoaderSlowPath - not a concurrent write
void applyLoaderData(@NotNull IndexRegistrar registrar, @NotNull Loader loader) {
IntObjectHashMap<Loader[]> newClassPackageCache = new IntObjectHashMap<>(classPackageCache);
IntObjectHashMap<Loader[]> newResourcePackageCache = new IntObjectHashMap<>(resourcePackageCache);
registrar.registerPackageIndex(newClassPackageCache, newResourcePackageCache, loader);
classPackageCache = newClassPackageCache;
resourcePackageCache = newResourcePackageCache;
if (registrar.classPackageCount() != 0) {
StrippedLongToObjectMap<Loader[]> newClassMap = classPackageCache == null
? new StrippedLongToObjectMap<>(ARRAY_FACTORY, registrar.classPackageCount())
: new StrippedLongToObjectMap<>(classPackageCache);
addPackages(registrar.classPackages(), newClassMap, registrar.getKeyFilter(true), loader);
classPackageCache = newClassMap;
classPackageCacheGetter = newClassMap;
}
if (registrar.resourcePackageCount() != 0) {
StrippedLongToObjectMap<Loader[]> newResourceMap = resourcePackageCache == null
? new StrippedLongToObjectMap<>(ARRAY_FACTORY, registrar.resourcePackageCount())
: new StrippedLongToObjectMap<>(resourcePackageCache);
resourcePackageCache = newResourceMap;
resourcePackageCacheGetter = newResourceMap;
addPackages(registrar.resourcePackages(), newResourceMap, registrar.getKeyFilter(false), loader);
}
}

Loader @Nullable [] getLoadersByName(@NotNull String resourcePath) {
IntObjectHashMap<Loader[]> map = resourcePath.endsWith(ClassPath.CLASS_EXTENSION) ? classPackageCache : resourcePackageCache;
return map.get(getPackageNameHash(resourcePath, resourcePath.lastIndexOf('/')));
Loader @Nullable [] getLoadersByName(@NotNull String path) {
return (path.endsWith(CLASS_EXTENSION)
? classPackageCacheGetter
: resourcePackageCacheGetter).apply(getPackageNameHash(path, path.lastIndexOf('/')));
}

Loader @Nullable [] getClassLoadersByName(@NotNull String resourcePath) {
return classPackageCache.get(getPackageNameHash(resourcePath, resourcePath.lastIndexOf('/')));
Loader @Nullable [] getLoadersByResourcePackageDir(@NotNull String resourcePath) {
return resourcePackageCacheGetter.apply(getPackageNameHash(resourcePath, resourcePath.length()));
}

static int getPackageNameHash(@NotNull String resourcePath, int endIndex) {
return endIndex <= 0 ? 0 : Murmur3_32Hash.MURMUR3_32.hashString(resourcePath, 0, endIndex);
Loader @Nullable [] getClassLoadersByPackageNameHash(long packageNameHash) {
return classPackageCacheGetter.apply(packageNameHash);
}

public static void addResourceEntries(int[] hashes, @NotNull IntObjectHashMap<Loader[]> map, @NotNull Loader loader) {
public static long getPackageNameHash(@NotNull String resourcePath, int endIndex) {
return endIndex <= 0 ? 0 : Xx3UnencodedString.hashUnencodedStringRange(resourcePath, 0, endIndex);
}

private static void addPackages(long[] hashes, StrippedLongToObjectMap<Loader[]> map, @Nullable LongPredicate hashFilter, Loader loader) {
Loader[] singleArray = null;
for (int hash : hashes) {
for (long hash : hashes) {
if (hashFilter != null && !hashFilter.test(hash)) {
continue;
}

int index = map.index(hash);
Loader[] loaders = map.getByIndex(index, hash);
if (loaders == null) {
if (index < 0) {
if (singleArray == null) {
singleArray = new Loader[]{loader};
}
map.addByIndex(index, hash, singleArray);
}
else {
Loader[] loaders = map.getByIndex(index);
Loader[] newList = new Loader[loaders.length + 1];
System.arraycopy(loaders, 0, newList, 0, loaders.length);
newList[loaders.length] = loader;
@@ -174,57 +167,4 @@ public final class ClasspathCache {
}
}
}

private static void addResourceEntry(int hash, @NotNull IntObjectHashMap<Loader[]> map, @NotNull Loader loader) {
int index = map.index(hash);
Loader[] loaders = map.getByIndex(index, hash);
if (loaders == null) {
map.addByIndex(index, hash, new Loader[]{loader});
}
else {
if (ClassPath.recordLoadingInfo) {
for (Loader value : loaders) {
if (loader == value) {
throw new IllegalStateException("Duplicated loader");
}
}
}
Loader[] newList = new Loader[loaders.length + 1];
System.arraycopy(loaders, 0, newList, 0, loaders.length);
newList[loaders.length] = loader;
map.replaceByIndex(index, hash, newList);
}
}

static final class NameFilter extends BloomFilterBase implements Predicate<String> {
private static final Murmur3_32Hash MURMUR3_32_CUSTOM_SEED = new Murmur3_32Hash(85_486);

NameFilter(int _maxElementCount, double probability) {
super(_maxElementCount, probability);
}

NameFilter(@NotNull ByteBuffer buffer) throws IOException {
super(buffer);
}

private void addNameFingerprint(long nameFingerprint) {
int hash = (int)(nameFingerprint >> 32);
int hash2 = (int)nameFingerprint;
addIt(hash, hash2);
}

@Override
public boolean test(@NotNull String name) {
int end = name.endsWith("/") ? (name.length() - 1) : name.length();
int hash = MURMUR3_32_CUSTOM_SEED.hashString(name, 0, end);
int hash2 = Murmur3_32Hash.MURMUR3_32.hashString(name, 0, end);
return maybeContains(hash, hash2);
}

private static long toNameFingerprint(@NotNull String name, int end) {
int hash = MURMUR3_32_CUSTOM_SEED.hashString(name, 0, end);
int hash2 = Murmur3_32Hash.MURMUR3_32.hashString(name, 0, end);
return ((long)hash << 32) | (hash2 & 0xFFFFFFFFL);
}
}
}
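applyLoaderData publishes a rebuilt package index with a single volatile write, so lookups never take a lock; concurrent writers are already serialized by getLoaderSlowPath. A compact sketch of that copy-on-write publication pattern, assuming a plain HashMap snapshot instead of StrippedLongToObjectMap:

import java.util.HashMap;
import java.util.Map;

final class CopyOnWriteIndex<K, V> {
  private volatile Map<K, V> snapshot = new HashMap<>();

  V get(K key) {
    return snapshot.get(key);          // lock-free read of the current snapshot
  }

  synchronized void put(K key, V value) {
    Map<K, V> copy = new HashMap<>(snapshot);
    copy.put(key, value);
    snapshot = copy;                   // volatile write publishes the new index
  }
}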
@@ -1,10 +1,9 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import com.intellij.openapi.diagnostic.LoggerRt;
|
||||
import com.intellij.util.io.DirectByteBufferPool;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
import org.jetbrains.xxh3.Xx3UnencodedString;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.File;
|
||||
@@ -14,23 +13,20 @@ import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.IntBuffer;
|
||||
import java.nio.LongBuffer;
|
||||
import java.nio.channels.SeekableByteChannel;
|
||||
import java.nio.file.*;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.BlockingDeque;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.LinkedBlockingDeque;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.Deque;
|
||||
import java.util.EnumSet;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
final class FileLoader extends Loader {
|
||||
final class FileLoader implements Loader {
|
||||
private static final EnumSet<StandardOpenOption> READ_OPTIONS = EnumSet.of(StandardOpenOption.READ);
|
||||
private static final EnumSet<StandardOpenOption> WRITE_OPTIONS = EnumSet.of(StandardOpenOption.WRITE, StandardOpenOption.CREATE);
|
||||
private static final EnumSet<StandardOpenOption> WRITE_OPTIONS = EnumSet.of(StandardOpenOption.WRITE, StandardOpenOption.CREATE);
|
||||
|
||||
private static final AtomicInteger totalLoaders = new AtomicInteger();
|
||||
private static final AtomicLong totalScanning = new AtomicLong();
|
||||
@@ -39,23 +35,38 @@ final class FileLoader extends Loader {
|
||||
|
||||
private static final Boolean doFsActivityLogging = false;
|
||||
// find . -name "classpath.index" -delete
|
||||
private static final short ourVersion = 23;
|
||||
private static final short indexFileVersion = 24;
|
||||
|
||||
private final int rootDirAbsolutePathLength;
|
||||
private final boolean isClassPathIndexEnabled;
|
||||
|
||||
private static final BlockingDeque<Map.Entry<ClasspathCache.LoaderData, Path>> loaderDataToSave = new LinkedBlockingDeque<>();
|
||||
private static final AtomicBoolean isSaveThreadStarted = new AtomicBoolean();
|
||||
private final @NotNull Predicate<String> nameFilter;
|
||||
private final @NotNull Path path;
|
||||
|
||||
FileLoader(@NotNull Path path, boolean isClassPathIndexEnabled) {
|
||||
super(path);
|
||||
FileLoader(@NotNull Path path,
|
||||
@Nullable Predicate<String> nameFilter,
|
||||
@Nullable BiConsumer<ClasspathCache.IndexRegistrar, Loader> registrar,
|
||||
boolean isClassPathIndexEnabled) {
|
||||
this.path = path;
|
||||
|
||||
rootDirAbsolutePathLength = path.toString().length();
|
||||
this.isClassPathIndexEnabled = isClassPathIndexEnabled;
|
||||
if (nameFilter == null) {
|
||||
this.nameFilter = registrar == null ? __ -> true : buildData(registrar, isClassPathIndexEnabled);
|
||||
}
|
||||
else {
|
||||
assert registrar == null;
|
||||
this.nameFilter = nameFilter;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
void processResources(@NotNull String dir, @NotNull Predicate<? super String> fileNameFilter, @NotNull BiConsumer<? super String, ? super InputStream> consumer)
|
||||
public @NotNull Path getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processResources(@NotNull String dir,
|
||||
@NotNull Predicate<? super String> fileNameFilter,
|
||||
@NotNull BiConsumer<? super String, ? super InputStream> consumer)
|
||||
throws IOException {
|
||||
try (DirectoryStream<Path> paths = Files.newDirectoryStream(path.resolve(dir))) {
|
||||
for (Path childPath : paths) {
|
||||
@@ -71,12 +82,9 @@ final class FileLoader extends Loader {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public @Nullable Map<Loader.Attribute, String> getAttributes() throws IOException {
|
||||
return null;
|
||||
}
|
||||
|
||||
private void buildPackageCache(@NotNull Path startDir, @NotNull ClasspathCache.LoaderDataBuilder context) {
|
||||
private void buildPackageAndNameCache(Path startDir,
|
||||
ClasspathCache.LoaderDataBuilder context,
|
||||
StrippedLongArrayList nameHashes) {
|
||||
// FileVisitor is not used to avoid getting file attributes for class files
|
||||
// (.class extension is a strong indicator that it is file and not a directory)
|
||||
Deque<Path> dirCandidates = new ArrayDeque<>();
|
||||
@@ -88,12 +96,12 @@ final class FileLoader extends Loader {
|
||||
boolean containsResources = false;
|
||||
for (Path file : dirStream) {
|
||||
String path = startDir.relativize(file).toString().replace(File.separatorChar, '/');
|
||||
if (path.endsWith(ClassPath.CLASS_EXTENSION)) {
|
||||
context.andClassName(path);
|
||||
if (path.endsWith(ClasspathCache.CLASS_EXTENSION)) {
|
||||
nameHashes.add(Xx3UnencodedString.hashUnencodedString(path));
|
||||
containsClasses = true;
|
||||
}
|
||||
else {
|
||||
context.addResourceName(path, path.length());
|
||||
nameHashes.add(Xx3UnencodedString.hashUnencodedString(path));
|
||||
containsResources = true;
|
||||
if (!path.endsWith(".svg") && !path.endsWith(".png") && !path.endsWith(".xml")) {
|
||||
dirCandidates.addLast(file);
|
||||
@@ -128,13 +136,13 @@ final class FileLoader extends Loader {
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable Resource getResource(@NotNull String name) {
|
||||
public @Nullable Resource getResource(@NotNull String name) {
|
||||
Path file = path.resolve(name);
|
||||
return Files.exists(file) ? new FileResource(file) : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable Class<?> findClass(@NotNull String fileName, String className, ClassPath.ClassDataConsumer classConsumer) throws IOException {
|
||||
public @Nullable Class<?> findClass(String fileName, String className, ClassPath.ClassDataConsumer classConsumer) throws IOException {
|
||||
Path file = path.resolve(fileName);
|
||||
byte[] data;
|
||||
try {
|
||||
@@ -143,10 +151,10 @@ final class FileLoader extends Loader {
|
||||
catch (NoSuchFileException e) {
|
||||
return null;
|
||||
}
|
||||
return classConsumer.consumeClassData(className, data, this, null);
|
||||
return classConsumer.consumeClassData(className, data, this);
|
||||
}
|
||||
|
||||
private static ClasspathCache.LoaderData readFromIndex(Path index) {
|
||||
private static @Nullable LoaderData readFromIndex(Path index) {
|
||||
long started = System.nanoTime();
|
||||
boolean isOk = false;
|
||||
short version = -1;
|
||||
@@ -162,15 +170,14 @@ final class FileLoader extends Loader {
|
||||
// little endian - native order for Intel and Apple ARM
|
||||
buffer.order(ByteOrder.LITTLE_ENDIAN);
|
||||
version = buffer.getShort();
|
||||
if (version == ourVersion) {
|
||||
int[] classPackageHashes = new int[buffer.getInt()];
|
||||
int[] resourcePackageHashes = new int[buffer.getInt()];
|
||||
IntBuffer intBuffer = buffer.asIntBuffer();
|
||||
intBuffer.get(classPackageHashes);
|
||||
intBuffer.get(resourcePackageHashes);
|
||||
buffer.position(buffer.position() + intBuffer.position() * Integer.BYTES);
|
||||
ClasspathCache.LoaderData loaderData =
|
||||
new ClasspathCache.LoaderData(resourcePackageHashes, classPackageHashes, new ClasspathCache.NameFilter(buffer));
|
||||
if (version == indexFileVersion) {
|
||||
long[] classPackageHashes = new long[buffer.getInt()];
|
||||
long[] resourcePackageHashes = new long[buffer.getInt()];
|
||||
LongBuffer longBuffer = buffer.asLongBuffer();
|
||||
longBuffer.get(classPackageHashes);
|
||||
longBuffer.get(resourcePackageHashes);
|
||||
buffer.position(buffer.position() + (longBuffer.position() * Long.BYTES));
|
||||
LoaderData loaderData = new LoaderData(resourcePackageHashes, classPackageHashes, new NameFilter(new Xor16(buffer)));
|
||||
isOk = true;
|
||||
return loaderData;
|
||||
}
|
||||
@@ -183,7 +190,10 @@ final class FileLoader extends Loader {
|
||||
isOk = true;
|
||||
}
|
||||
catch (Exception e) {
|
||||
LoggerRt.getInstance(FileLoader.class).warn("Cannot read classpath index (version=" + version + ", module=" + index.getParent().getFileName() + ")", e);
|
||||
//noinspection UseOfSystemOutOrSystemErr
|
||||
System.err.println("Cannot read classpath index (version=" + version + ", module=" + index.getParent().getFileName() + ")");
|
||||
//noinspection CallToPrintStackTrace
|
||||
e.printStackTrace();
|
||||
}
|
||||
finally {
|
||||
if (!isOk) {
|
||||
@@ -199,74 +209,47 @@ final class FileLoader extends Loader {
|
||||
return null;
|
||||
}
|
||||
|
||||
private static void saveToIndex(@NotNull ClasspathCache.LoaderData data, @NotNull Path indexFile) throws IOException {
|
||||
private static void saveIndex(@NotNull LoaderData data, @NotNull Path indexFile) throws IOException {
|
||||
long started = System.nanoTime();
|
||||
boolean isOk = false;
|
||||
SeekableByteChannel channel = null;
|
||||
ByteBuffer buffer = DirectByteBufferPool.DEFAULT_POOL.allocate(Short.BYTES + data.approximateSizeInBytes());
|
||||
try {
|
||||
ByteBuffer buffer = DirectByteBufferPool.DEFAULT_POOL.allocate(Short.BYTES + data.sizeInBytes());
|
||||
try {
|
||||
buffer.order(ByteOrder.LITTLE_ENDIAN);
|
||||
buffer.order(ByteOrder.LITTLE_ENDIAN);
|
||||
|
||||
buffer.putShort(ourVersion);
|
||||
data.save(buffer);
|
||||
assert buffer.remaining() == 0;
|
||||
buffer.flip();
|
||||
channel = Files.newByteChannel(indexFile, WRITE_OPTIONS);
|
||||
buffer.putShort(indexFileVersion);
|
||||
data.save(buffer);
|
||||
assert buffer.remaining() == 0;
|
||||
buffer.flip();
|
||||
try (SeekableByteChannel channel = Files.newByteChannel(indexFile, WRITE_OPTIONS)) {
|
||||
do {
|
||||
channel.write(buffer);
|
||||
}
|
||||
while (buffer.hasRemaining());
|
||||
|
||||
isOk = true;
|
||||
}
|
||||
finally {
|
||||
DirectByteBufferPool.DEFAULT_POOL.release(buffer);
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if (channel != null) {
|
||||
try {
|
||||
channel.close();
|
||||
}
|
||||
catch (Exception e) {
|
||||
if (isOk) {
|
||||
LoggerRt.getInstance(FileLoader.class).warn(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!isOk) {
|
||||
try {
|
||||
Files.deleteIfExists(indexFile);
|
||||
}
|
||||
catch (IOException ignore) {
|
||||
}
|
||||
}
|
||||
DirectByteBufferPool.DEFAULT_POOL.release(buffer);
|
||||
totalSaving.addAndGet(System.nanoTime() - started);
|
||||
}
|
||||
}
|
||||
|
||||
private Path getIndexFileFile() {
|
||||
return path.resolve("classpath.index");
|
||||
}
|
||||
private @NotNull Predicate<String> buildData(@NotNull BiConsumer<ClasspathCache.IndexRegistrar, Loader> registrar,
|
||||
boolean isClassPathIndexEnabled) {
|
||||
Path indexFile = isClassPathIndexEnabled ? path.resolve("classpath.index") : null;
|
||||
LoaderData loaderData = indexFile == null ? null : readFromIndex(indexFile);
|
||||
|
||||
@Override
|
||||
public @NotNull ClasspathCache.IndexRegistrar buildData() {
|
||||
ClasspathCache.LoaderData loaderData = null;
|
||||
Path indexFile = isClassPathIndexEnabled ? getIndexFileFile() : null;
|
||||
if (indexFile != null) {
|
||||
loaderData = readFromIndex(indexFile);
|
||||
}
|
||||
|
||||
int nsMsFactor = 1000000;
|
||||
int nsMsFactor = 1_000_000;
|
||||
int currentLoaders = totalLoaders.incrementAndGet();
|
||||
long currentScanningTime;
|
||||
if (loaderData == null) {
|
||||
long started = System.nanoTime();
|
||||
|
||||
ClasspathCache.LoaderDataBuilder loaderDataBuilder = new ClasspathCache.LoaderDataBuilder(true);
|
||||
buildPackageCache(path, loaderDataBuilder);
|
||||
loaderData = loaderDataBuilder.build();
|
||||
StrippedLongArrayList nameHashes = new StrippedLongArrayList();
|
||||
|
||||
ClasspathCache.LoaderDataBuilder loaderDataBuilder = new ClasspathCache.LoaderDataBuilder();
|
||||
buildPackageAndNameCache(path, loaderDataBuilder, nameHashes);
|
||||
loaderData = new LoaderData(loaderDataBuilder.resourcePackageHashes.toArray(),
|
||||
loaderDataBuilder.classPackageHashes.toArray(),
|
||||
new NameFilter(Xor16.construct(nameHashes.elements(), 0, nameHashes.size())));
|
||||
long doneNanos = System.nanoTime() - started;
|
||||
currentScanningTime = totalScanning.addAndGet(doneNanos);
|
||||
if (doFsActivityLogging) {
|
||||
@@ -274,9 +257,21 @@ final class FileLoader extends Loader {
|
||||
System.out.println("Scanned: " + path + " for " + (doneNanos / nsMsFactor) + "ms");
|
||||
}
|
||||
|
||||
if (indexFile != null) {
|
||||
loaderDataToSave.addLast(new AbstractMap.SimpleImmutableEntry<>(loaderData, indexFile));
|
||||
startCacheSavingIfNeeded();
|
||||
if (isClassPathIndexEnabled) {
|
||||
try {
|
||||
Path tempFile = indexFile.getParent().resolve("classpath.index.tmp");
|
||||
saveIndex(loaderData, tempFile);
|
||||
try {
|
||||
Files.move(tempFile, indexFile, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
|
||||
}
|
||||
catch (AtomicMoveNotSupportedException e) {
|
||||
Files.move(tempFile, indexFile, StandardCopyOption.REPLACE_EXISTING);
|
||||
}
|
||||
}
|
||||
catch (IOException e) {
|
||||
//noinspection CallToPrintStackTrace
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -289,43 +284,8 @@ final class FileLoader extends Loader {
|
||||
", loading: " + (totalReading.get() / nsMsFactor) + "ms for " + currentLoaders + " loaders");
|
||||
}
|
||||
|
||||
return loaderData;
}
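
The classpath index above is persisted by writing to a temporary file and then moving it into place, atomically where the file system allows it. A minimal, self-contained sketch of that pattern (the class and file names below are illustrative, not part of this change):

import java.io.IOException;
import java.nio.file.AtomicMoveNotSupportedException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

final class AtomicIndexWriteSketch {
  static void write(Path target, byte[] payload) throws IOException {
    // write the full payload to a sibling temp file first
    Path tempFile = target.getParent().resolve(target.getFileName() + ".tmp");
    Files.write(tempFile, payload);
    try {
      // an atomic move ensures readers never observe a half-written index
      Files.move(tempFile, target, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
    }
    catch (AtomicMoveNotSupportedException e) {
      // some file systems cannot move atomically; fall back to a plain replace
      Files.move(tempFile, target, StandardCopyOption.REPLACE_EXISTING);
    }
  }
}
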
private static void startCacheSavingIfNeeded() {
|
||||
if (!isSaveThreadStarted.compareAndSet(false, true)) {
|
||||
return;
|
||||
}
|
||||
|
||||
Executors.newSingleThreadScheduledExecutor(r -> {
|
||||
Thread thread = new Thread(r, "Save classpath indexes for file loader");
|
||||
thread.setDaemon(true);
|
||||
thread.setPriority(Thread.MIN_PRIORITY);
|
||||
return thread;
|
||||
}).schedule(() -> {
|
||||
while (true) {
|
||||
try {
|
||||
Map.Entry<ClasspathCache.LoaderData, Path> entry = loaderDataToSave.takeFirst();
|
||||
Path finalFile = entry.getValue();
|
||||
Path tempFile = finalFile.getParent().resolve("classpath.index.tmp");
|
||||
try {
|
||||
saveToIndex(entry.getKey(), tempFile);
|
||||
try {
|
||||
Files.move(tempFile, finalFile, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
|
||||
}
|
||||
catch (AtomicMoveNotSupportedException e) {
|
||||
Files.move(tempFile, finalFile, StandardCopyOption.REPLACE_EXISTING);
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
LoggerRt.getInstance(FileLoader.class).warn("Cannot save classpath index for module " + finalFile.getParent().getFileName(), e);
|
||||
}
|
||||
}
|
||||
catch (InterruptedException ignored) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}, 10, TimeUnit.SECONDS);
|
||||
registrar.accept(loaderData, this);
|
||||
return loaderData.nameFilter;
|
||||
}
|
||||
|
||||
private static final class FileResource implements Resource {
|
||||
@@ -371,4 +331,78 @@ final class FileLoader extends Loader {
|
||||
public String toString() {
|
||||
return "FileLoader(path=" + path + ')';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsName(String name) {
|
||||
return name.isEmpty() || nameFilter.test(name);
|
||||
}
|
||||
|
||||
private static final class LoaderData implements ClasspathCache.IndexRegistrar {
|
||||
private final long[] resourcePackageHashes;
|
||||
private final long[] classPackageHashes;
|
||||
private final NameFilter nameFilter;
|
||||
|
||||
LoaderData(long[] resourcePackageHashes, long[] classPackageHashes, NameFilter nameFilter) {
|
||||
this.resourcePackageHashes = resourcePackageHashes;
|
||||
this.classPackageHashes = classPackageHashes;
|
||||
this.nameFilter = nameFilter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int classPackageCount() {
|
||||
return classPackageHashes.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int resourcePackageCount() {
|
||||
return resourcePackageHashes.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Predicate<String> getNameFilter() {
|
||||
return nameFilter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long[] classPackages() {
|
||||
return classPackageHashes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long[] resourcePackages() {
|
||||
return resourcePackageHashes;
|
||||
}
|
||||
|
||||
int approximateSizeInBytes() {
|
||||
return Integer.BYTES * 2 +
|
||||
classPackageHashes.length * Long.BYTES +
|
||||
resourcePackageHashes.length * Long.BYTES +
|
||||
nameFilter.filter.sizeInBytes();
|
||||
}
|
||||
|
||||
void save(@NotNull ByteBuffer buffer) {
|
||||
buffer.putInt(classPackageHashes.length);
|
||||
buffer.putInt(resourcePackageHashes.length);
|
||||
LongBuffer longBuffer = buffer.asLongBuffer();
|
||||
longBuffer.put(classPackageHashes);
|
||||
longBuffer.put(resourcePackageHashes);
|
||||
buffer.position(buffer.position() + longBuffer.position() * Long.BYTES);
|
||||
nameFilter.filter.write(buffer);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class NameFilter implements Predicate<String> {
|
||||
final Xor16 filter;
|
||||
|
||||
NameFilter(Xor16 filter) {
|
||||
this.filter = filter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean test(@NotNull String name) {
|
||||
int lastIndex = name.length() - 1;
|
||||
int end = name.charAt(lastIndex) == '/' ? lastIndex : name.length();
|
||||
return filter.mightContain(Xx3UnencodedString.hashUnencodedStringRange(name, 0, end));
|
||||
}
|
||||
}
}
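
The NameFilter above wraps an Xor16 probabilistic filter over xxHash3 hashes of entry names: it may report false positives, but never false negatives, so a negative answer lets the loader skip disk access. A hedged usage fragment (the package locations of Xor16 and Xx3UnencodedString are internal and assumed here, and the entry names are made up):

String present = "com/example/Foo.class";
String absent = "com/example/Bar.class";
long[] hashes = {Xx3UnencodedString.hashUnencodedStringRange(present, 0, present.length())};
Xor16 filter = Xor16.construct(hashes, 0, hashes.length);

// a name that was added always passes the filter
boolean hit = filter.mightContain(Xx3UnencodedString.hashUnencodedStringRange(present, 0, present.length()));   // true
// a name that was not added usually fails, but may rarely pass - callers still verify on disk
boolean maybe = filter.mightContain(Xx3UnencodedString.hashUnencodedStringRange(absent, 0, absent.length()));
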
@@ -13,7 +13,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.intellij.util.lang.fastutil;
|
||||
package com.intellij.util.lang;
|
||||
|
||||
final class Hash {
|
||||
static final float DEFAULT_LOAD_FACTOR = .75f;
|
||||
@@ -1,7 +1,6 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import com.intellij.openapi.diagnostic.LoggerRt;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
@@ -20,7 +19,11 @@ import java.util.function.BiConsumer;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.jar.Attributes;
|
||||
|
||||
public class JarLoader extends Loader {
|
||||
final class JarLoader implements Loader {
|
||||
public enum Attribute {
|
||||
SPEC_TITLE, SPEC_VERSION, SPEC_VENDOR, CLASS_PATH, IMPL_TITLE, IMPL_VERSION, IMPL_VENDOR
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static final Map.Entry<Attribute, Attributes.Name>[] PACKAGE_FIELDS = new Map.Entry[]{
|
||||
new AbstractMap.SimpleImmutableEntry<>(Attribute.SPEC_TITLE, Attributes.Name.SPECIFICATION_TITLE),
|
||||
@@ -32,26 +35,31 @@ public class JarLoader extends Loader {
|
||||
new AbstractMap.SimpleImmutableEntry<>(Attribute.IMPL_VENDOR, Attributes.Name.IMPLEMENTATION_VENDOR)
|
||||
};
|
||||
|
||||
protected final ClassPath configuration;
|
||||
final ClassPath configuration;
|
||||
final URL url;
|
||||
protected final ResourceFile zipFile;
|
||||
private volatile Map<Loader.Attribute, String> attributes;
|
||||
private final ResourceFile zipFile;
|
||||
private final Path path;
|
||||
|
||||
JarLoader(@NotNull Path file, @NotNull ClassPath configuration, @NotNull ResourceFile zipFile) throws IOException {
|
||||
super(file);
|
||||
JarLoader(@NotNull Path path, @NotNull ClassPath configuration, @NotNull ResourceFile zipFile) throws IOException {
|
||||
this.path = path;
|
||||
|
||||
this.configuration = configuration;
|
||||
this.zipFile = zipFile;
|
||||
url = new URL("jar", "", -1, fileToUri(file) + "!/");
|
||||
url = new URL("jar", "", -1, fileToUri(path) + "!/");
|
||||
}
|
||||
|
||||
@Override
|
||||
public final Map<Attribute, String> getAttributes() throws IOException {
|
||||
return loadManifestAttributes(zipFile);
|
||||
public Path getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
@Override
|
||||
final @Nullable Class<?> findClass(@NotNull String fileName, String className, @NotNull ClassPath.ClassDataConsumer classConsumer) throws IOException {
|
||||
public boolean containsName(String name) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public @Nullable Class<?> findClass(String fileName, String className, ClassPath.ClassDataConsumer classConsumer) throws IOException {
|
||||
return zipFile.findClass(fileName, className, this, classConsumer);
|
||||
}
|
||||
|
||||
@@ -73,21 +81,17 @@ public class JarLoader extends Loader {
|
||||
}
|
||||
}
|
||||
|
||||
final @Nullable String getClassPathManifestAttribute() throws IOException {
|
||||
return loadManifestAttributes(zipFile).get(Attribute.CLASS_PATH);
|
||||
}
|
||||
|
||||
private static @NotNull Map<Loader.Attribute, String> getAttributes(@NotNull Attributes attributes) {
|
||||
static @NotNull Map<JarLoader.Attribute, String> getAttributes(@NotNull Attributes attributes) {
|
||||
if (attributes.isEmpty()) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
Map<Loader.Attribute, String> map = null;
|
||||
for (Map.Entry<Loader.Attribute, Attributes.Name> p : PACKAGE_FIELDS) {
|
||||
Map<JarLoader.Attribute, String> map = null;
|
||||
for (Map.Entry<JarLoader.Attribute, Attributes.Name> p : PACKAGE_FIELDS) {
|
||||
String value = attributes.getValue(p.getValue());
|
||||
if (value != null) {
|
||||
if (map == null) {
|
||||
map = new EnumMap<>(Loader.Attribute.class);
|
||||
map = new EnumMap<>(JarLoader.Attribute.class);
|
||||
}
|
||||
map.put(p.getKey(), value);
|
||||
}
|
||||
@@ -95,64 +99,27 @@ public class JarLoader extends Loader {
|
||||
return map == null ? Collections.emptyMap() : map;
|
||||
}
|
||||
|
||||
private @NotNull Map<Loader.Attribute, String> loadManifestAttributes(@NotNull ResourceFile resourceFile) throws IOException {
|
||||
Map<Loader.Attribute, String> result = attributes;
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
synchronized (this) {
|
||||
result = attributes;
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
result = configuration.getManifestData(path);
|
||||
if (result == null) {
|
||||
Attributes manifestAttributes = resourceFile.loadManifestAttributes();
|
||||
result = manifestAttributes == null ? Collections.emptyMap() : getAttributes(manifestAttributes);
|
||||
configuration.cacheManifestData(path, result);
|
||||
}
|
||||
attributes = result;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final @NotNull ClasspathCache.IndexRegistrar buildData() throws IOException {
|
||||
return zipFile.buildClassPathCacheData();
|
||||
}
|
||||
|
||||
@Override
|
||||
final @Nullable Resource getResource(@NotNull String name) {
|
||||
public @Nullable Resource getResource(@NotNull String name) {
|
||||
try {
|
||||
return zipFile.getResource(name, this);
|
||||
}
|
||||
catch (IOException e) {
|
||||
error("url: " + path, e);
|
||||
//noinspection CallToPrintStackTrace
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
void processResources(@NotNull String dir,
|
||||
@NotNull Predicate<? super String> fileNameFilter,
|
||||
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException {
|
||||
public void processResources(@NotNull String dir,
|
||||
@NotNull Predicate<? super String> fileNameFilter,
|
||||
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException {
|
||||
zipFile.processResources(dir, fileNameFilter, consumer);
|
||||
}
|
||||
|
||||
protected final void error(@NotNull String message, @NotNull Throwable t) {
|
||||
LoggerRt logger = LoggerRt.getInstance(JarLoader.class);
|
||||
if (configuration.errorOnMissingJar) {
|
||||
logger.error(message, t);
|
||||
}
|
||||
else {
|
||||
logger.warn(message, t);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return "JarLoader [" + path + "]";
|
||||
public String toString() {
|
||||
return "JarLoader(path=" + path + ")";
|
||||
}
}
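
loadManifestAttributes above computes the manifest attributes lazily, publishes them through a volatile field, and re-checks under a lock so concurrent callers do not compute twice. A minimal, self-contained sketch of that double-checked caching idiom (the value type and the computation are placeholders, not this class's real data):

import java.util.Collections;
import java.util.Map;

final class LazyAttributesSketch {
  private volatile Map<String, String> attributes;

  Map<String, String> get() {
    Map<String, String> result = attributes;   // fast path: already published
    if (result != null) {
      return result;
    }
    synchronized (this) {
      result = attributes;                     // re-check under the lock
      if (result == null) {
        result = computeOnce();
        attributes = result;                   // publish for later callers
      }
    }
    return result;
  }

  private Map<String, String> computeOnce() {
    // stand-in for reading the manifest; the real code consults the jar and a shared cache
    return Collections.singletonMap("Implementation-Title", "example");
  }
}
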
@@ -1,13 +1,11 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import com.intellij.ReviseWhenPortedToJDK;
|
||||
import org.jetbrains.annotations.ApiStatus;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
@ReviseWhenPortedToJDK("11")
|
||||
@ApiStatus.Internal
|
||||
// implementation of `copyOf` is allowed to not do copy - it can return the same map, read `copyOf` as `immutable`
|
||||
public abstract class Java11Shim {
|
||||
|
||||
@@ -12,12 +12,10 @@ import java.lang.ref.SoftReference;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Path;
|
||||
import java.security.ProtectionDomain;
|
||||
import java.util.Enumeration;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.jar.Attributes;
|
||||
import java.util.jar.JarEntry;
|
||||
import java.util.jar.JarFile;
|
||||
import java.util.jar.Manifest;
|
||||
import java.util.zip.ZipEntry;
|
||||
@@ -28,14 +26,12 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
private volatile SoftReference<ZipFile> zipFileSoftReference;
|
||||
private final boolean lockJars;
|
||||
private final File file;
|
||||
private final boolean isSecureLoader;
|
||||
|
||||
private static final Object lock = new Object();
|
||||
|
||||
JdkZipResourceFile(@NotNull Path path, boolean lockJars, boolean isSecureLoader) {
|
||||
JdkZipResourceFile(@NotNull Path path, boolean lockJars) {
|
||||
this.lockJars = lockJars;
|
||||
this.file = path.toFile();
|
||||
this.isSecureLoader = isSecureLoader;
|
||||
}
|
||||
|
||||
@SuppressWarnings("DuplicatedCode")
|
||||
@@ -54,7 +50,7 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
// This code is executed at least 100K times (O(number of classes needed to load)), and opening a ZipFile that many times takes considerable time,
// so we keep a reference to the ZipFile if we are allowed to lock the file (assuming it isn't changed)
|
||||
if (!lockJars) {
|
||||
return createZipFile(file);
|
||||
return new ZipFile(file);
|
||||
}
|
||||
|
||||
SoftReference<ZipFile> ref = zipFileSoftReference;
|
||||
@@ -70,16 +66,12 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
return zipFile;
|
||||
}
|
||||
|
||||
zipFile = createZipFile(file);
|
||||
zipFile = new ZipFile(file);
|
||||
zipFileSoftReference = new SoftReference<>(zipFile);
|
||||
}
|
||||
return zipFile;
|
||||
}
|
||||
|
||||
private ZipFile createZipFile(@NotNull File file) throws IOException {
|
||||
return isSecureLoader ? new JarFile(file) : new ZipFile(file);
|
||||
}
|
||||
|
||||
@Override
|
||||
public @Nullable Class<?> findClass(@NotNull String fileName, String className, JarLoader jarLoader, ClassPath.ClassDataConsumer classConsumer)
|
||||
throws IOException {
|
||||
@@ -94,15 +86,7 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
try (InputStream stream = zipFile.getInputStream(entry)) {
|
||||
bytes = loadBytes(stream, (int)entry.getSize());
|
||||
}
|
||||
|
||||
ProtectionDomain protectionDomain;
|
||||
if (jarLoader instanceof SecureJarLoader) {
|
||||
protectionDomain = ((SecureJarLoader)jarLoader).getProtectionDomain((JarEntry)entry, new URL(jarLoader.url, entry.getName()));
|
||||
}
|
||||
else {
|
||||
protectionDomain = null;
|
||||
}
|
||||
return classConsumer.consumeClassData(className, bytes, jarLoader, protectionDomain);
|
||||
return classConsumer.consumeClassData(className, bytes, jarLoader);
|
||||
}
|
||||
finally {
|
||||
if (!lockJars) {
|
||||
@@ -125,12 +109,7 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
if (entry == null) {
|
||||
return null;
|
||||
}
|
||||
if (isSecureLoader) {
|
||||
return new SecureJarResource(jarLoader.url, (JarEntry)entry, (SecureJarLoader)jarLoader);
|
||||
}
|
||||
else {
|
||||
return new ZipFileResource(jarLoader.url, entry, this);
|
||||
}
|
||||
return new ZipFileResource(jarLoader.url, entry, this);
|
||||
}
|
||||
finally {
|
||||
if (!lockJars) {
|
||||
@@ -166,19 +145,10 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
public @NotNull ClasspathCache.IndexRegistrar buildClassPathCacheData() throws IOException {
|
||||
ZipFile zipFile = getZipFile();
|
||||
try {
|
||||
ClasspathCache.LoaderDataBuilder builder = new ClasspathCache.LoaderDataBuilder(true);
|
||||
ClasspathCache.LoaderDataBuilder builder = new ClasspathCache.LoaderDataBuilder();
|
||||
Enumeration<? extends ZipEntry> entries = zipFile.entries();
|
||||
while (entries.hasMoreElements()) {
|
||||
ZipEntry entry = entries.nextElement();
|
||||
String name = entry.getName();
|
||||
if (name.endsWith(ClassPath.CLASS_EXTENSION)) {
|
||||
builder.addClassPackageFromName(name);
|
||||
builder.andClassName(name);
|
||||
}
|
||||
else {
|
||||
builder.addResourcePackageFromName(name);
|
||||
builder.addResourceName(name, name.endsWith("/") ? name.length() - 1 : name.length());
|
||||
}
|
||||
builder.addPackageFromName(entries.nextElement().getName());
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
@@ -189,11 +159,11 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
}
|
||||
}
|
||||
|
||||
private static class ZipFileResource implements Resource {
|
||||
protected final URL baseUrl;
|
||||
private static final class ZipFileResource implements Resource {
|
||||
private final URL baseUrl;
|
||||
private URL url;
|
||||
protected final ZipEntry entry;
|
||||
protected final JdkZipResourceFile file;
|
||||
private final ZipEntry entry;
|
||||
private final JdkZipResourceFile file;
|
||||
|
||||
private ZipFileResource(@NotNull URL baseUrl, @NotNull ZipEntry entry, @NotNull JdkZipResourceFile file) {
|
||||
this.baseUrl = baseUrl;
|
||||
@@ -239,23 +209,4 @@ final class JdkZipResourceFile implements ResourceFile {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static final class SecureJarResource extends JdkZipResourceFile.ZipFileResource {
|
||||
SecureJarResource(@NotNull URL baseUrl, @NotNull JarEntry entry, @NotNull SecureJarLoader jarLoader) {
|
||||
super(baseUrl, entry, (JdkZipResourceFile)jarLoader.zipFile);
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte @NotNull [] getBytes() throws IOException {
|
||||
ZipFile zipFile = file.getZipFile();
|
||||
try (InputStream stream = zipFile.getInputStream(entry)) {
|
||||
return loadBytes(stream, (int)entry.getSize());
|
||||
}
|
||||
finally {
|
||||
if (!file.lockJars) {
|
||||
zipFile.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
}
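
When jar locking is allowed, the loader above keeps the open ZipFile behind a SoftReference, so repeated lookups avoid reopening the archive while the GC may still reclaim it under memory pressure. A self-contained sketch of that caching shape (the class name and locking granularity are illustrative):

import java.io.File;
import java.io.IOException;
import java.lang.ref.SoftReference;
import java.util.zip.ZipFile;

final class CachedZipFileSketch {
  private final File file;
  private volatile SoftReference<ZipFile> cache;

  CachedZipFileSketch(File file) {
    this.file = file;
  }

  ZipFile get() throws IOException {
    SoftReference<ZipFile> ref = cache;
    ZipFile zipFile = ref == null ? null : ref.get();
    if (zipFile != null) {
      return zipFile;                       // cheap hit: archive is still open
    }
    synchronized (this) {
      ref = cache;
      zipFile = ref == null ? null : ref.get();
      if (zipFile == null) {
        zipFile = new ZipFile(file);        // reopen only when the reference was cleared
        cache = new SoftReference<>(zipFile);
      }
    }
    return zipFile;
  }
}
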
@@ -1,56 +1,27 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Map;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
/**
|
||||
* An object responsible for loading classes and resources from a particular classpath element: a jar or a directory.
|
||||
*
|
||||
* @see JarLoader
|
||||
* @see FileLoader
|
||||
*/
|
||||
public abstract class Loader {
|
||||
public enum Attribute {
|
||||
SPEC_TITLE, SPEC_VERSION, SPEC_VENDOR, CLASS_PATH, IMPL_TITLE, IMPL_VERSION, IMPL_VENDOR
|
||||
}
|
||||
public interface Loader {
|
||||
Path getPath();
|
||||
|
||||
final @NotNull Path path;
|
||||
private Predicate<String> nameFilter;
|
||||
@Nullable Resource getResource(String name);
|
||||
|
||||
Loader(@NotNull Path path) {
|
||||
this.path = path;
|
||||
}
|
||||
void processResources(String dir,
|
||||
Predicate<? super String> fileNameFilter,
|
||||
BiConsumer<? super String, ? super InputStream> consumer) throws IOException;
|
||||
|
||||
abstract @Nullable Resource getResource(@NotNull String name);
|
||||
@Nullable Class<?> findClass(String fileName, String className, ClassPath.ClassDataConsumer classConsumer) throws IOException;
|
||||
|
||||
abstract void processResources(@NotNull String dir,
|
||||
@NotNull Predicate<? super String> fileNameFilter,
|
||||
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException;
|
||||
|
||||
public abstract Map<Loader.Attribute, String> getAttributes() throws IOException;
|
||||
|
||||
abstract @Nullable Class<?> findClass(@NotNull String fileName, String className, ClassPath.ClassDataConsumer classConsumer) throws IOException;
|
||||
|
||||
abstract @NotNull ClasspathCache.IndexRegistrar buildData() throws IOException;
|
||||
|
||||
final boolean containsName(@NotNull String name) {
|
||||
if (name.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
Predicate<String> filter = nameFilter;
|
||||
return filter == null || filter.test(name);
|
||||
}
|
||||
|
||||
final void setNameFilter(@NotNull Predicate<String> filter) {
|
||||
nameFilter = filter;
|
||||
}
|
||||
boolean containsName(String name);
}
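
A hedged fragment showing how a caller is expected to combine containsName with getResource: the probabilistic pre-check is cheap and can only over-approximate, so a miss safely skips the loader entirely (the loaders collection and the helper method are assumptions, not part of this change):

static Resource findResource(Iterable<Loader> loaders, String name) {
  for (Loader loader : loaders) {
    if (!loader.containsName(name)) {
      continue;                            // filter says "definitely absent" - skip I/O for this loader
    }
    Resource resource = loader.getResource(name);
    if (resource != null) {
      return resource;                     // first loader that really has the resource wins
    }
  }
  return null;
}
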
@@ -1,4 +1,4 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
@@ -20,6 +20,7 @@ public interface ResourceFile {
|
||||
@Nullable Class<?> findClass(String fileName, String className, JarLoader jarLoader, ClassPath.ClassDataConsumer classConsumer)
|
||||
throws IOException;
|
||||
|
||||
void processResources(@NotNull String dir, @NotNull Predicate<? super String> nameFilter, @NotNull BiConsumer<? super String, ? super InputStream> consumer)
|
||||
throws IOException;
|
||||
void processResources(@NotNull String dir,
|
||||
@NotNull Predicate<? super String> nameFilter,
|
||||
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException;
|
||||
}
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Path;
|
||||
import java.security.CodeSource;
|
||||
import java.security.Permissions;
|
||||
import java.security.ProtectionDomain;
|
||||
import java.util.jar.JarEntry;
|
||||
|
||||
final class SecureJarLoader extends JarLoader {
|
||||
private volatile @Nullable ProtectionDomain protectionDomain;
|
||||
private final Object protectionDomainMonitor = new Object();
|
||||
|
||||
SecureJarLoader(@NotNull Path file, @NotNull ClassPath configuration) throws IOException {
|
||||
super(file, configuration, new JdkZipResourceFile(file, configuration.lockJars, true));
|
||||
}
|
||||
|
||||
ProtectionDomain getProtectionDomain(@NotNull JarEntry entry, URL url) {
|
||||
ProtectionDomain result = protectionDomain;
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
synchronized (protectionDomainMonitor) {
|
||||
result = protectionDomain;
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeSource codeSource = new CodeSource(url, entry.getCodeSigners());
|
||||
result = new ProtectionDomain(codeSource, new Permissions());
|
||||
protectionDomain = result;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,165 @@
|
||||
/*
|
||||
* Copyright (C) 2002-2021 Sebastiano Vigna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.intellij.util.lang;
|
||||
|
||||
final class StrippedLongArrayList {
|
||||
private static final long[] DEFAULT_EMPTY_ARRAY = {};
|
||||
private static final long[] EMPTY_ARRAY = {};
|
||||
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
|
||||
|
||||
/**
|
||||
* The initial default capacity of an array list.
|
||||
*/
|
||||
private static final int DEFAULT_INITIAL_CAPACITY = 10;
|
||||
|
||||
/**
|
||||
* The backing array.
|
||||
*/
|
||||
private transient long[] a;
|
||||
|
||||
/**
|
||||
* The current actual size of the list (never greater than the backing-array
|
||||
* length).
|
||||
*/
|
||||
private int size;
|
||||
|
||||
public StrippedLongArrayList(int capacity) {
|
||||
if (capacity < 0) {
|
||||
throw new IllegalArgumentException("Initial capacity (" + capacity + ") is negative");
|
||||
}
|
||||
a = capacity == 0 ? EMPTY_ARRAY : new long[capacity];
|
||||
}
|
||||
|
||||
public StrippedLongArrayList() {
|
||||
a = DEFAULT_EMPTY_ARRAY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the backing array of this list.
|
||||
*
|
||||
* @return the backing array.
|
||||
*/
|
||||
public long[] elements() {
|
||||
return a;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures that this array list can contain the given number of entries without
|
||||
* resizing.
|
||||
*
|
||||
* @param capacity the new minimum capacity for this array list.
|
||||
*/
|
||||
public void ensureCapacity(final int capacity) {
|
||||
if (capacity <= a.length || (a == DEFAULT_EMPTY_ARRAY && capacity <= DEFAULT_INITIAL_CAPACITY)) {
|
||||
return;
|
||||
}
|
||||
a = ensureCapacity(a, capacity, size);
|
||||
assert size <= a.length;
|
||||
}
|
||||
|
||||
private static long[] ensureCapacity(final long[] array, final int length, final int preserve) {
|
||||
return length > array.length ? forceCapacity(array, length, preserve) : array;
|
||||
}
|
||||
|
||||
private static long[] forceCapacity(final long[] array, final int length, final int preserve) {
|
||||
final long[] t = new long[length];
|
||||
System.arraycopy(array, 0, t, 0, preserve);
|
||||
return t;
|
||||
}
|
||||
|
||||
/**
|
||||
* Grows this array list, ensuring that it can contain the given number of
* entries without resizing, and, when growth is needed, increasing the current
* capacity by at least 50%.
|
||||
*
|
||||
* @param capacity the new minimum capacity for this array list.
|
||||
*/
|
||||
private void grow(int capacity) {
|
||||
if (capacity <= a.length) {
|
||||
return;
|
||||
}
|
||||
if (a != DEFAULT_EMPTY_ARRAY) {
|
||||
capacity = (int)Math.max(
|
||||
Math.min((long)a.length + (a.length >> 1), MAX_ARRAY_SIZE), capacity);
|
||||
}
|
||||
else if (capacity < DEFAULT_INITIAL_CAPACITY) {
|
||||
capacity = DEFAULT_INITIAL_CAPACITY;
|
||||
}
|
||||
a = forceCapacity(a, capacity, size);
|
||||
assert size <= a.length;
|
||||
}
|
||||
|
||||
public void add(final int index, final long k) {
|
||||
grow(size + 1);
|
||||
if (index != size) {
|
||||
System.arraycopy(a, index, a, index + 1, size - index);
|
||||
}
|
||||
a[index] = k;
|
||||
size++;
|
||||
assert size <= a.length;
|
||||
}
|
||||
|
||||
public boolean add(final long k) {
|
||||
grow(size + 1);
|
||||
a[size++] = k;
|
||||
assert size <= a.length;
|
||||
return true;
|
||||
}
|
||||
|
||||
public long getLong(final int index) {
|
||||
if (index >= size) {
|
||||
throw new IndexOutOfBoundsException(
|
||||
"Index (" + index + ") is greater than or equal to list size (" + size + ")");
|
||||
}
|
||||
return a[index];
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
size = 0;
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unlikely-arg-type")
|
||||
@Override
|
||||
public boolean equals(final Object o) {
|
||||
if (o == this) {
|
||||
return true;
|
||||
}
|
||||
if (!(o instanceof StrippedLongArrayList)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int s = size();
|
||||
if (s != ((StrippedLongArrayList)o).size()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
final long[] a1 = a;
|
||||
final long[] a2 = ((StrippedLongArrayList)o).a;
|
||||
if (a1 == a2) {
|
||||
return true;
|
||||
}
|
||||
while (s-- != 0) {
|
||||
if (a1[s] != a2[s]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
}
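
A short usage sketch of the list above, as it is used to collect name hashes before building the Xor16 filter; note that elements() exposes the backing array, so only the first size() slots are meaningful:

StrippedLongArrayList nameHashes = new StrippedLongArrayList();
nameHashes.add(0x9E3779B97F4A7C15L);
nameHashes.add(42L);
long first = nameHashes.getLong(0);        // 0x9E3779B97F4A7C15L
int count = nameHashes.size();             // 2
long[] backing = nameHashes.elements();    // may be longer than count; extra slots are unspecified
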
@@ -13,15 +13,15 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.intellij.util.lang.fastutil;
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
public final class StrippedLongOpenHashSet {
|
||||
final class StrippedLongSet {
|
||||
/**
|
||||
* The array of keys.
|
||||
*/
|
||||
private transient long[] key;
|
||||
public transient long[] keys;
|
||||
/**
|
||||
* The mask for wrapping a position counter.
|
||||
*/
|
||||
@@ -63,35 +63,21 @@ public final class StrippedLongOpenHashSet {
|
||||
* @param f the load factor.
|
||||
*/
|
||||
|
||||
public StrippedLongOpenHashSet(final int expected, final float f) {
|
||||
StrippedLongSet(final int expected, final float f) {
|
||||
if (f <= 0 || f > 1) {
|
||||
throw new IllegalArgumentException("Load factor must be greater than 0 and smaller than or equal to 1");
|
||||
}
|
||||
if (expected < 0) {
|
||||
throw new IllegalArgumentException("The expected number of elements must be nonnegative");
|
||||
throw new IllegalArgumentException("The expected number of elements must be non-negative");
|
||||
}
|
||||
this.f = f;
|
||||
minN = n = Hash.arraySize(expected, f);
|
||||
mask = n - 1;
|
||||
maxFill = Hash.maxFill(n, f);
|
||||
key = new long[n + 1];
|
||||
keys = new long[n + 1];
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
|
||||
*
|
||||
* @param expected the expected number of elements in the hash set.
|
||||
*/
|
||||
public StrippedLongOpenHashSet(final int expected) {
|
||||
this(expected, Hash.DEFAULT_LOAD_FACTOR);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new hash set with initial expected
|
||||
* {@link Hash#DEFAULT_INITIAL_SIZE} elements and
|
||||
* {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
|
||||
*/
|
||||
public StrippedLongOpenHashSet() {
|
||||
StrippedLongSet() {
|
||||
this(Hash.DEFAULT_INITIAL_SIZE, Hash.DEFAULT_LOAD_FACTOR);
|
||||
}
|
||||
|
||||
@@ -99,6 +85,10 @@ public final class StrippedLongOpenHashSet {
|
||||
return containsNull ? size - 1 : size;
|
||||
}
|
||||
|
||||
public boolean hasNull() {
|
||||
return containsNull;
|
||||
}
|
||||
|
||||
public boolean add(final long k) {
|
||||
int pos;
|
||||
if (k == 0) {
|
||||
@@ -109,7 +99,7 @@ public final class StrippedLongOpenHashSet {
|
||||
}
|
||||
else {
|
||||
long curr;
|
||||
final long[] key = this.key;
|
||||
final long[] key = this.keys;
|
||||
// The starting point.
|
||||
if (!((curr = key[pos = (int)Hash.mix(k) & mask]) == 0)) {
|
||||
if (curr == k) {
|
||||
@@ -139,7 +129,7 @@ public final class StrippedLongOpenHashSet {
|
||||
// Shift entries with the same hash.
|
||||
int last, slot;
|
||||
long curr;
|
||||
final long[] key = this.key;
|
||||
final long[] key = this.keys;
|
||||
for (; ; ) {
|
||||
pos = (last = pos) + 1 & mask;
|
||||
for (; ; ) {
|
||||
@@ -168,7 +158,7 @@ public final class StrippedLongOpenHashSet {
|
||||
|
||||
private boolean removeNullEntry() {
|
||||
containsNull = false;
|
||||
key[n] = 0;
|
||||
keys[n] = 0;
|
||||
size--;
|
||||
if (n > minN && size < maxFill / 4 && n > Hash.DEFAULT_INITIAL_SIZE) {
|
||||
rehash(n / 2);
|
||||
@@ -184,7 +174,7 @@ public final class StrippedLongOpenHashSet {
|
||||
return false;
|
||||
}
|
||||
long curr;
|
||||
final long[] key = this.key;
|
||||
final long[] key = this.keys;
|
||||
int pos;
|
||||
// The starting point.
|
||||
if ((curr = key[pos = (int)Hash.mix(k) & mask]) == 0) {
|
||||
@@ -208,7 +198,7 @@ public final class StrippedLongOpenHashSet {
|
||||
return containsNull;
|
||||
}
|
||||
long curr;
|
||||
final long[] key = this.key;
|
||||
final long[] key = this.keys;
|
||||
int pos;
|
||||
// The starting point.
|
||||
if ((curr = key[pos = (int)Hash.mix(k) & mask]) == 0) {
|
||||
@@ -238,7 +228,7 @@ public final class StrippedLongOpenHashSet {
|
||||
/**
|
||||
* An iterator over a hash set.
|
||||
*/
|
||||
public final class SetIterator {
|
||||
final class SetIterator {
|
||||
/**
|
||||
* The index of the last entry returned, if positive or zero; initially,
|
||||
* {@link #n}.
|
||||
@@ -249,10 +239,11 @@ public final class StrippedLongOpenHashSet {
|
||||
* A downward counter measuring how many entries must still be returned.
|
||||
*/
|
||||
int c = size;
|
||||
|
||||
/**
|
||||
* A boolean telling us whether we should return the null key.
|
||||
*/
|
||||
boolean mustReturnNull = StrippedLongOpenHashSet.this.containsNull;
|
||||
boolean mustReturnNull = StrippedLongSet.this.containsNull;
|
||||
|
||||
public boolean hasNext() {
|
||||
return c != 0;
|
||||
@@ -265,9 +256,9 @@ public final class StrippedLongOpenHashSet {
|
||||
c--;
|
||||
if (mustReturnNull) {
|
||||
mustReturnNull = false;
|
||||
return key[n];
|
||||
return keys[n];
|
||||
}
|
||||
long[] key = StrippedLongOpenHashSet.this.key;
|
||||
long[] key = StrippedLongSet.this.keys;
|
||||
for (; ; ) {
|
||||
long v = key[--pos];
|
||||
if (v != 0) {
|
||||
@@ -281,19 +272,9 @@ public final class StrippedLongOpenHashSet {
|
||||
return new SetIterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Rehashes the set.
|
||||
*
|
||||
* <p>
|
||||
* This method implements the basic rehashing strategy, and may be overridden by
|
||||
* subclasses implementing different rehashing strategies (e.g., disk-based
|
||||
* rehashing). However, you should not override this method unless you
|
||||
* understand the internal workings of this class.
|
||||
*
|
||||
* @param newN the new size
|
||||
*/
|
||||
@SuppressWarnings("StatementWithEmptyBody")
|
||||
private void rehash(final int newN) {
|
||||
final long[] key = this.key;
|
||||
final long[] key = this.keys;
|
||||
final int mask = newN - 1; // Note that this is used by the hashing macro
|
||||
final long[] newKey = new long[newN + 1];
|
||||
int i = n, pos;
|
||||
@@ -307,29 +288,16 @@ public final class StrippedLongOpenHashSet {
|
||||
n = newN;
|
||||
this.mask = mask;
|
||||
maxFill = Hash.maxFill(n, f);
|
||||
this.key = newKey;
|
||||
this.keys = newKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a hash code for this set.
|
||||
* <p>
|
||||
* This method overrides the generic method provided by the superclass. Since
|
||||
* {@code equals()} is not overriden, it is important that the value returned by
|
||||
* this method is the same value as the one returned by the overriden method.
|
||||
*
|
||||
* @return a hash code for this set.
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int h = 0;
|
||||
for (int j = realSize(), i = 0; j-- != 0; ) {
|
||||
while (key[i] == 0) {
|
||||
i++;
|
||||
}
|
||||
h += Hash.long2int(key[i]);
|
||||
i++;
|
||||
public long[] toArray() {
|
||||
long[] result = new long[size];
|
||||
SetIterator iterator = iterator();
|
||||
int i = 0;
|
||||
while (iterator.hasNext()) {
|
||||
result[i++] = iterator.nextLong();
|
||||
}
|
||||
// Zero / null have hash zero.
|
||||
return h;
|
||||
return result;
|
||||
}
}
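
A short usage sketch of the set above, assuming same-package access; duplicates are collapsed and toArray() materializes the distinct values in hash-table order:

StrippedLongSet packageHashes = new StrippedLongSet();
packageHashes.add(101L);
packageHashes.add(101L);                   // already present - ignored
packageHashes.add(-7L);
long[] distinct = packageHashes.toArray(); // two values, order follows the hash table
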
@@ -0,0 +1,200 @@
|
||||
/*
|
||||
* Copyright (C) 2002-2021 Sebastiano Vigna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.function.IntFunction;
|
||||
import java.util.function.LongFunction;
|
||||
|
||||
final class StrippedLongToObjectMap<V> implements LongFunction<V> {
|
||||
/**
|
||||
* The array of keys.
|
||||
*/
|
||||
private long[] keys;
|
||||
/**
|
||||
* The array of values.
|
||||
*/
|
||||
private V[] values;
|
||||
/**
|
||||
* The mask for wrapping a position counter.
|
||||
*/
|
||||
private int mask;
|
||||
/**
|
||||
* Whether this map contains the key zero.
|
||||
*/
|
||||
private boolean containsNullKey;
|
||||
/**
|
||||
* The current table size.
|
||||
*/
|
||||
private int tableSize;
|
||||
/**
|
||||
* Threshold after which we rehash. It must be the table size times loadFactor.
|
||||
*/
|
||||
private int maxFill;
|
||||
|
||||
/**
|
||||
* Number of entries in the set (including the key zero, if present).
|
||||
*/
|
||||
private int size;
|
||||
|
||||
private final IntFunction<V[]> valueArrayFactory;
|
||||
|
||||
StrippedLongToObjectMap(IntFunction<V[]> valueArrayFactory, int expectedCapacity) {
|
||||
this.valueArrayFactory = valueArrayFactory;
|
||||
tableSize = Hash.arraySize(expectedCapacity, Hash.DEFAULT_LOAD_FACTOR);
|
||||
mask = tableSize - 1;
|
||||
maxFill = Hash.maxFill(tableSize, Hash.DEFAULT_LOAD_FACTOR);
|
||||
keys = new long[tableSize + 1];
|
||||
values = valueArrayFactory.apply(tableSize + 1);
|
||||
}
|
||||
|
||||
StrippedLongToObjectMap(StrippedLongToObjectMap<V> original) {
|
||||
valueArrayFactory = original.valueArrayFactory;
|
||||
tableSize = original.tableSize;
|
||||
mask = original.mask;
|
||||
maxFill = original.maxFill;
|
||||
size = original.size;
|
||||
keys = original.keys.clone();
|
||||
values = original.values.clone();
|
||||
containsNullKey = original.containsNullKey;
|
||||
}
|
||||
|
||||
private int realSize() {
|
||||
return containsNullKey ? size - 1 : size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Index is negative for non-existing key.
|
||||
*/
|
||||
public int index(final long key) {
|
||||
if (key == 0) {
|
||||
return containsNullKey ? tableSize : -(tableSize + 1);
|
||||
}
|
||||
|
||||
long current;
|
||||
long[] keys = this.keys;
|
||||
int index;
|
||||
// the starting point
|
||||
if ((current = keys[index = (int)Hash.mix(key) & mask]) == 0) {
|
||||
return -(index + 1);
|
||||
}
|
||||
if (key == current) {
|
||||
return index;
|
||||
}
|
||||
// there's always an unused entry
|
||||
while (true) {
|
||||
if ((current = keys[index = index + 1 & mask]) == 0) {
|
||||
return -(index + 1);
|
||||
}
|
||||
if (key == current) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void addByIndex(int index, long key, V value) {
|
||||
replaceByIndex(-index - 1, key, value);
|
||||
if (size++ >= maxFill) {
|
||||
rehash(Hash.arraySize(size + 1, Hash.DEFAULT_LOAD_FACTOR));
|
||||
}
|
||||
}
|
||||
|
||||
public void replaceByIndex(int index, long key, @NotNull V value) {
|
||||
if (index == tableSize) {
|
||||
containsNullKey = true;
|
||||
}
|
||||
keys[index] = key;
|
||||
values[index] = value;
|
||||
}
|
||||
|
||||
public V getByIndex(int index) {
|
||||
return values[index];
|
||||
}
|
||||
|
||||
@Override
|
||||
public V apply(long k) {
|
||||
if (k == 0) {
|
||||
return containsNullKey ? values[tableSize] : null;
|
||||
}
|
||||
|
||||
long curr;
|
||||
final long[] key = this.keys;
|
||||
int pos;
|
||||
// The starting point.
|
||||
if ((curr = key[pos = (int)Hash.mix(k) & mask]) == 0) {
|
||||
return null;
|
||||
}
|
||||
if (k == curr) {
|
||||
return values[pos];
|
||||
}
|
||||
// There's always an unused entry.
|
||||
while (true) {
|
||||
if ((curr = key[pos = pos + 1 & mask]) == 0) {
|
||||
return null;
|
||||
}
|
||||
if (k == curr) {
|
||||
return values[pos];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return size == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rehashes the map.
|
||||
*
|
||||
* <p>
|
||||
* This method implements the basic rehashing strategy, and may be overridden by
|
||||
* subclasses implementing different rehashing strategies (e.g., disk-based
|
||||
* rehashing). However, you should not override this method unless you
|
||||
* understand the internal workings of this class.
|
||||
*
|
||||
* @param newN the new size
|
||||
*/
|
||||
@SuppressWarnings("DuplicatedCode")
|
||||
private void rehash(final int newN) {
|
||||
final long[] keys = this.keys;
|
||||
final V[] values = this.values;
|
||||
final int mask = newN - 1; // Note that this is used by the hashing macro
|
||||
final long[] newKey = new long[newN + 1];
|
||||
final V[] newValue = valueArrayFactory.apply(newN + 1);
|
||||
int i = tableSize;
|
||||
int pos;
|
||||
for (int j = realSize(); j-- != 0; ) {
|
||||
//noinspection StatementWithEmptyBody
|
||||
while (keys[--i] == 0) ;
|
||||
if (!(newKey[pos = (int)Hash.mix(keys[i]) & mask] == 0)) {
|
||||
//noinspection StatementWithEmptyBody
|
||||
while (!(newKey[pos = pos + 1 & mask] == 0)) ;
|
||||
}
|
||||
newKey[pos] = keys[i];
|
||||
newValue[pos] = values[i];
|
||||
}
|
||||
newValue[newN] = values[tableSize];
|
||||
tableSize = newN;
|
||||
this.mask = mask;
|
||||
maxFill = Hash.maxFill(tableSize, Hash.DEFAULT_LOAD_FACTOR);
|
||||
this.keys = newKey;
|
||||
this.values = newValue;
|
||||
}
}
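
A short usage sketch of the map above; index() returns a negative value for a missing key, and that same negative value is what addByIndex expects when inserting into the free slot it encodes:

StrippedLongToObjectMap<String> map = new StrippedLongToObjectMap<>(String[]::new, 16);
long key = 123_456_789L;
int index = map.index(key);
if (index < 0) {
  map.addByIndex(index, key, "first");     // negative index encodes the free slot
}
else {
  map.replaceByIndex(index, key, "updated");
}
String value = map.apply(key);             // LongFunction-style lookup, null when absent
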
@@ -1,14 +1,11 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import com.intellij.ReviseWhenPortedToJDK;
|
||||
import com.intellij.openapi.util.text.StringUtilRt;
|
||||
import com.intellij.util.UrlUtilRt;
|
||||
import org.jetbrains.annotations.ApiStatus;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.lang.invoke.MethodHandles;
|
||||
@@ -32,6 +29,8 @@ import java.util.function.Predicate;
|
||||
* Should be constructed using {@link #build()} method.
|
||||
*/
|
||||
public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataConsumer {
|
||||
private static final boolean mimicJarUrlConnection = Boolean.parseBoolean(System.getProperty("idea.mimic.jar.url.connection", "false"));
|
||||
|
||||
private static final boolean isParallelCapable = registerAsParallelCapable();
|
||||
private static final ClassLoader appClassLoader = UrlClassLoader.class.getClassLoader();
|
||||
|
||||
@@ -41,6 +40,7 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
protected final ClassPath classPath;
|
||||
private final ClassLoadingLocks<String> classLoadingLocks;
|
||||
private final boolean isBootstrapResourcesAllowed;
|
||||
private final boolean isSystemClassLoader;
|
||||
|
||||
protected final @NotNull ClassPath.ClassDataConsumer classDataConsumer =
|
||||
ClassPath.recordLoadingTime ? new ClassPath.MeasuringClassDataConsumer(this) : this;
|
||||
@@ -89,7 +89,6 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
|
||||
/** @deprecated use {@link #build()} (left for compatibility with `java.system.class.loader` setting) */
|
||||
@Deprecated
|
||||
@ReviseWhenPortedToJDK("9")
|
||||
public UrlClassLoader(@NotNull ClassLoader parent) {
|
||||
this(createDefaultBuilderForJdk(parent), null, isParallelCapable);
|
||||
|
||||
@@ -120,16 +119,15 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
String[] parts = System.getProperty("java.class.path").split(System.getProperty("path.separator"));
|
||||
configuration.files = new ArrayList<>(parts.length);
|
||||
for (String s : parts) {
|
||||
configuration.files.add(new File(s).toPath());
|
||||
configuration.files.add(Paths.get(s));
|
||||
}
|
||||
}
|
||||
|
||||
configuration.isSystemClassLoader = true;
|
||||
configuration.parent = parent.getParent();
|
||||
configuration.lockJars = true;
|
||||
configuration.useCache = true;
|
||||
configuration.isClassPathIndexEnabled = true;
|
||||
configuration.isBootstrapResourcesAllowed = Boolean.parseBoolean(System.getProperty("idea.allow.bootstrap.resources", "true"));
|
||||
configuration.autoAssignUrlsWithProtectionDomain();
|
||||
return configuration;
|
||||
}
|
||||
|
||||
@@ -137,35 +135,15 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
this(builder, null, isParallelCapable);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Do not extend UrlClassLoader. If you cannot avoid it, use {@link #UrlClassLoader(Builder, boolean)}.
|
||||
*/
|
||||
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
|
||||
@Deprecated
|
||||
protected UrlClassLoader(@NotNull UrlClassLoader.Builder builder) {
|
||||
this(builder, null, false);
|
||||
}
|
||||
|
||||
protected UrlClassLoader(@NotNull UrlClassLoader.Builder builder,
|
||||
@Nullable Function<Path, ResourceFile> resourceFileFactory,
|
||||
boolean isParallelCapable) {
|
||||
this(builder, resourceFileFactory, isParallelCapable, false);
|
||||
}
|
||||
|
||||
protected UrlClassLoader(@NotNull UrlClassLoader.Builder builder,
|
||||
@Nullable Function<Path, ResourceFile> resourceFileFactory,
|
||||
boolean isParallelCapable,
|
||||
boolean isMimicJarUrlConnectionNeeded) {
|
||||
super(builder.parent);
|
||||
|
||||
isSystemClassLoader = builder.isSystemClassLoader;
|
||||
files = builder.files;
|
||||
|
||||
Set<Path> urlsWithProtectionDomain = builder.pathsWithProtectionDomain;
|
||||
if (urlsWithProtectionDomain == null) {
|
||||
urlsWithProtectionDomain = Collections.emptySet();
|
||||
}
|
||||
|
||||
classPath = new ClassPath(files, urlsWithProtectionDomain, builder, resourceFileFactory, isMimicJarUrlConnectionNeeded);
|
||||
classPath = new ClassPath(files, builder, resourceFileFactory, mimicJarUrlConnection);
|
||||
|
||||
isBootstrapResourcesAllowed = builder.isBootstrapResourcesAllowed;
|
||||
classLoadingLocks = isParallelCapable ? new ClassLoadingLocks<>() : null;
|
||||
@@ -177,6 +155,7 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
this.files = files;
|
||||
this.classPath = classPath;
|
||||
isBootstrapResourcesAllowed = false;
|
||||
isSystemClassLoader = false;
|
||||
classLoadingLocks = new ClassLoadingLocks<>();
|
||||
}
|
||||
|
||||
@@ -214,13 +193,20 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
|
||||
@Override
|
||||
protected Class<?> findClass(@NotNull String name) throws ClassNotFoundException {
|
||||
if (name.startsWith("com.intellij.util.lang.")) {
|
||||
String fileNameWithoutExtension = name.replace('.', '/');
|
||||
String fileName = fileNameWithoutExtension + ClasspathCache.CLASS_EXTENSION;
|
||||
long packageNameHash = ClasspathCache.getPackageNameHash(fileNameWithoutExtension, fileNameWithoutExtension.lastIndexOf('/'));
|
||||
|
||||
// com.intellij.util.lang, org.jetbrains.xxh3, org.jetbrains.ikv
|
||||
// see XxHash3Test.packages
|
||||
if (isSystemClassLoader &&
|
||||
(packageNameHash == -9217824570049207139L || packageNameHash == -1976620678582843062L || packageNameHash == 4571982292824530778L)) {
|
||||
return appClassLoader.loadClass(name);
|
||||
}
|
||||
|
||||
Class<?> clazz;
|
||||
try {
|
||||
clazz = classPath.findClass(name, classDataConsumer);
|
||||
clazz = classPath.findClass(name, fileName, packageNameHash, classDataConsumer);
|
||||
}
|
||||
catch (IOException e) {
|
||||
throw new ClassNotFoundException(name, e);
|
||||
@@ -231,7 +217,7 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
return clazz;
|
||||
}
|
||||
|
||||
private void definePackageIfNeeded(String name, Loader loader) throws IOException {
|
||||
private void definePackageIfNeeded(String name) {
|
||||
int lastDotIndex = name.lastIndexOf('.');
|
||||
if (lastDotIndex == -1) {
|
||||
return;
|
||||
@@ -244,20 +230,7 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
}
|
||||
|
||||
try {
|
||||
Map<Loader.Attribute, String> attributes = loader.getAttributes();
|
||||
if (attributes == null || attributes.isEmpty()) {
|
||||
definePackage(packageName, null, null, null, null, null, null, null);
|
||||
}
|
||||
else {
|
||||
definePackage(packageName,
|
||||
attributes.get(Loader.Attribute.SPEC_TITLE),
|
||||
attributes.get(Loader.Attribute.SPEC_VERSION),
|
||||
attributes.get(Loader.Attribute.SPEC_VENDOR),
|
||||
attributes.get(Loader.Attribute.IMPL_TITLE),
|
||||
attributes.get(Loader.Attribute.IMPL_VERSION),
|
||||
attributes.get(Loader.Attribute.IMPL_VENDOR),
|
||||
null);
|
||||
}
|
||||
definePackage(packageName, null, null, null, null, null, null, null);
|
||||
}
|
||||
catch (IllegalArgumentException ignore) {
|
||||
// do nothing, the package is already defined by another thread
|
||||
@@ -274,20 +247,20 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isByteBufferSupported(@NotNull String name, @Nullable ProtectionDomain protectionDomain) {
|
||||
public boolean isByteBufferSupported(@NotNull String name) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> consumeClassData(@NotNull String name, byte[] data, Loader loader, @Nullable ProtectionDomain protectionDomain) throws IOException {
|
||||
definePackageIfNeeded(name, loader);
|
||||
return super.defineClass(name, data, 0, data.length, protectionDomain == null ? getProtectionDomain() : protectionDomain);
|
||||
public Class<?> consumeClassData(@NotNull String name, byte[] data, Loader loader) throws IOException {
|
||||
definePackageIfNeeded(name);
|
||||
return super.defineClass(name, data, 0, data.length, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> consumeClassData(@NotNull String name, ByteBuffer data, Loader loader, @Nullable ProtectionDomain protectionDomain) throws IOException {
|
||||
definePackageIfNeeded(name, loader);
|
||||
return super.defineClass(name, data, protectionDomain == null ? getProtectionDomain() : protectionDomain);
|
||||
public Class<?> consumeClassData(@NotNull String name, ByteBuffer data, Loader loader) throws IOException {
|
||||
definePackageIfNeeded(name);
|
||||
return super.defineClass(name, data, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -348,7 +321,7 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
}
|
||||
|
||||
public final void processResources(@NotNull String dir,
|
||||
@NotNull Predicate<? super String> fileNameFilter,
|
||||
@NotNull Predicate<String> fileNameFilter,
|
||||
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException {
|
||||
classPath.processResources(dir, fileNameFilter, consumer);
|
||||
}
|
||||
@@ -366,7 +339,10 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
@ApiStatus.Internal
|
||||
public @Nullable BiPredicate<String, Boolean> resolveScopeManager;
|
||||
|
||||
public @Nullable Class<?> loadClassInsideSelf(@NotNull String name, boolean forceLoadFromSubPluginClassloader) throws IOException {
|
||||
public @Nullable Class<?> loadClassInsideSelf(String name,
|
||||
String fileName,
|
||||
long packageNameHash,
|
||||
boolean forceLoadFromSubPluginClassloader) throws IOException {
|
||||
synchronized (getClassLoadingLock(name)) {
|
||||
Class<?> c = findLoadedClass(name);
|
||||
if (c != null) {
|
||||
@@ -387,7 +363,7 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
return c;
|
||||
}
|
||||
}
|
||||
return classPath.findClass(name, classDataConsumer);
|
||||
return classPath.findClass(name, fileName, packageNameHash, classDataConsumer);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -479,12 +455,34 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
@SuppressWarnings("SameParameterValue")
|
||||
private static boolean endsWith(@NotNull CharSequence text, @NotNull CharSequence suffix) {
|
||||
int l1 = text.length();
|
||||
int l2 = suffix.length();
|
||||
if (l1 < l2) return false;
|
||||
|
||||
for (int i = l1 - 1; i >= l1 - l2; i--) {
|
||||
if (text.charAt(i) != suffix.charAt(i + l2 - l1)) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@SuppressWarnings("SameParameterValue")
|
||||
private static int lastIndexOf(@NotNull CharSequence s, char c, int start, int end) {
|
||||
start = Math.max(start, 0);
|
||||
for (int i = Math.min(end, s.length()) - 1; i >= start; i--) {
|
||||
if (s.charAt(i) == c) return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
@SuppressWarnings("DuplicatedCode")
|
||||
private static void processDots(StringBuilder result, int dots, int start) {
|
||||
if (dots == 2) {
|
||||
int pos = -1;
|
||||
if (!StringUtilRt.endsWith(result, "/../") && !"../".contentEquals(result)) {
|
||||
pos = StringUtilRt.lastIndexOf(result, '/', start, result.length() - 1);
|
||||
if (!endsWith(result, "/../") && !"../".contentEquals(result)) {
|
||||
pos = lastIndexOf(result, '/', start, result.length() - 1);
|
||||
if (pos >= 0) {
|
||||
++pos; // separator found, trim to next char
|
||||
}
|
||||
@@ -528,7 +526,7 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
@SuppressWarnings({"UseOfSystemOutOrSystemErr", "SameParameterValue"})
|
||||
private void logError(String message, Throwable t) {
|
||||
try {
|
||||
Class<?> logger = Class.forName("com.intellij.openapi.diagnostic.Logger", false, this);
|
||||
Class<?> logger = loadClass("com.intellij.openapi.diagnostic.Logger");
|
||||
MethodHandles.Lookup lookup = MethodHandles.lookup();
|
||||
Object instance = lookup.findStatic(logger, "getInstance", MethodType.methodType(logger, Class.class)).invoke(getClass());
|
||||
lookup.findVirtual(logger, "error", MethodType.methodType(void.class, String.class, Throwable.class))
|
||||
@@ -560,13 +558,12 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
private static final boolean isClassPathIndexEnabledGlobalValue = Boolean.parseBoolean(System.getProperty("idea.classpath.index.enabled", "true"));
|
||||
|
||||
List<Path> files = Collections.emptyList();
|
||||
@Nullable Set<Path> pathsWithProtectionDomain;
|
||||
ClassLoader parent;
|
||||
boolean lockJars = true;
|
||||
boolean useCache;
|
||||
boolean isSystemClassLoader;
|
||||
boolean isClassPathIndexEnabled;
|
||||
boolean isBootstrapResourcesAllowed;
|
||||
boolean errorOnMissingJar = true;
|
||||
@Nullable CachePoolImpl cachePool;
|
||||
Predicate<? super Path> cachingCondition;
|
||||
|
||||
@@ -591,14 +588,6 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks URLs that are signed by Sun/Oracle and whose signatures must be verified.
|
||||
*/
|
||||
@NotNull UrlClassLoader.Builder urlsWithProtectionDomain(@NotNull Set<Path> value) {
|
||||
pathsWithProtectionDomain = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
public @NotNull UrlClassLoader.Builder parent(ClassLoader parent) {
|
||||
this.parent = parent;
|
||||
return this;
|
||||
@@ -669,40 +658,8 @@ public class UrlClassLoader extends ClassLoader implements ClassPath.ClassDataCo
|
||||
return this;
|
||||
}
|
||||
|
||||
public @NotNull UrlClassLoader.Builder setLogErrorOnMissingJar(boolean log) {
|
||||
errorOnMissingJar = log;
|
||||
return this;
|
||||
}
|
||||
|
||||
public @NotNull UrlClassLoader.Builder autoAssignUrlsWithProtectionDomain() {
|
||||
Set<Path> result = null;
|
||||
for (Path path : files) {
|
||||
if (isUrlNeedsProtectionDomain(path)) {
|
||||
if (result == null) {
|
||||
result = new HashSet<>();
|
||||
}
|
||||
result.add(path);
|
||||
}
|
||||
}
|
||||
pathsWithProtectionDomain = result;
|
||||
return this;
|
||||
}
|
||||
|
||||
public @NotNull UrlClassLoader get() {
|
||||
return new UrlClassLoader(this, null, isParallelCapable);
|
||||
}
|
||||
|
||||
private static boolean isUrlNeedsProtectionDomain(@NotNull Path file) {
|
||||
String path = file.toString();
|
||||
// BouncyCastle needs a protection domain
|
||||
if (path.endsWith(".jar")) {
|
||||
int offset = path.lastIndexOf(file.getFileSystem().getSeparator().charAt(0)) + 1;
|
||||
//noinspection SpellCheckingInspection
|
||||
if (path.startsWith("bcprov-", offset) || path.startsWith("bcpkix-", offset)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
Copyright (C) 2002-2020 Sebastiano Vigna
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
@@ -1,345 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2002-2020 Sebastiano Vigna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.intellij.util.lang.fastutil;
|
||||
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
public final class StrippedIntOpenHashSet {
|
||||
/**
|
||||
* The array of keys.
|
||||
*/
|
||||
private transient int[] key;
|
||||
/**
|
||||
* The mask for wrapping a position counter.
|
||||
*/
|
||||
private transient int mask;
|
||||
/**
|
||||
* Whether this set contains the null key.
|
||||
*/
|
||||
private transient boolean containsNull;
|
||||
/**
|
||||
* The current table size. Note that an additional element is allocated for
|
||||
* storing the null key.
|
||||
*/
|
||||
private transient int n;
|
||||
/**
|
||||
* Threshold after which we rehash. It must be the table size times {@link #f}.
|
||||
*/
|
||||
private transient int maxFill;
|
||||
/**
|
||||
* We never resize below this threshold, which is the construction-time {#n}.
|
||||
*/
|
||||
private final transient int minN;
|
||||
/**
|
||||
* Number of entries in the set (including the null key, if present).
|
||||
*/
|
||||
private int size;
|
||||
/**
|
||||
* The acceptable load factor.
|
||||
*/
|
||||
private final float f;
|
||||
|
||||
/**
|
||||
* Creates a new hash set.
|
||||
*
|
||||
* <p>
|
||||
* The actual table size will be the least power of two greater than
|
||||
* {@code expected}/{@code f}.
|
||||
*
|
||||
* @param expected the expected number of elements in the hash set.
|
||||
* @param f the load factor.
|
||||
*/
|
||||
|
||||
public StrippedIntOpenHashSet(final int expected, final float f) {
|
||||
if (f <= 0 || f > 1) {
|
||||
throw new IllegalArgumentException("Load factor must be greater than 0 and smaller than or equal to 1");
|
||||
}
|
||||
if (expected < 0) {
|
||||
throw new IllegalArgumentException("The expected number of elements must be nonnegative");
|
||||
}
|
||||
this.f = f;
|
||||
minN = n = Hash.arraySize(expected, f);
|
||||
mask = n - 1;
|
||||
maxFill = Hash.maxFill(n, f);
|
||||
key = new int[n + 1];
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
|
||||
*
|
||||
* @param expected the expected number of elements in the hash set.
|
||||
*/
|
||||
public StrippedIntOpenHashSet(final int expected) {
|
||||
this(expected, Hash.DEFAULT_LOAD_FACTOR);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new hash set with initial expected
|
||||
* {@link Hash#DEFAULT_INITIAL_SIZE} elements and
|
||||
* {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
|
||||
*/
|
||||
public StrippedIntOpenHashSet() {
|
||||
this(Hash.DEFAULT_INITIAL_SIZE, Hash.DEFAULT_LOAD_FACTOR);
|
||||
}
|
||||
|
||||
private int realSize() {
|
||||
return containsNull ? size - 1 : size;
|
||||
}
|
||||
|
||||
public boolean add(final int k) {
|
||||
int pos;
|
||||
if (k == 0) {
|
||||
if (containsNull) {
|
||||
return false;
|
||||
}
|
||||
containsNull = true;
|
||||
}
|
||||
else {
|
||||
int curr;
|
||||
final int[] key = this.key;
|
||||
// The starting point.
|
||||
if (!((curr = key[pos = Hash.mix(k) & mask]) == 0)) {
|
||||
if (curr == k) {
|
||||
return false;
|
||||
}
|
||||
while (!((curr = key[pos = pos + 1 & mask]) == 0)) {
|
||||
if (curr == k) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
key[pos] = k;
|
||||
}
|
||||
if (size++ >= maxFill) {
|
||||
rehash(Hash.arraySize(size + 1, f));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Shifts left entries with the specified hash code, starting at the specified
|
||||
* position, and empties the resulting free entry.
|
||||
*
|
||||
* @param pos a starting position.
|
||||
*/
|
||||
private void shiftKeys(int pos) {
|
||||
// Shift entries with the same hash.
|
||||
int last, slot;
|
||||
int curr;
|
||||
final int[] key = this.key;
|
||||
for (; ; ) {
|
||||
pos = (last = pos) + 1 & mask;
|
||||
for (; ; ) {
|
||||
if ((curr = key[pos]) == 0) {
|
||||
key[last] = 0;
|
||||
return;
|
||||
}
|
||||
slot = Hash.mix(curr) & mask;
|
||||
if (last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos) {
|
||||
break;
|
||||
}
|
||||
pos = pos + 1 & mask;
|
||||
}
|
||||
key[last] = curr;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean removeEntry(final int pos) {
|
||||
size--;
|
||||
shiftKeys(pos);
|
||||
if (n > minN && size < maxFill / 4 && n > Hash.DEFAULT_INITIAL_SIZE) {
|
||||
rehash(n / 2);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean removeNullEntry() {
|
||||
containsNull = false;
|
||||
key[n] = 0;
|
||||
size--;
|
||||
if (n > minN && size < maxFill / 4 && n > Hash.DEFAULT_INITIAL_SIZE) {
|
||||
rehash(n / 2);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public boolean remove(final int k) {
|
||||
if (k == 0) {
|
||||
if (containsNull) {
|
||||
return removeNullEntry();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
int curr;
|
||||
final int[] key = this.key;
|
||||
int pos;
|
||||
// The starting point.
|
||||
if ((curr = key[pos = Hash.mix(k) & mask]) == 0) {
|
||||
return false;
|
||||
}
|
||||
if (k == curr) {
|
||||
return removeEntry(pos);
|
||||
}
|
||||
while (true) {
|
||||
if ((curr = key[pos = pos + 1 & mask]) == 0) {
|
||||
return false;
|
||||
}
|
||||
if (k == curr) {
|
||||
return removeEntry(pos);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public boolean contains(final int k) {
|
||||
if (k == 0) {
|
||||
return containsNull;
|
||||
}
|
||||
int curr;
|
||||
final int[] key = this.key;
|
||||
int pos;
|
||||
// The starting point.
|
||||
if ((curr = key[pos = Hash.mix(k) & mask]) == 0) {
|
||||
return false;
|
||||
}
|
||||
if (k == curr) {
|
||||
return true;
|
||||
}
|
||||
while (true) {
|
||||
if ((curr = key[pos = pos + 1 & mask]) == 0) {
|
||||
return false;
|
||||
}
|
||||
if (k == curr) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return size == 0;
|
||||
}
|
||||
|
||||
public int[] toArray() {
|
||||
int[] result = new int[size];
|
||||
SetIterator iterator = iterator();
|
||||
int i = 0;
|
||||
while (iterator.hasNext()) {
|
||||
result[i++] = iterator.nextInt();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* An iterator over a hash set.
|
||||
*/
|
||||
public final class SetIterator {
|
||||
/**
|
||||
* The index of the last entry returned, if positive or zero; initially,
|
||||
* {@link #n}
|
||||
*/
|
||||
int pos = n;
|
||||
|
||||
/**
|
||||
* A downward counter measuring how many entries must still be returned.
|
||||
*/
|
||||
int c = size;
|
||||
/**
|
||||
* A boolean telling us whether we should return the null key.
|
||||
*/
|
||||
boolean mustReturnNull = StrippedIntOpenHashSet.this.containsNull;
|
||||
|
||||
public boolean hasNext() {
|
||||
return c != 0;
|
||||
}
|
||||
|
||||
public int nextInt() {
|
||||
if (!hasNext()) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
c--;
|
||||
if (mustReturnNull) {
|
||||
mustReturnNull = false;
|
||||
return key[n];
|
||||
}
|
||||
final int[] key = StrippedIntOpenHashSet.this.key;
|
||||
for (; ; ) {
|
||||
int v = key[--pos];
|
||||
if (v != 0) {
|
||||
return v;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public SetIterator iterator() {
|
||||
return new SetIterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Rehashes the set.
|
||||
*
|
||||
* <p>
|
||||
* This method implements the basic rehashing strategy, and may be overriden by
|
||||
* subclasses implementing different rehashing strategies (e.g., disk-based
|
||||
* rehashing). However, you should not override this method unless you
|
||||
* understand the internal workings of this class.
|
||||
*
|
||||
* @param newN the new size
|
||||
*/
|
||||
private void rehash(final int newN) {
|
||||
final int[] key = this.key;
|
||||
final int mask = newN - 1; // Note that this is used by the hashing macro
|
||||
final int[] newKey = new int[newN + 1];
|
||||
int i = n, pos;
|
||||
for (int j = realSize(); j-- != 0; ) {
|
||||
while (key[--i] == 0) ;
|
||||
if (!(newKey[pos = Hash.mix(key[i]) & mask] == 0)) {
|
||||
while (!(newKey[pos = pos + 1 & mask] == 0)) ;
|
||||
}
|
||||
newKey[pos] = key[i];
|
||||
}
|
||||
n = newN;
|
||||
this.mask = mask;
|
||||
maxFill = Hash.maxFill(n, f);
|
||||
this.key = newKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a hash code for this set.
|
||||
* <p>
|
||||
* This method overrides the generic method provided by the superclass. Since
|
||||
* {@code equals()} is not overriden, it is important that the value returned by
|
||||
* this method is the same value as the one returned by the overriden method.
|
||||
*
|
||||
* @return a hash code for this set.
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int h = 0;
|
||||
for (int j = realSize(), i = 0; j-- != 0; ) {
|
||||
while (key[i] == 0) {
|
||||
i++;
|
||||
}
|
||||
h += key[i];
|
||||
i++;
|
||||
}
|
||||
// Zero / null have hash zero.
|
||||
return h;
|
||||
}
|
||||
}
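For reference, the class removed above is a stripped-down fastutil open-addressing set: the table size is a power of two, a lookup walks the table linearly starting from Hash.mix(k) & mask, zero marks an empty slot, and the integer key 0 itself is tracked by the separate containsNull flag. A minimal sketch of that probe loop, with a stand-in mix function since Hash is not part of this hunk:

final class ProbeSketch {
  // In the removed class the table is new int[n + 1] with mask == key.length - 2.
  static boolean contains(int[] table, int mask, boolean containsZero, int k) {
    if (k == 0) {
      return containsZero;            // 0 doubles as the empty-slot marker, so it is tracked separately
    }
    int pos = mix(k) & mask;          // starting bucket
    for (int curr; (curr = table[pos]) != 0; pos = (pos + 1) & mask) {
      if (curr == k) {
        return true;                  // found after zero or more linear-probe steps
      }
    }
    return false;                     // an empty slot ends the probe chain: the key is absent
  }

  // Stand-in for Hash.mix: the MurmurHash3 finalizer, which scrambles the bits of x.
  static int mix(int x) {
    x ^= x >>> 16;
    x *= 0x85ebca6b;
    x ^= x >>> 13;
    x *= 0xc2b2ae35;
    return x ^ (x >>> 16);
  }
}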
|
||||
@@ -4,7 +4,6 @@
|
||||
<exclude-output />
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
<sourceFolder url="file://$MODULE_DIR$/test" isTestSource="true" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
@@ -20,9 +19,5 @@
|
||||
<orderEntry type="library" name="caffeine" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.base" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.classLoader" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.zip" />
|
||||
<orderEntry type="library" scope="TEST" name="JUnit5" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="assertJ" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="JUnit5Params" level="project" />
|
||||
</component>
|
||||
</module>
|
||||
@@ -1,121 +0,0 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv
|
||||
|
||||
import com.intellij.util.lang.ByteBufferCleaner
|
||||
import org.jetbrains.ikv.IkvIndexBuilder.Entry
|
||||
import org.minperf.RecSplitEvaluator
|
||||
import org.minperf.RecSplitGenerator
|
||||
import org.minperf.RecSplitSettings
|
||||
import org.minperf.UniversalHash
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.ByteOrder
|
||||
import java.nio.channels.FileChannel
|
||||
import java.nio.file.Path
|
||||
import java.nio.file.StandardOpenOption
|
||||
import java.util.*
|
||||
|
||||
class IkvIndexBuilder(private val writeSize: Boolean = true, private val settings: RecSplitSettings) {
|
||||
private val entries = ArrayList<Entry>()
|
||||
|
||||
data class Entry(val key: Int, val offset: Int, val size: Int)
|
||||
|
||||
private class EntryHash : UniversalHash<Entry> {
|
||||
override fun universalHash(key: Entry, index: Long) = UniversalHash.IntHash.hashInt(key.key, index)
|
||||
}
|
||||
|
||||
fun add(entry: Entry) {
|
||||
entries.add(entry)
|
||||
}
|
||||
|
||||
fun write(writer: (ByteBuffer) -> Unit) {
|
||||
val hash = EntryHash()
|
||||
val keyData = RecSplitGenerator(hash, settings).generate(entries) {
|
||||
ByteBuffer.allocateDirect(it).order(ByteOrder.LITTLE_ENDIAN)
|
||||
}
|
||||
|
||||
try {
|
||||
writer(keyData)
|
||||
keyData.flip()
|
||||
|
||||
val buffer = ByteBuffer
|
||||
.allocateDirect((entries.size * (if (writeSize) Long.SIZE_BYTES else Int.SIZE_BYTES)) +
|
||||
(Int.SIZE_BYTES * 2) + 1)
|
||||
.order(ByteOrder.LITTLE_ENDIAN)
|
||||
try {
|
||||
// write offsets in key index order
|
||||
val evaluator = RecSplitEvaluator(keyData, hash, settings)
|
||||
keyData.flip()
|
||||
entries.sortWith(Comparator { o1, o2 -> evaluator.evaluate(o1).compareTo(evaluator.evaluate(o2)) })
|
||||
|
||||
if (writeSize) {
|
||||
val longBuffer = buffer.asLongBuffer()
|
||||
for (entry in entries) {
|
||||
longBuffer.put(entry.offset.toLong() shl 32 or (entry.size.toLong() and 0xffffffffL))
|
||||
}
|
||||
buffer.position(buffer.position() + (longBuffer.position() * Long.SIZE_BYTES))
|
||||
}
|
||||
else {
|
||||
val intBuffer = buffer.asIntBuffer()
|
||||
for (entry in entries) {
|
||||
intBuffer.put(entry.offset)
|
||||
}
|
||||
buffer.position(buffer.position() + (intBuffer.position() * Int.SIZE_BYTES))
|
||||
}
|
||||
|
||||
buffer.putInt(entries.size)
|
||||
buffer.putInt(keyData.remaining())
|
||||
buffer.put(if (writeSize) 1 else 0)
|
||||
buffer.flip()
|
||||
writer(buffer)
|
||||
}
|
||||
finally {
|
||||
ByteBufferCleaner.unmapBuffer(buffer)
|
||||
}
|
||||
}
|
||||
finally {
|
||||
ByteBufferCleaner.unmapBuffer(keyData)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun sizeUnawareIkvWriter(file: Path): IkvWriter {
|
||||
return IkvWriter(channel = FileChannel.open(file, EnumSet.of(StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)),
|
||||
writeSize = false)
|
||||
}
|
||||
|
||||
class IkvWriter(private val channel: FileChannel,
|
||||
settings: RecSplitSettings = RecSplitSettings.DEFAULT_SETTINGS,
|
||||
writeSize: Boolean = true) : AutoCloseable {
|
||||
private val indexBuilder = IkvIndexBuilder(writeSize, settings = settings)
|
||||
private var position = 0
|
||||
|
||||
fun write(key: Int, data: ByteArray) {
|
||||
indexBuilder.add(Entry(key, position, data.size))
|
||||
writeBuffer(ByteBuffer.wrap(data))
|
||||
}
|
||||
|
||||
fun write(key: Int, data: ByteBuffer) {
|
||||
indexBuilder.add(Entry(key, position, data.remaining()))
|
||||
var currentPosition = position.toLong()
|
||||
do {
|
||||
currentPosition += channel.write(data, currentPosition)
|
||||
}
|
||||
while (data.hasRemaining())
|
||||
position = currentPosition.toInt()
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
channel.use {
|
||||
indexBuilder.write(::writeBuffer)
|
||||
}
|
||||
}
|
||||
|
||||
private fun writeBuffer(value: ByteBuffer) {
|
||||
var currentPosition = position
|
||||
do {
|
||||
currentPosition += channel.write(value, currentPosition.toLong())
|
||||
}
|
||||
while (value.hasRemaining())
|
||||
position = currentPosition
|
||||
}
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv;
|
||||
|
||||
import com.intellij.util.io.Murmur3_32Hash;
|
||||
import net.openshift.hash.XxHash3;
|
||||
import org.openjdk.jmh.annotations.*;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
||||
/*
|
||||
Benchmark Mode Cnt Score Error Units
|
||||
StringHashBenchmark.murmur3 avgt 10 293.428 ± 2.642 ns/op
|
||||
StringHashBenchmark.xxh3 avgt 10 277.485 ± 9.273 ns/op
|
||||
*/
|
||||
|
||||
@BenchmarkMode(Mode.AverageTime)
|
||||
@OutputTimeUnit(TimeUnit.NANOSECONDS)
|
||||
@Measurement(iterations = 5, time = 4)
|
||||
@Fork(2)
|
||||
public class StringHashBenchmark {
|
||||
private static final String data;
|
||||
|
||||
static {
|
||||
byte[] b = new byte[200];
|
||||
new Random(42).nextBytes(b);
|
||||
data = new BigInteger(b).toString(Character.MAX_RADIX);
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
public int murmur3() {
|
||||
return Murmur3_32Hash.MURMUR3_32.hashString(data, 0, data.length());
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
public int xxh3() {
|
||||
return XxHash3.hashUnencodedChars32(data);
|
||||
}
|
||||
}
|
||||
@@ -3,15 +3,15 @@
|
||||
<component name="NewModuleRootManager" inherit-compiler-output="true">
|
||||
<exclude-output />
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="true" />
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
<orderEntry type="library" scope="TEST" name="jmh-core" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.zip" scope="TEST" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.ex" scope="TEST" />
|
||||
<orderEntry type="library" scope="TEST" name="kotlin-stdlib-jdk8" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.rt" scope="TEST" />
|
||||
<orderEntry type="library" scope="TEST" name="jmh-generator-annprocess" level="project" />
|
||||
<orderEntry type="library" name="jmh-core" level="project" />
|
||||
<orderEntry type="library" name="jmh-generator-annprocess" level="project" />
|
||||
<orderEntry type="library" name="kotlin-stdlib-jdk8" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.immutableKeyValueStore" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.zip" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.rt.java8" />
|
||||
</component>
|
||||
</module>
|
||||
@@ -0,0 +1 @@
|
||||
Cannot be located in test sources due to an IntelliJ IDEA bug - the annotation processor cannot be found.
|
||||
@@ -1,8 +1,7 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv
|
||||
|
||||
import net.openshift.hash.XxHash3
|
||||
import org.minperf.RecSplitSettings
|
||||
import org.jetbrains.xxh3.Xxh3
|
||||
import java.nio.channels.FileChannel
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
@@ -19,7 +18,7 @@ internal fun generateDb(file: Path, count: Int, settings: RecSplitSettings): Lis
|
||||
writer.use {
|
||||
for (i in 0 until count) {
|
||||
val data = random.nextBytes(random.nextInt(64, 512))
|
||||
val key = XxHash3.hash32(data)
|
||||
val key = Xxh3.hash32(data)
|
||||
writer.write(key, data)
|
||||
list.add(Pair(key, data))
|
||||
}
|
||||
@@ -0,0 +1,276 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv;
|
||||
|
||||
import com.intellij.util.lang.Xor16;
|
||||
import org.openjdk.jmh.annotations.*;
|
||||
import org.openjdk.jmh.infra.Blackhole;
|
||||
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/*
  Benchmark                                Mode  Cnt      Score     Error  Units
  BloomFilterBenchmark.guavaConstruct      avgt   25   1705,049 ±  13,914  us/op
  BloomFilterBenchmark.guavaGet            avgt   25   1691,062 ±  16,181  us/op
  BloomFilterBenchmark.ideaConstruct       avgt   25    318,380 ±   0,641  us/op
  BloomFilterBenchmark.ideaGet             avgt   25    586,279 ±  11,676  us/op
  BloomFilterBenchmark.libFilterConstruct  avgt   25  19744,587 ±  89,751  us/op
  BloomFilterBenchmark.libFilterGet        avgt   25    206,051 ±   1,445  us/op
  BloomFilterBenchmark.xorConstruct        avgt   15    400.139 ±   1.886  us/op
  BloomFilterBenchmark.xorFilterGet        avgt   15    129.594 ±   0.255  us/op

  https://gist.github.com/develar/de9a2eb4934e55b281604d70fd00c5e3 - BlockSplitBloomFilter from
  https://github.com/apache/parquet-mr/blob/master/parquet-column/src/main/java/org/apache/parquet/column/values/bloomfilter/BlockSplitBloomFilter.java
  is not suitable here because its filter data becomes very large at a false-positive probability of 0.005.

  https://gist.github.com/develar/974a587f8180e183ce25cf308ffae39a
*/
|
||||
@SuppressWarnings("CommentedOutCode")
|
||||
@BenchmarkMode(Mode.AverageTime)
|
||||
@OutputTimeUnit(TimeUnit.MICROSECONDS)
|
||||
@Measurement(time = 5)
|
||||
@Fork(3)
|
||||
public class BloomFilterBenchmark {
|
||||
private static final double PROBABILITY = 0.005d;
|
||||
|
||||
@State(Scope.Benchmark)
|
||||
public static class ConstructState {
|
||||
public long[] keys = new long[10_000];
|
||||
|
||||
@Setup
|
||||
public void setup() {
|
||||
generateKeys(keys);
|
||||
}
|
||||
}
|
||||
|
||||
@State(Scope.Benchmark)
|
||||
public static class IdeaGetState {
|
||||
public long[] keys = new long[10_000];
|
||||
BloomFilterBase filter;
|
||||
|
||||
@Setup
|
||||
public void setup() {
|
||||
generateKeys(keys);
|
||||
|
||||
filter = new BloomFilterBase(keys.length, PROBABILITY);
|
||||
for (long key : keys) {
|
||||
int hash = (int)(key >> 32);
|
||||
int hash2 = (int)key;
|
||||
filter.addIt(hash, hash2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@State(Scope.Benchmark)
|
||||
public static class XorFilterGetState {
|
||||
public long[] keys = new long[10_000];
|
||||
Xor16 filter;
|
||||
|
||||
@Setup
|
||||
public void setup() {
|
||||
generateKeys(keys);
|
||||
|
||||
filter = Xor16.construct(keys, 0, keys.length);
|
||||
}
|
||||
}
|
||||
|
||||
//@State(Scope.Benchmark)
|
||||
//public static class LibFilterGetState {
|
||||
// public long[] keys = new long[10_000];
|
||||
// BlockFilter filter;
|
||||
//
|
||||
// @Setup
|
||||
// public void setup() {
|
||||
// generateKeys(keys);
|
||||
//
|
||||
// filter = new BlockFilter(keys.length, PROBABILITY);
|
||||
// for (long key : keys) {
|
||||
// filter.addHash64(key);
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
|
||||
//@State(Scope.Benchmark)
|
||||
//public static class GuavaGetState {
|
||||
// public long[] keys = new long[10_000];
|
||||
// BloomFilter<Long> filter;
|
||||
//
|
||||
// @Setup
|
||||
// public void setup() {
|
||||
// generateKeys(keys);
|
||||
//
|
||||
// filter = BloomFilter.create(Funnels.longFunnel(), keys.length, PROBABILITY);
|
||||
// for (long key : keys) {
|
||||
// filter.put(key);
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
|
||||
//@State(Scope.Benchmark)
|
||||
//public static class BlockSplitGetState {
|
||||
// public long[] keys = new long[10_000];
|
||||
// BlockSplitBloomFilter filter;
|
||||
//
|
||||
// @Setup
|
||||
// public void setup() {
|
||||
// generateKeys(keys);
|
||||
//
|
||||
// BlockSplitBloomFilter.BloomFilterGenerator generator = new BlockSplitBloomFilter.BloomFilterGenerator(keys.length, PROBABILITY);
|
||||
// for (long key : keys) {
|
||||
// generator.add(key);
|
||||
// }
|
||||
// filter = new BlockSplitBloomFilter(generator.getData());
|
||||
// }
|
||||
//}
|
||||
|
||||
@Benchmark
|
||||
public BloomFilterBase ideaConstruct(ConstructState state) {
|
||||
BloomFilterBase filter = new BloomFilterBase(state.keys.length, PROBABILITY);
|
||||
for (long key : state.keys) {
|
||||
filter.addIt((int)(key >> 32), (int)key);
|
||||
}
|
||||
return filter;
|
||||
}
|
||||
|
||||
//@Benchmark
|
||||
//public BlockFilter libFilterConstruct(ConstructState state) {
|
||||
// BlockFilter filter = new BlockFilter(state.keys.length, PROBABILITY);
|
||||
// for (long key : state.keys) {
|
||||
// filter.addHash64(key);
|
||||
// }
|
||||
// return filter;
|
||||
//}
|
||||
|
||||
//@Benchmark
|
||||
//public BloomFilter<Long> guavaConstruct(ConstructState state) {
|
||||
// BloomFilter<Long> filter = BloomFilter.create(Funnels.longFunnel(), state.keys.length, PROBABILITY);
|
||||
// for (long key : state.keys) {
|
||||
// filter.put(key);
|
||||
// }
|
||||
// return filter;
|
||||
//}
|
||||
|
||||
@Benchmark
|
||||
public Xor16 xorConstruct(ConstructState state) {
|
||||
return Xor16.construct(state.keys, 0, state.keys.length);
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
public void xorFilterGet(XorFilterGetState state, Blackhole blackhole) {
|
||||
Xor16 filter = state.filter;
|
||||
for (long key : state.keys) {
|
||||
blackhole.consume(filter.mightContain(key));
|
||||
blackhole.consume(filter.mightContain(key + 1));
|
||||
}
|
||||
}
|
||||
|
||||
//@Benchmark
|
||||
//public BlockSplitBloomFilter.BloomFilterGenerator blockSplitConstruct(ConstructState state) {
|
||||
// BlockSplitBloomFilter.BloomFilterGenerator generator = new BlockSplitBloomFilter.BloomFilterGenerator(state.keys.length, PROBABILITY);
|
||||
// for (long key : state.keys) {
|
||||
// generator.add(key);
|
||||
// }
|
||||
// return generator;
|
||||
//}
|
||||
|
||||
@Benchmark
|
||||
public void ideaGet(IdeaGetState state, Blackhole blackhole) {
|
||||
BloomFilterBase filter = state.filter;
|
||||
for (long key : state.keys) {
|
||||
blackhole.consume(filter.maybeContains((int)(key >> 32), (int)key));
|
||||
blackhole.consume(filter.maybeContains((int)(key >> 32) + 1, (int)key + 1));
|
||||
}
|
||||
}
|
||||
|
||||
//@Benchmark
|
||||
//public void libFilterGet(LibFilterGetState state, Blackhole blackhole) {
|
||||
// BlockFilter filter = state.filter;
|
||||
// for (long key : state.keys) {
|
||||
// blackhole.consume(filter.mightContain(key));
|
||||
// blackhole.consume(filter.mightContain(key + 1));
|
||||
// }
|
||||
//}
|
||||
|
||||
//@Benchmark
|
||||
//public void guavaGet(GuavaGetState state, Blackhole blackhole) {
|
||||
// BloomFilter<Long> filter = state.filter;
|
||||
// for (long key : state.keys) {
|
||||
// blackhole.consume(filter.mightContain(key));
|
||||
// blackhole.consume(filter.mightContain(key + 1));
|
||||
// }
|
||||
//}
|
||||
|
||||
//@Benchmark
|
||||
//public void blockSplitGet(BlockSplitGetState state, Blackhole blackhole) {
|
||||
// BlockSplitBloomFilter filter = state.filter;
|
||||
// for (long key : state.keys) {
|
||||
// blackhole.consume(filter.mightContain(key));
|
||||
// blackhole.consume(filter.mightContain(key + 1));
|
||||
// }
|
||||
//}
|
||||
|
||||
private static void generateKeys(long[] keys) {
|
||||
Random random = new Random(42);
|
||||
for (int i = 0, n = keys.length; i < n; i++) {
|
||||
keys[i] = random.nextLong();
|
||||
}
|
||||
}
|
||||
}
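A note on running it: JMH benchmarks such as the class above are normally launched through the Runner API backed by the jmh-generator-annprocess output that the module depends on. A minimal launcher sketch (the include pattern and the fork override are illustrative only, not part of the patch):

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public final class RunBloomFilterBenchmark {
  public static void main(String[] args) throws RunnerException {
    Options options = new OptionsBuilder()
      .include("BloomFilterBenchmark") // regex over generated benchmark names
      .forks(1)                        // override @Fork(3) for a quick local run
      .build();
    new Runner(options).run();
  }
}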
|
||||
|
||||
final class BloomFilterBase {
|
||||
private final int myHashFunctionCount;
|
||||
private final int myBitsCount;
|
||||
private final long[] myElementsSet;
|
||||
private static final int BITS_PER_ELEMENT = 6;
|
||||
|
||||
BloomFilterBase(int _maxElementCount, double probability) {
|
||||
int bitsPerElementFactor = (int)Math.ceil(-Math.log(probability) / (Math.log(2) * Math.log(2)));
|
||||
myHashFunctionCount = (int)Math.ceil(bitsPerElementFactor * Math.log(2));
|
||||
|
||||
int bitsCount = _maxElementCount * bitsPerElementFactor;
|
||||
|
||||
if ((bitsCount & 1) == 0) {
|
||||
++bitsCount;
|
||||
}
|
||||
while (!isPrime(bitsCount)) {
|
||||
bitsCount += 2;
|
||||
}
|
||||
myBitsCount = bitsCount;
|
||||
myElementsSet = new long[(bitsCount >> BITS_PER_ELEMENT) + 1];
|
||||
}
|
||||
|
||||
private static boolean isPrime(int bits) {
|
||||
if ((bits & 1) == 0 || bits % 3 == 0) {
|
||||
return false;
|
||||
}
|
||||
int sqrt = (int)Math.sqrt(bits);
|
||||
for (int i = 6; i <= sqrt; i += 6) {
|
||||
if (bits % (i - 1) == 0 || bits % (i + 1) == 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void addIt(int prime, int prime2) {
|
||||
for (int i = 0; i < myHashFunctionCount; ++i) {
|
||||
int abs = Math.abs((i * prime + prime2 * (myHashFunctionCount - i)) % myBitsCount);
|
||||
myElementsSet[abs >> BITS_PER_ELEMENT] |= (1L << abs);
|
||||
}
|
||||
}
|
||||
|
||||
boolean maybeContains(int prime, int prime2) {
|
||||
for (int i = 0; i < myHashFunctionCount; ++i) {
|
||||
int abs = Math.abs((i * prime + prime2 * (myHashFunctionCount - i)) % myBitsCount);
|
||||
if ((myElementsSet[abs >> BITS_PER_ELEMENT] & (1L << abs)) == 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
public int sizeInBytes() {
|
||||
return 4 * 2 + myElementsSet.length * 8;
|
||||
}
|
||||
}
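The constructor above derives its sizing from the requested false-positive probability: bits per element = ceil(-ln p / ln² 2) and hash-function count = ceil(bitsPerElement · ln 2), with the bit count then bumped up to a prime. A small worked example, assuming the benchmark's PROBABILITY = 0.005 and 10 000 keys, using the same formulas:

public final class BloomSizingExample {
  public static void main(String[] args) {
    double probability = 0.005;
    int maxElementCount = 10_000;

    int bitsPerElementFactor = (int)Math.ceil(-Math.log(probability) / (Math.log(2) * Math.log(2)));
    int hashFunctionCount = (int)Math.ceil(bitsPerElementFactor * Math.log(2));
    int bitsCount = maxElementCount * bitsPerElementFactor;

    System.out.println(bitsPerElementFactor); // 12 bits per key
    System.out.println(hashFunctionCount);    // 9 hash functions
    System.out.println(bitsCount / 8);        // 15000 bytes, before rounding the bit count up to a prime
  }
}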
|
||||
@@ -1,8 +1,8 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv;
|
||||
|
||||
import com.intellij.util.io.Murmur3_32Hash;
|
||||
import net.openshift.hash.XxHash3;
|
||||
import com.intellij.util.lang.Murmur3_32Hash;
|
||||
import org.jetbrains.xxh3.Xxh3;
|
||||
import org.openjdk.jmh.annotations.*;
|
||||
|
||||
import java.util.Random;
|
||||
@@ -35,6 +35,6 @@ public class HashBenchmark {
|
||||
|
||||
@Benchmark
|
||||
public int xxh3() {
|
||||
return XxHash3.hash32(data);
|
||||
return Xxh3.hash32(data);
|
||||
}
|
||||
}
|
||||
@@ -2,8 +2,6 @@
|
||||
package org.jetbrains.ikv;
|
||||
|
||||
import kotlin.Pair;
|
||||
import org.minperf.RecSplitSettings;
|
||||
import org.minperf.UniversalHash;
|
||||
import org.openjdk.jmh.annotations.*;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
@@ -47,6 +45,28 @@ public class IkvBenchmark {
|
||||
}
|
||||
}
|
||||
|
||||
//@State(Scope.Benchmark)
|
||||
//public static class GetState {
|
||||
// public int[] keys = new int[5_000];
|
||||
//
|
||||
// @Setup
|
||||
// public void setup() {
|
||||
// generateKeys(keys);
|
||||
// }
|
||||
//
|
||||
// private static void generateKeys(int[] keys) {
|
||||
// Random random = new Random(42);
|
||||
// for (int i = 0, n = keys.length; i < n; i++) {
|
||||
// keys[i] = random.nextInt();
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//@Benchmark
|
||||
//public ByteBuffer construct(GetState state) {
|
||||
// new RecSplitGenerator<Integer>(UniversalHash.IntHash.INSTANCE, RecSplitSettings.DEFAULT_SETTINGS).generate(state.keys,);
|
||||
//}
|
||||
|
||||
@Benchmark
|
||||
public ByteBuffer lookup(IkvBenchmarkState state) {
|
||||
return state.ikv.getValue(state.key);
|
||||
@@ -0,0 +1,44 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv;
|
||||
|
||||
import com.intellij.util.lang.Murmur3_32Hash;
|
||||
import org.jetbrains.xxh3.Xxh3;
|
||||
import org.openjdk.jmh.annotations.Benchmark;
|
||||
import org.openjdk.jmh.annotations.BenchmarkMode;
|
||||
import org.openjdk.jmh.annotations.Mode;
|
||||
import org.openjdk.jmh.annotations.OutputTimeUnit;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/*
|
||||
Benchmark Mode Cnt Score Error Units
|
||||
StringHashBenchmark.murmur3 avgt 25 59.986 ± 4.639 ns/op
|
||||
StringHashBenchmark.murmur3_unencoded avgt 25 40.002 ± 0.369 ns/op
|
||||
StringHashBenchmark.xxh3 avgt 25 12.475 ± 0.041 ns/op
|
||||
StringHashBenchmark.xxh3_unencoded avgt 25 58.890 ± 0.522 ns/op
|
||||
*/
|
||||
@BenchmarkMode(Mode.AverageTime)
|
||||
@OutputTimeUnit(TimeUnit.NANOSECONDS)
|
||||
public class StringHashBenchmark {
|
||||
private static final String data = "com/intellij/profiler/async/windows/WinAsyncProfilerLocator";
|
||||
|
||||
@Benchmark
|
||||
public int murmur3() {
|
||||
return Murmur3_32Hash.MURMUR3_32.hashString(data, 0, data.length());
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
public int murmur3_unencoded() {
|
||||
return Murmur3_32Hash.MURMUR3_32.hashUnencodedChars(data);
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
public int xxh3() {
|
||||
return Xxh3.hash32(data);
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
public int xxh3_unencoded() {
|
||||
return Xxh3.hashUnencodedChars32(data);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="JAVA_MODULE" version="4">
|
||||
<component name="NewModuleRootManager" inherit-compiler-output="true">
|
||||
<exclude-output />
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
<sourceFolder url="file://$MODULE_DIR$/testSrc" isTestSource="true" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
<orderEntry type="library" name="kotlin-stdlib-jdk8" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.zip" />
|
||||
<orderEntry type="library" name="fastutil-min" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="assertJ" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="JUnit5Params" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="JUnit5" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.rt.java8" scope="TEST" />
|
||||
</component>
|
||||
</module>
|
||||
@@ -0,0 +1,80 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv
|
||||
|
||||
import com.intellij.util.lang.ByteBufferCleaner
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.ByteOrder
|
||||
|
||||
class IkvIndexBuilder<T : IkvIndexEntry>(private val hash: UniversalHash<T>,
|
||||
private val writeSize: Boolean = true,
|
||||
private val settings: RecSplitSettings = RecSplitSettings.DEFAULT_SETTINGS) {
|
||||
private val entries = LinkedHashSet<T>()
|
||||
|
||||
fun add(entry: T) {
|
||||
if (!entries.add(entry)) {
|
||||
throw IllegalStateException("$entry duplicates ${entries.find { it == entry }}\n")
|
||||
}
|
||||
}
|
||||
|
||||
fun write(writer: (ByteBuffer) -> Unit): List<T> {
|
||||
return writeIkvIndex(unsortedEntries = entries, hash = hash, settings = settings, writeSize = writeSize, writer = writer)
|
||||
}
|
||||
}
|
||||
|
||||
interface IkvIndexEntry {
|
||||
val size: Int
|
||||
val offset: Int
|
||||
}
|
||||
|
||||
private fun <T : IkvIndexEntry> writeIkvIndex(unsortedEntries: Collection<T>,
|
||||
hash: UniversalHash<T>,
|
||||
settings: RecSplitSettings = RecSplitSettings.DEFAULT_SETTINGS,
|
||||
writeSize: Boolean = true,
|
||||
writer: (ByteBuffer) -> Unit): List<T> {
|
||||
val keyData = RecSplitGenerator(hash, settings).generate(unsortedEntries) {
|
||||
ByteBuffer.allocateDirect(it).order(ByteOrder.LITTLE_ENDIAN)
|
||||
}
|
||||
|
||||
try {
|
||||
writer(keyData)
|
||||
keyData.flip()
|
||||
|
||||
val buffer = ByteBuffer.allocateDirect((unsortedEntries.size * (if (writeSize) Long.SIZE_BYTES else Int.SIZE_BYTES)) +
|
||||
(Int.SIZE_BYTES * 2) + 1)
|
||||
.order(ByteOrder.LITTLE_ENDIAN)
|
||||
try {
|
||||
// write offsets in key index order
|
||||
val evaluator = RecSplitEvaluator(keyData, hash, settings)
|
||||
keyData.flip()
|
||||
val sortedEntries = unsortedEntries.sortedWith(Comparator { o1, o2 -> evaluator.evaluate(o1).compareTo(evaluator.evaluate(o2)) })
|
||||
|
||||
if (writeSize) {
|
||||
val longBuffer = buffer.asLongBuffer()
|
||||
for (entry in sortedEntries) {
|
||||
longBuffer.put(entry.offset.toLong() shl 32 or (entry.size.toLong() and 0xffffffffL))
|
||||
}
|
||||
buffer.position(buffer.position() + (longBuffer.position() * Long.SIZE_BYTES))
|
||||
}
|
||||
else {
|
||||
val intBuffer = buffer.asIntBuffer()
|
||||
for (entry in sortedEntries) {
|
||||
intBuffer.put(entry.offset)
|
||||
}
|
||||
buffer.position(buffer.position() + (intBuffer.position() * Int.SIZE_BYTES))
|
||||
}
|
||||
|
||||
buffer.putInt(sortedEntries.size)
|
||||
buffer.putInt(keyData.remaining())
|
||||
buffer.put(if (writeSize) 1 else 0)
|
||||
buffer.flip()
|
||||
writer(buffer)
|
||||
return sortedEntries
|
||||
}
|
||||
finally {
|
||||
ByteBufferCleaner.unmapBuffer(buffer)
|
||||
}
|
||||
}
|
||||
finally {
|
||||
ByteBufferCleaner.unmapBuffer(keyData)
|
||||
}
|
||||
}
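When writeSize is true, writeIkvIndex above stores one little-endian long per entry, with the offset in the high 32 bits and the size in the low 32 bits. A self-contained sketch of that packing and of how a reader would unpack it (the reader side is not part of this diff):

public final class PackedEntryExample {
  static long pack(int offset, int size) {
    return ((long)offset << 32) | (size & 0xffffffffL);
  }

  static int offsetOf(long packed) {
    return (int)(packed >>> 32);
  }

  static int sizeOf(long packed) {
    return (int)packed;
  }

  public static void main(String[] args) {
    long packed = pack(1_048_576, 300);
    System.out.println(offsetOf(packed)); // 1048576
    System.out.println(sizeOf(packed));   // 300
  }
}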
|
||||
@@ -0,0 +1,61 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv
|
||||
|
||||
import org.jetbrains.xxh3.Xxh3
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.channels.FileChannel
|
||||
import java.nio.file.Path
|
||||
import java.nio.file.StandardOpenOption
|
||||
import java.util.*
|
||||
|
||||
fun sizeUnawareIkvWriter(file: Path): IkvWriter {
|
||||
return IkvWriter(channel = FileChannel.open(file, EnumSet.of(StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)),
|
||||
writeSize = false)
|
||||
}
|
||||
|
||||
class IkvWriter(private val channel: FileChannel,
|
||||
settings: RecSplitSettings = RecSplitSettings.DEFAULT_SETTINGS,
|
||||
writeSize: Boolean = true) : AutoCloseable {
|
||||
private class Entry(@JvmField val key: Int, override val offset: Int, override val size: Int) : IkvIndexEntry {
|
||||
override fun equals(other: Any?) = key == (other as? Entry)?.key
|
||||
|
||||
override fun hashCode() = key
|
||||
}
|
||||
|
||||
private class EntryHash : UniversalHash<Entry> {
|
||||
override fun universalHash(key: Entry, index: Long) = Xxh3.hashInt(key.key, index)
|
||||
}
|
||||
|
||||
private val indexBuilder = IkvIndexBuilder(hash = EntryHash(), writeSize = writeSize, settings = settings)
|
||||
private var position = 0
|
||||
|
||||
fun write(key: Int, data: ByteArray) {
|
||||
indexBuilder.add(Entry(key, position, data.size))
|
||||
writeBuffer(ByteBuffer.wrap(data))
|
||||
}
|
||||
|
||||
fun write(key: Int, data: ByteBuffer) {
|
||||
indexBuilder.add(Entry(key, position, data.remaining()))
|
||||
var currentPosition = position.toLong()
|
||||
do {
|
||||
currentPosition += channel.write(data, currentPosition)
|
||||
}
|
||||
while (data.hasRemaining())
|
||||
position = currentPosition.toInt()
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
channel.use {
|
||||
indexBuilder.write(::writeBuffer)
|
||||
}
|
||||
}
|
||||
|
||||
private fun writeBuffer(value: ByteBuffer) {
|
||||
var currentPosition = position
|
||||
do {
|
||||
currentPosition += channel.write(value, currentPosition.toLong())
|
||||
}
|
||||
while (value.hasRemaining())
|
||||
position = currentPosition
|
||||
}
|
||||
}
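writeBuffer above relies on the positional FileChannel.write(ByteBuffer, long) overload, which may write fewer bytes than remain and does not move the channel's own position, so the call is looped until the buffer drains. The equivalent Java helper, for reference:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

final class PositionalWrite {
  // Writes the whole buffer starting at 'position' and returns the new end position.
  static long writeFully(FileChannel channel, ByteBuffer buffer, long position) throws IOException {
    long current = position;
    while (buffer.hasRemaining()) {
      current += channel.write(buffer, current);
    }
    return current;
  }
}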
|
||||
@@ -1,7 +1,6 @@
|
||||
// Copyright 2021 Thomas Mueller. Use of this source code is governed by the Apache 2.0 license.
|
||||
package org.minperf;
|
||||
package org.jetbrains.ikv;
|
||||
|
||||
import com.intellij.util.ArrayUtilRt;
|
||||
import it.unimi.dsi.fastutil.longs.LongArrayList;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
@@ -23,7 +22,6 @@ import java.util.function.IntFunction;
|
||||
@SuppressWarnings("DuplicatedCode")
|
||||
public final class RecSplitGenerator<T> {
|
||||
public static final int MAX_FILL = 8;
|
||||
public static final int MAX_BITS_PER_ENTRY = 8;
|
||||
|
||||
final UniversalHash<T> hash;
|
||||
private final RecSplitSettings settings;
|
||||
@@ -304,8 +302,8 @@ public final class RecSplitGenerator<T> {
|
||||
throw new IllegalStateException("Hash has a poor quality, use another one");
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
T[] data = list.toArray((T[])ArrayUtilRt.EMPTY_OBJECT_ARRAY);
|
||||
@SuppressWarnings({"unchecked", "SSBasedInspection"})
|
||||
T[] data = (T[])list.toArray(new Object[0]);
|
||||
list = null;
|
||||
long[] hashes = new long[size];
|
||||
computeHashes(data, 0, size, hash, hashes);
|
||||
@@ -1,13 +1,12 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package org.jetbrains.ikv
|
||||
|
||||
import net.openshift.hash.XxHash3
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import org.jetbrains.xxh3.Xxh3
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.api.io.TempDir
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.ValueSource
|
||||
import org.minperf.UniversalHash
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.channels.FileChannel
|
||||
import java.nio.file.Files
|
||||
@@ -28,7 +27,7 @@ internal class IkvTest {
|
||||
writer.use {
|
||||
for (i in 0 until count) {
|
||||
val data = random.nextBytes(random.nextInt(64, 512))
|
||||
val key = XxHash3.hash32(data)
|
||||
val key = Xxh3.hash32(data)
|
||||
writer.write(key, data)
|
||||
list.add(Pair(key, data))
|
||||
}
|
||||
@@ -47,7 +46,7 @@ internal class IkvTest {
|
||||
val file = tempDir!!.resolve("db")
|
||||
|
||||
val data = random.nextBytes(random.nextInt(64, 512))
|
||||
val key = XxHash3.hash32(data)
|
||||
val key = Xxh3.hash32(data)
|
||||
|
||||
Files.createDirectories(file.parent)
|
||||
FileChannel.open(file, EnumSet.of(StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)).use { channel ->
|
||||
@@ -67,7 +66,7 @@ internal class IkvTest {
|
||||
val file = tempDir!!.resolve("db")
|
||||
|
||||
val data = random.nextBytes(random.nextInt(64, 512))
|
||||
val key = XxHash3.hash32(data)
|
||||
val key = Xxh3.hash32(data)
|
||||
|
||||
Files.createDirectories(file.parent)
|
||||
FileChannel.open(file, EnumSet.of(StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)).use { channel ->
|
||||
@@ -13,9 +13,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.openhft.hash;
|
||||
|
||||
import net.openshift.hash.HashFunction;
|
||||
package org.jetbrains.xxh3;
|
||||
|
||||
import java.nio.Buffer;
|
||||
import java.nio.ByteBuffer;
|
||||
@@ -24,34 +22,34 @@ import static java.nio.ByteOrder.*;
|
||||
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
|
||||
|
||||
final class HashFunctionTest {
|
||||
public static void test(HashFunction f, byte[] data, long eh) {
|
||||
public static void test(byte[] data, long eh) {
|
||||
int len = data.length;
|
||||
ByteBuffer bb = ByteBuffer.wrap(data).order(nativeOrder());
|
||||
testArrays(f, data, eh, len);
|
||||
testByteBuffers(f, eh, len, bb);
|
||||
testArrays(data, eh, len);
|
||||
testByteBuffers(eh, len, bb);
|
||||
}
|
||||
|
||||
private static void testArrays(HashFunction f, byte[] data, long eh, int len) {
|
||||
assertThat(f.hashBytes(data)).isEqualTo(eh);
|
||||
private static void testArrays(byte[] data, long eh, int len) {
|
||||
assertThat(Xxh3.hash(data)).isEqualTo(eh);
|
||||
|
||||
byte[] data2 = new byte[len + 2];
|
||||
System.arraycopy(data, 0, data2, 1, len);
|
||||
assertThat(f.hashBytes(data2, 1, len)).isEqualTo(eh);
|
||||
assertThat(Xxh3.hash(data2, 1, len)).isEqualTo(eh);
|
||||
}
|
||||
|
||||
private static void testByteBuffers(HashFunction f, long eh, int len, ByteBuffer bb) {
|
||||
private static void testByteBuffers(long eh, int len, ByteBuffer bb) {
|
||||
bb.order(LITTLE_ENDIAN);
|
||||
assertThat(f.hashBytes(bb)).isEqualTo(eh);
|
||||
assertThat(Xxh3.hash(bb)).isEqualTo(eh);
|
||||
ByteBuffer bb2 = ByteBuffer.allocate(len + 2).order(LITTLE_ENDIAN);
|
||||
((Buffer)bb2).position(1);
|
||||
bb2.put(bb);
|
||||
assertThat(f.hashBytes(bb2, 1, len)).isEqualTo(eh);
|
||||
assertThat(Xxh3.hash(bb2, 1, len)).isEqualTo(eh);
|
||||
|
||||
((Buffer)bb.order(BIG_ENDIAN)).clear();
|
||||
|
||||
assertThat(f.hashBytes(bb)).isEqualTo(eh);
|
||||
assertThat(Xxh3.hash(bb)).isEqualTo(eh);
|
||||
bb2.order(BIG_ENDIAN);
|
||||
assertThat(f.hashBytes(bb2, 1, len)).isEqualTo(eh);
|
||||
assertThat(Xxh3.hash(bb2, 1, len)).isEqualTo(eh);
|
||||
|
||||
((Buffer)bb.order(nativeOrder())).clear();
|
||||
}
|
||||
@@ -13,14 +13,14 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.openhft.hash;
|
||||
package org.jetbrains.xxh3;
|
||||
|
||||
import net.openshift.hash.HashFunction;
|
||||
import net.openshift.hash.XxHash3;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.junit.jupiter.params.provider.ValueSource;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
@@ -41,7 +41,7 @@ public class XxHash3Test {
|
||||
@ParameterizedTest
|
||||
@MethodSource("data")
|
||||
public void xxh3WithoutSeeds(int len) {
|
||||
test(XxHash3.INSTANCE, XXH3Test_HASHES.HASHES_OF_LOOPING_BYTES_WITHOUT_SEED, len);
|
||||
test(XXH3Test_HASHES.HASHES_OF_LOOPING_BYTES_WITHOUT_SEED, len);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@@ -53,7 +53,7 @@ public class XxHash3Test {
|
||||
data[i] = random.nextLong();
|
||||
}
|
||||
|
||||
long expected = 0;
|
||||
long expected;
|
||||
switch (size) {
|
||||
case 1:
|
||||
expected = 6383185674071107836L;
|
||||
@@ -79,15 +79,53 @@ public class XxHash3Test {
|
||||
default:
|
||||
throw new UnsupportedOperationException("Unknown size");
|
||||
}
|
||||
assertThat(XxHash3.INSTANCE.hashLongs(data)).isEqualTo(expected);
|
||||
assertThat(Xxh3.hashLongs(data)).isEqualTo(expected);
|
||||
}
|
||||
|
||||
private static void test(HashFunction h, @SuppressWarnings("SameParameterValue") long[] hashesOfLoopingBytes, int len) {
|
||||
@Test
|
||||
public void string() {
|
||||
testString("com/intellij/profiler/async/windows/WinAsyncProfilerLocator", 2833214887294487028L);
|
||||
testString("test", -7004795540881933248L);
|
||||
//noinspection SpellCheckingInspection
|
||||
testString("тест буковок", -2011715203481716521L);
|
||||
}
|
||||
|
||||
private static void testString(String s, long expected) {
|
||||
assertThat(Xxh3.hash(s)).describedAs("Hash as string of: " + s).isEqualTo(expected);
|
||||
assertThat(Xxh3.hash(s.getBytes(StandardCharsets.UTF_8))).describedAs("Hash as bytes of: " + s).isEqualTo(expected);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void unencodedString() {
|
||||
testUnencodedString("com/intellij/profiler/async/windows/WinAsyncProfilerLocator", -7916769887311287428L);
|
||||
testUnencodedString("test", -1876252253805819900L);
|
||||
//noinspection SpellCheckingInspection
|
||||
testUnencodedString("тест буковок", -3590458601327935281L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void packages() {
|
||||
checkPackage("com.intellij.util.lang", -9217824570049207139L);
|
||||
checkPackage("org.jetbrains.xxh3", 4571982292824530778L);
|
||||
checkPackage("org.jetbrains.ikv", -1976620678582843062L);
|
||||
checkPackage("com.intellij.idea", -635775336887217634L);
|
||||
}
|
||||
|
||||
private static void checkPackage(String s, long expected) {
|
||||
assertThat(Xx3UnencodedString.hashUnencodedString(s.replace('.', '/'))).describedAs("Hash as string of: " + s).isEqualTo(expected);
|
||||
}
|
||||
|
||||
private static void testUnencodedString(String s, long expected) {
|
||||
assertThat(Xx3UnencodedString.hashUnencodedString(s)).describedAs("Hash as string of: " + s).isEqualTo(expected);
|
||||
}
|
||||
|
||||
@SuppressWarnings("SameParameterValue")
|
||||
private static void test(long[] hashesOfLoopingBytes, int len) {
|
||||
byte[] data = new byte[len];
|
||||
for (int j = 0; j < data.length; j++) {
|
||||
data[j] = (byte)j;
|
||||
}
|
||||
HashFunctionTest.test(h, data, hashesOfLoopingBytes[len]);
|
||||
HashFunctionTest.test(data, hashesOfLoopingBytes[len]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@
|
||||
<orderEntry type="library" name="aalto-xml" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.xmlDom" />
|
||||
<orderEntry type="library" name="kotlinx-coroutines-jdk8" level="project" />
|
||||
<orderEntry type="module" module-name="intellij.platform.util.rt.java8" />
|
||||
</component>
|
||||
<component name="copyright">
|
||||
<Base>
|
||||
|
||||
@@ -1914,9 +1914,6 @@ search.everywhere.show.weights.description=Show item weights in description bar
|
||||
|
||||
autodetect.all.jdks=true
|
||||
autodetect.all.jdks.description=Scan disk and show all found JDKs in "Select Project SDK" dialog
|
||||
idea.lazy.classloading.caches=false
|
||||
idea.lazy.classloading.caches.description=Flag for UrlClassLoader to use lazy caching of package contents
|
||||
idea.use.loader.for.jdk9=true
|
||||
|
||||
toolwindow.active.tab.use.contrast.background=false
|
||||
toolwindow.active.tab.use.contrast.background.description=When enabled contrast color is used for selected tab background in tool windows
|
||||
|
||||
12
platform/util/rt-java8/intellij.platform.util.rt.java8.iml
Normal file
@@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="JAVA_MODULE" version="4">
|
||||
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8" inherit-compiler-output="true">
|
||||
<exclude-output />
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
<orderEntry type="library" name="jetbrains-annotations" level="project" />
|
||||
</component>
|
||||
</module>
|
||||
@@ -1,5 +1,5 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.io;
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import org.jetbrains.annotations.ApiStatus;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
@@ -17,7 +17,7 @@ public final class DirectByteBufferPool {
|
||||
private static final int MIN_SIZE = 2048;
|
||||
private static final int MAX_POOL_SIZE = 32;
|
||||
|
||||
private final ConcurrentSkipListMap<Integer, ByteBuffer> pool = new ConcurrentSkipListMap<Integer, ByteBuffer>();
|
||||
private final ConcurrentSkipListMap<Integer, ByteBuffer> pool = new ConcurrentSkipListMap<>();
|
||||
private final AtomicInteger count = new AtomicInteger();
|
||||
|
||||
public @NotNull ByteBuffer allocate(int requiredSize) {
|
||||
@@ -11,13 +11,13 @@
|
||||
* or implied. See the License for the specific language governing permissions and limitations under
|
||||
* the License.
|
||||
*/
|
||||
package com.intellij.util.io;
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
/**
|
||||
* See MurmurHash3_x86_32 in <a href="https://github.com/aappleby/smhasher/blob/master/src/MurmurHash3.cpp">the C++ implementation</a>.
|
||||
@@ -139,7 +139,7 @@ public final class Murmur3_32Hash {
|
||||
int codePoint = Character.codePointAt(input, i);
|
||||
if (codePoint == c) {
|
||||
// not a valid code point; let the JDK handle invalid Unicode
|
||||
byte[] bytes = input.toString().getBytes(Charset.forName("UTF-8"));
|
||||
byte[] bytes = input.toString().getBytes(StandardCharsets.UTF_8);
|
||||
return hashBytes(bytes, 0, bytes.length);
|
||||
}
|
||||
i++;
|
||||
@@ -360,7 +360,7 @@ public final class Murmur3_32Hash {
|
||||
int codePoint = Character.codePointAt(input, i);
|
||||
if (codePoint == c) {
|
||||
// fall back to JDK getBytes instead of trying to handle invalid surrogates ourselves
|
||||
byte[] bytes = input.subSequence(i, utf16Length).toString().getBytes(Charset.forName("UTF-8"));
|
||||
byte[] bytes = input.subSequence(i, utf16Length).toString().getBytes(StandardCharsets.UTF_8);
|
||||
putBytes(bytes, 0, bytes.length);
|
||||
return;
|
||||
}
|
||||
181
platform/util/rt-java8/src/com/intellij/util/lang/Xor16.java
Normal file
@@ -0,0 +1,181 @@
|
||||
// Copyright 2021 Thomas Mueller. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.util.lang;

import java.nio.ByteBuffer;

/**
 * The xor filter, a new algorithm that can replace a Bloom filter.
 * <p>
 * It needs 1.23 log(1/fpp) bits per key. It is related to the BDZ algorithm [1]
 * (a minimal perfect hash function algorithm).
 * <p>
 * [1] paper: Simple and Space-Efficient Minimal Perfect Hash Functions -
 * http://cmph.sourceforge.net/papers/wads07.pdf
 */
public final class Xor16 {
|
||||
private static final int BITS_PER_FINGERPRINT = 16;
|
||||
private static final int HASHES = 3;
|
||||
private static final int OFFSET = 32;
|
||||
private static final int FACTOR_TIMES_100 = 123;
|
||||
|
||||
private final int blockLength;
|
||||
private final long seed;
|
||||
private final short[] fingerprints;
|
||||
|
||||
private Xor16(short[] fingerprints, int blockLength, long seed) {
|
||||
this.seed = seed;
|
||||
this.fingerprints = fingerprints;
|
||||
this.blockLength = blockLength;
|
||||
}
|
||||
|
||||
public Xor16(ByteBuffer buffer) {
|
||||
seed = buffer.getLong();
|
||||
fingerprints = new short[buffer.getInt()];
|
||||
blockLength = fingerprints.length / HASHES;
|
||||
buffer.asShortBuffer().get(fingerprints);
|
||||
buffer.position(buffer.position() + (fingerprints.length * Short.BYTES));
|
||||
}
|
||||
|
||||
public int sizeInBytes() {
|
||||
return Long.BYTES + Integer.BYTES + (fingerprints.length * Short.BYTES);
|
||||
}
|
||||
|
||||
public void write(ByteBuffer buffer) {
|
||||
buffer.putLong(seed);
|
||||
buffer.putInt(fingerprints.length);
|
||||
buffer.asShortBuffer().put(fingerprints);
|
||||
buffer.position(buffer.position() + (fingerprints.length * Short.BYTES));
|
||||
}
|
||||
|
||||
private static int getArrayLength(int size) {
|
||||
return (int)(OFFSET + (long)FACTOR_TIMES_100 * size / 100);
|
||||
}
|
||||
|
||||
public static int getBlockLength(int keyCount) {
|
||||
return getArrayLength(keyCount) / HASHES;
|
||||
}
|
||||
|
||||
private static long mix(final long x) {
|
||||
long h = x * 0x9E3779B97F4A7C15L;
|
||||
h ^= h >>> 32;
|
||||
return h ^ (h >>> 16);
|
||||
}
|
||||
|
||||
public static Xor16 construct(final long[] keys, final int offset, final int length) {
|
||||
final int arrayLength = getArrayLength(length);
|
||||
final int blockLength = arrayLength / HASHES;
|
||||
final long[] reverseOrder = new long[length];
|
||||
final byte[] reverseH = new byte[length];
|
||||
int reverseOrderPos;
|
||||
// constant seed - reproducible JARs (initial seed just a random number)
|
||||
long seed = 1354212L;
|
||||
do {
|
||||
seed = mix(seed);
|
||||
byte[] t2count = new byte[arrayLength];
|
||||
long[] t2 = new long[arrayLength];
|
||||
for (int i = offset; i < length; i++) {
|
||||
long k = keys[i];
|
||||
for (int hi = 0; hi < HASHES; hi++) {
|
||||
int h = getHash(blockLength, k, seed, hi);
|
||||
t2[h] ^= k;
|
||||
if (t2count[h] > 120) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
t2count[h]++;
|
||||
}
|
||||
}
|
||||
int[] alone = new int[arrayLength];
|
||||
int alonePos = 0;
|
||||
reverseOrderPos = 0;
|
||||
for (int nextAloneCheck = 0; nextAloneCheck < arrayLength; ) {
|
||||
while (nextAloneCheck < arrayLength) {
|
||||
if (t2count[nextAloneCheck] == 1) {
|
||||
alone[alonePos++] = nextAloneCheck;
|
||||
}
|
||||
nextAloneCheck++;
|
||||
}
|
||||
while (alonePos > 0) {
|
||||
int i = alone[--alonePos];
|
||||
if (t2count[i] == 0) {
|
||||
continue;
|
||||
}
|
||||
long k = t2[i];
|
||||
byte found = -1;
|
||||
for (int hi = 0; hi < HASHES; hi++) {
|
||||
int h = getHash(blockLength, k, seed, hi);
|
||||
int newCount = --t2count[h];
|
||||
if (newCount == 0) {
|
||||
found = (byte)hi;
|
||||
}
|
||||
else {
|
||||
if (newCount == 1) {
|
||||
alone[alonePos++] = h;
|
||||
}
|
||||
t2[h] ^= k;
|
||||
}
|
||||
}
|
||||
reverseOrder[reverseOrderPos] = k;
|
||||
reverseH[reverseOrderPos] = found;
|
||||
reverseOrderPos++;
|
||||
}
|
||||
}
|
||||
}
|
||||
while (reverseOrderPos != length);
|
||||
short[] fingerprints = new short[arrayLength];
|
||||
for (int i = reverseOrderPos - 1; i >= 0; i--) {
|
||||
long k = reverseOrder[i];
|
||||
int found = reverseH[i];
|
||||
int change = -1;
|
||||
long hash = hash64(k, seed);
|
||||
int xor = fingerprint(hash);
|
||||
for (int hi = 0; hi < HASHES; hi++) {
|
||||
int h = getHash(blockLength, k, seed, hi);
|
||||
if (found == hi) {
|
||||
change = h;
|
||||
}
|
||||
else {
|
||||
xor ^= fingerprints[h];
|
||||
}
|
||||
}
|
||||
fingerprints[change] = (short)xor;
|
||||
}
|
||||
return new Xor16(fingerprints, blockLength, seed);
|
||||
}
|
||||
|
||||
private static int getHash(int blockLength, long key, @SuppressWarnings("SameParameterValue") long seed, int index) {
|
||||
long r = Long.rotateLeft(hash64(key, seed), 21 * index);
|
||||
r = reduce((int)r, blockLength);
|
||||
r += (long)index * blockLength;
|
||||
return (int)r;
|
||||
}
|
||||
|
||||
public boolean mightContain(long key) {
|
||||
long hash = hash64(key, seed);
|
||||
int f = fingerprint(hash);
|
||||
int r0 = (int)hash;
|
||||
int r1 = (int)Long.rotateLeft(hash, 21);
|
||||
int r2 = (int)Long.rotateLeft(hash, 42);
|
||||
int h0 = reduce(r0, blockLength);
|
||||
int h1 = reduce(r1, blockLength) + blockLength;
|
||||
int h2 = reduce(r2, blockLength) + 2 * blockLength;
|
||||
f ^= fingerprints[h0] ^ fingerprints[h1] ^ fingerprints[h2];
|
||||
return (f & 0xffff) == 0;
|
||||
}
|
||||
|
||||
private static int fingerprint(long hash) {
|
||||
return (int)(hash & ((1 << BITS_PER_FINGERPRINT) - 1));
|
||||
}
|
||||
|
||||
// MurmurHash3-style fmix64 finalizer applied to (x + seed)
private static long hash64(long x, long seed) {
|
||||
x += seed;
|
||||
x = (x ^ (x >>> 33)) * 0xff51afd7ed558ccdL;
|
||||
x = (x ^ (x >>> 33)) * 0xc4ceb9fe1a85ec53L;
|
||||
x = x ^ (x >>> 33);
|
||||
return x;
|
||||
}
|
||||
|
||||
private static int reduce(int hash, int n) {
|
||||
// Lemire's "fastrange": maps a 32-bit hash uniformly onto [0, n) using the high
// 32 bits of a 32x32 -> 64-bit multiply, avoiding a modulo.
// http://lemire.me/blog/2016/06/27/a-fast-alternative-to-the-modulo-reduction/
return (int)(((hash & 0xffffffffL) * n) >>> 32);
|
||||
}
|
||||
}
|
||||
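For readers skimming the diff, a minimal usage sketch of the Xor16 filter defined above (the key values are illustrative, and it assumes the keys handed to construct() are distinct, since the peeling loop keeps retrying seeds otherwise):

// Build a filter over distinct 64-bit keys and probe it.
long[] keys = {101L, 202L, 303L, 404L};
Xor16 filter = Xor16.construct(keys, 0, keys.length);

// No false negatives: every inserted key is reported as present.
assert filter.mightContain(202L);
// False positives occur with probability about 2^-16 (16-bit fingerprints),
// so a positive answer still has to be confirmed against the real data.
boolean maybePresent = filter.mightContain(999L);

// The filter round-trips through a ByteBuffer for persistence.
java.nio.ByteBuffer buffer = java.nio.ByteBuffer.allocate(filter.sizeInBytes());
filter.write(buffer);
buffer.flip();
Xor16 reloaded = new Xor16(buffer);

The constant seed used by construct() keeps the serialized form stable between builds, which is what the reproducible-JARs comment above refers to.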
@@ -13,13 +13,10 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.openshift.hash;
|
||||
package org.jetbrains.xxh3;
|
||||
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
import static net.openshift.hash.Primitives.unsignedByte;
|
||||
import static net.openshift.hash.Primitives.unsignedInt;
|
||||
|
||||
/**
|
||||
* Strategy of reading bytes; defines the abstraction of {@code T} class instances as an ordered byte
* sequence. All {@code getXXX(input, offset)} methods should be consistent with each other in terms of
|
||||
@@ -36,22 +33,7 @@ import static net.openshift.hash.Primitives.unsignedInt;
|
||||
* <li>And so on</li>
|
||||
* </ul>
|
||||
*
|
||||
* <p>{@code getXXX(input, offset)} methods could throw unchecked exceptions when the requested byte
* range is outside the bounds of the byte sequence represented by the given {@code input}.
* However, they may omit such checks for better performance.
|
||||
*
|
||||
* <p>Only the {@link #i8(Object, int)}, {@link #i32(Object, int)} and {@link #i64(Object, int)}
* methods are abstract in this class, so implementing them is sufficient for a valid {@code Access} instance,
* but for efficiency you should override the methods used by the target {@link HashFunction} implementation.
|
||||
*
|
||||
* <p>The {@code Access} API is designed for inputs that actually represent byte sequences laid out
* contiguously in memory. Theoretically, an {@code Access} strategy could be implemented for
* non-contiguous byte sequences, or for abstractions which aren't actually present in memory as they
* are accessed, but this would be awkward, and hashing through such an {@code Access} is expected to
* be slow.
|
||||
*
|
||||
* @param <T> the type of the object to access
|
||||
* @see HashFunction#hash(Object, Access, int, int)
|
||||
*/
|
||||
abstract class Access<T> {
|
||||
/**
|
||||
@@ -75,7 +57,7 @@ abstract class Access<T> {
|
||||
* @return four bytes as an unsigned int value, in the expected byteOrder
|
||||
*/
|
||||
public long u32(T input, int offset) {
|
||||
return unsignedInt(this.i32(input, offset));
|
||||
return i32(input, offset) & 0xFFFFFFFFL;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -89,18 +71,6 @@ abstract class Access<T> {
|
||||
*/
|
||||
public abstract int i32(T input, int offset);
|
||||
|
||||
/**
|
||||
* Shortcut for {@code getByte(input, offset) & 0xFF}. Could be implemented more efficiently.
|
||||
*
|
||||
* @param input the object to access
|
||||
* @param offset offset to the byte to read within the byte sequence represented
|
||||
* by the given object
|
||||
* @return the byte at the given {@code offset}, interpreted as unsigned
|
||||
*/
|
||||
public int u8(T input, int offset) {
|
||||
return unsignedByte(this.i8(input, offset));
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a single byte at the given {@code offset} in the byte sequence represented by the given
|
||||
* {@code input}, returned widened to {@code int}.
|
||||
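The Javadoc above states that implementing i8(), i32() and i64() is enough for a valid Access. As a hedged illustration of that contract, a little-endian byte[] access might look like the sketch below; it is an assumption for illustration only, since Access is package-private and u32() falls back to the default implementation shown in the diff:

final class ByteArrayAccess extends Access<byte[]> {
  @Override
  public long i64(byte[] input, int offset) {
    long v = 0;
    for (int i = 7; i >= 0; i--) {
      v = (v << 8) | (input[offset + i] & 0xFFL);  // little-endian composition
    }
    return v;
  }

  @Override
  public int i32(byte[] input, int offset) {
    return (input[offset] & 0xFF)
           | ((input[offset + 1] & 0xFF) << 8)
           | ((input[offset + 2] & 0xFF) << 16)
           | ((input[offset + 3] & 0xFF) << 24);
  }

  @Override
  public int i8(byte[] input, int offset) {
    // signed byte widened to int; callers mask with 0xFF where they need the unsigned value
    return input[offset];
  }
}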
@@ -0,0 +1,119 @@
|
||||
/*
|
||||
* Copyright 2014 Higher Frequency Trading http://www.higherfrequencytrading.com
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.jetbrains.xxh3;
|
||||
|
||||
import java.nio.ByteOrder;
|
||||
|
||||
final class CharSequenceAccess {
|
||||
static final Access<CharSequence> INSTANCE = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN
|
||||
? LittleEndianCharSequenceAccess.INSTANCE
|
||||
: BigEndianCharSequenceAccess.INSTANCE;
|
||||
|
||||
private static abstract class BaseCharSequenceAccess extends Access<CharSequence> {
|
||||
private static int ix(long offset) {
|
||||
return (int)(offset >> 1);
|
||||
}
|
||||
|
||||
protected static long getLong(CharSequence input, long offset,
|
||||
int char0Off, int char1Off, int char2Off, int char3Off,
|
||||
int char4Off, int delta) {
|
||||
final int base = ix(offset);
|
||||
if (0 == ((int)offset & 1)) {
|
||||
final long char0 = input.charAt(base + char0Off);
|
||||
final long char1 = input.charAt(base + char1Off);
|
||||
final long char2 = input.charAt(base + char2Off);
|
||||
final long char3 = input.charAt(base + char3Off);
|
||||
return char0 | (char1 << 16) | (char2 << 32) | (char3 << 48);
|
||||
}
|
||||
else {
|
||||
final long char0 = input.charAt(base + char0Off + delta) >>> 8;
|
||||
final long char1 = input.charAt(base + char1Off + delta);
|
||||
final long char2 = input.charAt(base + char2Off + delta);
|
||||
final long char3 = input.charAt(base + char3Off + delta);
|
||||
final long char4 = input.charAt(base + char4Off);
|
||||
return char0 | (char1 << 8) | (char2 << 24) | (char3 << 40) | (char4 << 56);
|
||||
}
|
||||
}
|
||||
|
||||
protected static long getUnsignedInt(CharSequence input, int offset, int char0Off, int char1Off, int char2Off, int delta) {
|
||||
final int base = ix(offset);
|
||||
if (0 == (offset & 1)) {
|
||||
final long char0 = input.charAt(base + char0Off);
|
||||
final long char1 = input.charAt(base + char1Off);
|
||||
return char0 | (char1 << 16);
|
||||
}
|
||||
else {
|
||||
final long char0 = input.charAt(base + char0Off + delta) >>> 8;
|
||||
final long char1 = input.charAt(base + char1Off + delta);
|
||||
final long char2 = input.charAt(base + char2Off) & 0xff;
|
||||
return char0 | (char1 << 8) | (char2 << 24);
|
||||
}
|
||||
}
|
||||
|
||||
protected static int getByte(CharSequence input, int offset, int shift) {
|
||||
return input.charAt(ix(offset)) >> shift;
|
||||
}
|
||||
|
||||
private BaseCharSequenceAccess() { }
|
||||
|
||||
@Override
|
||||
public int i32(CharSequence input, int offset) {
|
||||
return (int)u32(input, offset);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class LittleEndianCharSequenceAccess extends BaseCharSequenceAccess {
|
||||
static final BaseCharSequenceAccess INSTANCE = new LittleEndianCharSequenceAccess();
|
||||
|
||||
private LittleEndianCharSequenceAccess() { }
|
||||
|
||||
@Override
|
||||
public long i64(CharSequence input, int offset) {
|
||||
return getLong(input, offset, 0, 1, 2, 3, 4, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long u32(CharSequence input, int offset) {
|
||||
return getUnsignedInt(input, offset, 0, 1, 2, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int i8(CharSequence input, int offset) {
|
||||
return getByte(input, offset, (offset & 1) << 3);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class BigEndianCharSequenceAccess extends BaseCharSequenceAccess {
|
||||
static final BaseCharSequenceAccess INSTANCE = new BigEndianCharSequenceAccess();
|
||||
|
||||
private BigEndianCharSequenceAccess() { }
|
||||
|
||||
@Override
|
||||
public long i64(CharSequence input, int offset) {
|
||||
return getLong(input, offset, 3, 2, 1, 0, 0, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long u32(CharSequence input, int offset) {
|
||||
return getUnsignedInt(input, offset, 1, 0, 0, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int i8(CharSequence input, int offset) {
|
||||
return getByte(input, offset, ((offset & 1) ^ 1) << 3);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.xxh3;

import org.jetbrains.annotations.ApiStatus;

/**
 * Do not use. Only a temporary solution for the class loader implementation, where Java 9+ cannot be used.
 */
@ApiStatus.Internal
public final class Xx3UnencodedString {
  public static long hashUnencodedString(String input) {
    return Xxh3Impl.hash(input, CharSequenceAccess.INSTANCE, 0, input.length() * 2, 0);
  }

  public static long hashUnencodedStringRange(String input, int start, int end) {
    return Xxh3Impl.hash(input, CharSequenceAccess.INSTANCE, start * 2, (end - start) * 2, 0);
  }
}
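A hedged usage note for the helper above: the length passed to Xxh3Impl.hash is in bytes (two per char), and the hash is taken over the raw UTF-16 code units, so no charset encoding is involved. The sample strings below are illustrative:

long full = Xx3UnencodedString.hashUnencodedString("com.example.Main");
// Char-aligned ranges hash the same bytes as the corresponding substring,
// here the chars of "example" (indices 4..10).
long range = Xx3UnencodedString.hashUnencodedStringRange("com.example.Main", 4, 11);
// Equal char sequences give equal hashes; the converse holds only
// probabilistically, so callers still compare the actual keys on lookup.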
platform/util/rt-java8/src/org/jetbrains/xxh3/Xxh3Impl.java (new file, 462 lines)
@@ -0,0 +1,462 @@
|
||||
/*
|
||||
* Copyright 2015 Higher Frequency Trading http://www.higherfrequencytrading.com
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.jetbrains.xxh3;
|
||||
|
||||
import org.jetbrains.annotations.ApiStatus;
|
||||
|
||||
/**
|
||||
* An adapted version of the XXH3 implementation from https://github.com/Cyan4973/xxHash.
|
||||
* This implementation provides endian-independent hash values, but it's slower on big-endian platforms.
|
||||
*/
|
||||
@SuppressWarnings({"DuplicatedCode", "CommentedOutCode"})
|
||||
@ApiStatus.Internal
|
||||
final class Xxh3Impl {
|
||||
private Xxh3Impl() {
|
||||
}
|
||||
|
||||
// Pseudorandom secret taken directly from FARSH
|
||||
//private static final byte[] secret = {
|
||||
// (byte)0xb8, (byte)0xfe, (byte)0x6c, (byte)0x39, (byte)0x23, (byte)0xa4, (byte)0x4b, (byte)0xbe, (byte)0x7c, (byte)0x01, (byte)0x81,
|
||||
// (byte)0x2c, (byte)0xf7, (byte)0x21, (byte)0xad, (byte)0x1c,
|
||||
// (byte)0xde, (byte)0xd4, (byte)0x6d, (byte)0xe9, (byte)0x83, (byte)0x90, (byte)0x97, (byte)0xdb, (byte)0x72, (byte)0x40, (byte)0xa4,
|
||||
// (byte)0xa4, (byte)0xb7, (byte)0xb3, (byte)0x67, (byte)0x1f,
|
||||
// (byte)0xcb, (byte)0x79, (byte)0xe6, (byte)0x4e, (byte)0xcc, (byte)0xc0, (byte)0xe5, (byte)0x78, (byte)0x82, (byte)0x5a, (byte)0xd0,
|
||||
// (byte)0x7d, (byte)0xcc, (byte)0xff, (byte)0x72, (byte)0x21,
|
||||
// (byte)0xb8, (byte)0x08, (byte)0x46, (byte)0x74, (byte)0xf7, (byte)0x43, (byte)0x24, (byte)0x8e, (byte)0xe0, (byte)0x35, (byte)0x90,
|
||||
// (byte)0xe6, (byte)0x81, (byte)0x3a, (byte)0x26, (byte)0x4c,
|
||||
// (byte)0x3c, (byte)0x28, (byte)0x52, (byte)0xbb, (byte)0x91, (byte)0xc3, (byte)0x00, (byte)0xcb, (byte)0x88, (byte)0xd0, (byte)0x65,
|
||||
// (byte)0x8b, (byte)0x1b, (byte)0x53, (byte)0x2e, (byte)0xa3,
|
||||
// (byte)0x71, (byte)0x64, (byte)0x48, (byte)0x97, (byte)0xa2, (byte)0x0d, (byte)0xf9, (byte)0x4e, (byte)0x38, (byte)0x19, (byte)0xef,
|
||||
// (byte)0x46, (byte)0xa9, (byte)0xde, (byte)0xac, (byte)0xd8,
|
||||
// (byte)0xa8, (byte)0xfa, (byte)0x76, (byte)0x3f, (byte)0xe3, (byte)0x9c, (byte)0x34, (byte)0x3f, (byte)0xf9, (byte)0xdc, (byte)0xbb,
|
||||
// (byte)0xc7, (byte)0xc7, (byte)0x0b, (byte)0x4f, (byte)0x1d,
|
||||
// (byte)0x8a, (byte)0x51, (byte)0xe0, (byte)0x4b, (byte)0xcd, (byte)0xb4, (byte)0x59, (byte)0x31, (byte)0xc8, (byte)0x9f, (byte)0x7e,
|
||||
// (byte)0xc9, (byte)0xd9, (byte)0x78, (byte)0x73, (byte)0x64,
|
||||
// (byte)0xea, (byte)0xc5, (byte)0xac, (byte)0x83, (byte)0x34, (byte)0xd3, (byte)0xeb, (byte)0xc3, (byte)0xc5, (byte)0x81, (byte)0xa0,
|
||||
// (byte)0xff, (byte)0xfa, (byte)0x13, (byte)0x63, (byte)0xeb,
|
||||
// (byte)0x17, (byte)0x0d, (byte)0xdd, (byte)0x51, (byte)0xb7, (byte)0xf0, (byte)0xda, (byte)0x49, (byte)0xd3, (byte)0x16, (byte)0x55,
|
||||
// (byte)0x26, (byte)0x29, (byte)0xd4, (byte)0x68, (byte)0x9e,
|
||||
// (byte)0x2b, (byte)0x16, (byte)0xbe, (byte)0x58, (byte)0x7d, (byte)0x47, (byte)0xa1, (byte)0xfc, (byte)0x8f, (byte)0xf8, (byte)0xb8,
|
||||
// (byte)0xd1, (byte)0x7a, (byte)0xd0, (byte)0x31, (byte)0xce,
|
||||
// (byte)0x45, (byte)0xcb, (byte)0x3a, (byte)0x8f, (byte)0x95, (byte)0x16, (byte)0x04, (byte)0x28, (byte)0xaf, (byte)0xd7, (byte)0xfb,
|
||||
// (byte)0xca, (byte)0xbb, (byte)0x4b, (byte)0x40, (byte)0x7e,
|
||||
//};
|
||||
|
||||
private static final long[] secretLong = {
|
||||
-4734510112055689544L, 8988705074615774462L, 107169723235645804L, -9150895811085458631L, 3206846044944704547L, -635991603978286172L,
|
||||
2447473855086509643L, -5971219860401587010L, 2066345149520216444L, -2441886536549236479L, -3108015162914296703L, 7914194659941938988L,
|
||||
-1626409839981944329L, -8941494824140493535L, -8033320652366799699L, -7525369938742813156L, -2623469361688619810L, 8276375387167616468L,
|
||||
4644015609783511405L, -6611157965513653271L, -6583065893254229885L, -5213861871876335728L, -5496743794819540073L, 7472518124495991515L,
|
||||
2262974939099578482L, -3810212738154322880L, 8776142829118792868L, -1839215637059881052L, 5685365492914041783L, -3724786431015557197L,
|
||||
-4554178371385614489L, -1891287204249351393L, 8711581037947681227L, -9045227235349436807L, 6521908138563358438L, -3433288310154277810L,
|
||||
9065845566317379788L, -3711581430728825408L, -14498364963784475L, 8286566680123572856L, 2410270004345854594L, -5178731653526335398L,
|
||||
628288925819764176L, 5046485836271438973L, 8378393743697575884L, -615790245780032769L, 4897510963931521394L, 2613204287568263201L,
|
||||
-8204357891075471176L, -2265833688187779576L, 3882259165203625030L, -8055285457383852172L, -1832905809766104073L, -9086416637098318781L,
|
||||
4215904233249082916L, 2754656949352390798L, 5487137525590930912L, 4344889773235015733L, 2899275987193816720L, 5920048007935066598L,
|
||||
-4948848801086031231L, -7945666784801315270L, -4354493403153806298L, 55047854181858380L, -3818837453329782724L, -8589771024315493848L,
|
||||
-3420260712846345390L, 7336514198459093435L, -8402080243849837679L, 1984792007109443779L, 5988533398925789952L, 3338042034334238923L,
|
||||
-6688317018830679928L, 8188439481968977360L, 7237745495519234917L, 5216419214072683403L, -7545670736427461861L, -6730831521841467821L,
|
||||
982514005898797870L, -500565212929953373L, 5690594596133299313L, 4057454151265110116L, 1817289281226577736L, -1217880312389983593L,
|
||||
5111331831722610082L, -6249044541332063987L, -2402310933491200263L, -5990164332231968690L, -2833645246901970632L, -6280079608045441255L,
|
||||
-384819531158567185L, 8573350489219836230L, 4573118074737974953L, -2071806484620464930L, -7141794803835414356L, 3791154848057698520L,
|
||||
4554437623014685352L, -486612386300594438L, -2523916620961464458L, -4909775443879730369L, -4054404076451619613L, -4051062782047603556L,
|
||||
848866664462761780L, 5695865814404364607L, 2111919702937427193L, -8494546410135897124L, 5875540889195497403L, -2282891677615274041L,
|
||||
5467459601266838471L, -3653580031866876149L, -5418591349844075185L, 6464017090953185821L, 3556072174620004746L, -4021334359191855023L,
|
||||
-6933237364981675040L, 9124231484359888203L, -3927526142850255667L, -2753530472436770380L, 8708212900181324121L, 8320639771003045937L,
|
||||
7238261902898274248L, -1556992608276218209L, -4185422456575899266L, -5997129611619018295L, -8958567948248450855L, 3784058077962335096L,
|
||||
-3227810254839716749L, -1453760514566526364L, -4329134394285701654L, -4196251135427498811L, -9095648454776683604L,
|
||||
-6881001310379625341L, -26878911368670412L, -360392965937173549L, 1439744095735366635L, 7139325810128831939L, -1485321483350670907L,
|
||||
1723580219865931905L, 943481457726914464L, -2518330316883232001L, 5898885483309765626L, -5237161843349560557L, -1101321574019503261L,
|
||||
-2670433016801847317L, 5321830579834785047L, -3221803331004277491L, 1644739493610607069L, 6131320256870790993L, 2762139043194663095L,
|
||||
2965150961192524528L, -3158951516726670886L, 7553707719620219721L, -7032137544937171245L, 3143064850383918358L, 1597544665906226773L,
|
||||
-4749560797652047578L, 6394572897509757993L, 9032178055121889492L, 5151371122220703336L, -6825348890156979298L, -242834301215959509L,
|
||||
-8071399103737053674L, -535932061014468418L, -5118182661306221224L, -3334642226765412483L, 8850058120466833735L, -3424193974287467359L,
|
||||
3589503944184336380L, -3588858202114426737L, 5030012605302946040L, -3799403997270715976L, 4236556626373409489L, -8125959076964085638L,
|
||||
-7669846995664752176L, 1627364323045527089L, 294587268038608334L, 2883454493032893253L, -5825401622958753077L, -2905059236606800070L,
|
||||
-299578263794707057L, -3820222711603128683L, -4914839139546299370L, 5457178556493670404L, 4633003122163691304L, 9097354517224871855L
|
||||
};
|
||||
private static final int[] secretInt = {
|
||||
963444408, 590966014, -1541195412, 1269048121, -1102339037, 2092845988, 24952395, -2130608962, 746652028, -148078335, 569846913,
|
||||
-1390282964, 481108471, -568546015, -723641171, 1842667036, -378678050, -2081853996, -1870403219, -1752136727, -610824061, 1926993808,
|
||||
1081269143, -1539280165, -1532739470, -1213946816, -1279810396, 1739831204, 526889911, -887134285, 2043354983, -428225761, 1323727307,
|
||||
-867244423, -1060352282, -440349618, 2028323020, -2106006080, 1518500069, -799374728, 2110806658, -864169894, -3375664, 1929366653,
|
||||
561184716, -1205767425, 146284914, 1174976545, 1950746808, -143374840, 1140290630, 608434036, -1910225929, -527555517, 903908900,
|
||||
-1875517298, -426756640, -2115596235, 981591696, 641368550, 1277573761, 1011623482, 675040294, 1378368588, -1152243652, -1849994712,
|
||||
-1013859502, 12816827, -889142383, -1999961917, -796341504, 1708165323, -1956261752, 462120400, 1394314085, 777198475, -1557245157,
|
||||
1906519635, 1685168942, 1214542243, -1756863375, -1567143836, 228759368, -116546921, 1324944802, 944699661, 423120633, -283559858,
|
||||
1190074680, -1454969063, -559331601, -1394693818, -659759447, -1462194978, -89597780, 1996138712, 1064762024, -482380038, -1662828682,
|
||||
882697023, 1060412643, -113298276, -587645132, -1143146177, -943989511, -943211556, 197642171, 1326172103, 491719623, -1977790709,
|
||||
1368005967, -531527139, 1272992138, -850665391, -1261614112, 1505021259, 827962573, -936289868, -1614270119, 2124400689, -914448440,
|
||||
-641106273, 2027538814, 1937299913, 1685289177, -362515592, -974494605, -1396315548, -2085829142, 881044677, -751533140, -338479997,
|
||||
-1007955148, -977015853, -2117745685, -1602107965, -6258235, -83910527, 335216544, 1662253823, -345828358, 401302291, 219671395,
|
||||
-586344469, 1373441303, -1219371763, -256421411, -621758639, 1239085239, -750134544, 382945754, 1427559241, 643110611, 690378006,
|
||||
-735500715, 1758734630, -1637297111, 731801812, 371957352, -1105843298, 1488852523, 2102967830, 1199397054, -1589150376, -56539267,
|
||||
-1879269049, -124781407, -1191669764, -776406897, 2060564728, -797257288, 835746513, -835596166, 1171141072, -884617679, 986400206,
|
||||
-1891972283, -1785775413, 378900282, 68588943, 671356565, -1356332010, -676386812, -69751000, -889464913, -1144325161, 1270598395,
|
||||
1078705098, 2118142907
|
||||
};
|
||||
|
||||
//static {
|
||||
// // cannot use ByteArrayAccess in this module
|
||||
// ByteBuffer buffer = ByteBuffer.wrap(secret).order(ByteOrder.LITTLE_ENDIAN);
|
||||
// secretLong = new long[(secret.length - Long.BYTES) + 1];
|
||||
// for (int i = 0, n = secretLong.length; i < n; i++) {
|
||||
// secretLong[i] = buffer.getLong(i);
|
||||
// }
|
||||
//
|
||||
// secretInt = new int[(secret.length - Integer.BYTES) + 1];
|
||||
// for (int i = 0, n = secretInt.length; i < n; i++) {
|
||||
// secretInt[i] = buffer.getInt(i);
|
||||
// }
|
||||
//}
|
||||
|
||||
// Primes
|
||||
private static final long XXH_PRIME32_1 = 0x9E3779B1L; /*!< 0b10011110001101110111100110110001 */
|
||||
private static final long XXH_PRIME32_2 = 0x85EBCA77L; /*!< 0b10000101111010111100101001110111 */
|
||||
private static final long XXH_PRIME32_3 = 0xC2B2AE3DL; /*!< 0b11000010101100101010111000111101 */
|
||||
|
||||
private static final long XXH_PRIME64_1 =
|
||||
0x9E3779B185EBCA87L; /*!< 0b1001111000110111011110011011000110000101111010111100101010000111 */
|
||||
private static final long XXH_PRIME64_2 =
|
||||
0xC2B2AE3D27D4EB4FL; /*!< 0b1100001010110010101011100011110100100111110101001110101101001111 */
|
||||
private static final long XXH_PRIME64_3 =
|
||||
0x165667B19E3779F9L; /*!< 0b0001011001010110011001111011000110011110001101110111100111111001 */
|
||||
private static final long XXH_PRIME64_4 =
|
||||
0x85EBCA77C2B2AE63L; /*!< 0b1000010111101011110010100111011111000010101100101010111001100011 */
|
||||
private static final long XXH_PRIME64_5 =
|
||||
0x27D4EB2F165667C5L; /*!< 0b0010011111010100111010110010111100010110010101100110011111000101 */
|
||||
|
||||
// only a fixed-size secret is supported
|
||||
private static final int nbStripesPerBlock = (192 - 64) / 8;
|
||||
private static final int block_len = 64 * nbStripesPerBlock;
|
||||
|
||||
public static <T> long hash(final T input, final Access<T> access, final int off, final int length, final long seed) {
|
||||
if (length <= 16) {
|
||||
// len_0to16_64b
|
||||
if (length > 8) {
|
||||
// len_9to16_64b
|
||||
final long bitflip1 = secretLong[24] ^ secretLong[32] + seed;
|
||||
final long bitflip2 = secretLong[40] ^ secretLong[48] - seed;
|
||||
final long input_lo = access.i64(input, off) ^ bitflip1;
|
||||
final long input_hi = access.i64(input, off + length - 8) ^ bitflip2;
|
||||
final long acc = length + Long.reverseBytes(input_lo) + input_hi + unsignedLongMulXorFold(input_lo, input_hi);
|
||||
return avalanche(acc);
|
||||
}
|
||||
if (length >= 4) {
|
||||
// len_4to8_64b
|
||||
long s = seed ^ Long.reverseBytes(seed & 0xFFFFFFFFL);
|
||||
final long input1 = access.i32(input, off); // high int will be shifted
|
||||
final long input2 = access.u32(input, off + length - 4);
|
||||
final long bitflip = (secretLong[8] ^ secretLong[16]) - s;
|
||||
final long keyed = (input2 + (input1 << 32)) ^ bitflip;
|
||||
return rrmxmx(keyed, length);
|
||||
}
|
||||
if (length != 0) {
|
||||
// len_1to3_64b
|
||||
final int c1 = access.i8(input, off) & 0xff;
|
||||
final int c2 = access.i8(input, off + (length >> 1)); // high 3 bytes will be shifted
|
||||
final int c3 = access.i8(input, off + length - 1) & 0xff;
|
||||
final long combined = unsignedInt((c1 << 16) | (c2 << 24) | c3 | (length << 8));
|
||||
final long bitflip = unsignedInt(secretInt[0] ^ secretInt[4]) + seed;
|
||||
return XXH64_avalanche(combined ^ bitflip);
|
||||
}
|
||||
return XXH64_avalanche(seed ^ secretLong[56] ^ secretLong[64]);
|
||||
}
|
||||
if (length <= 128) {
|
||||
// len_17to128_64b
|
||||
long acc = length * XXH_PRIME64_1;
|
||||
|
||||
if (length > 32) {
|
||||
if (length > 64) {
|
||||
if (length > 96) {
|
||||
acc += mix16B(seed, input, access, off + 48, 96);
|
||||
acc += mix16B(seed, input, access, off + length - 64, 112);
|
||||
}
|
||||
acc += mix16B(seed, input, access, off + 32, 64);
|
||||
acc += mix16B(seed, input, access, off + length - 48, 80);
|
||||
}
|
||||
acc += mix16B(seed, input, access, off + 16, 32);
|
||||
acc += mix16B(seed, input, access, off + length - 32, 48);
|
||||
}
|
||||
acc += mix16B(seed, input, access, off, 0);
|
||||
acc += mix16B(seed, input, access, off + length - 16, 16);
|
||||
|
||||
return avalanche(acc);
|
||||
}
|
||||
if (length <= 240) {
|
||||
// len_129to240_64b
|
||||
long acc = length * XXH_PRIME64_1;
|
||||
final int nbRounds = length / 16;
|
||||
int i = 0;
|
||||
for (; i < 8; ++i) {
|
||||
acc += mix16B(seed, input, access, off + 16 * i, 16 * i);
|
||||
}
|
||||
acc = avalanche(acc);
|
||||
|
||||
for (; i < nbRounds; ++i) {
|
||||
acc += mix16B(seed, input, access, off + 16 * i, 16 * (i - 8) + 3);
|
||||
}
|
||||
|
||||
/* last bytes */
|
||||
acc += mix16B(seed, input, access, off + length - 16, 136 - 17);
|
||||
return avalanche(acc);
|
||||
}
|
||||
|
||||
// hashLong_64b_internal
|
||||
long acc_0 = XXH_PRIME32_3;
|
||||
long acc_1 = XXH_PRIME64_1;
|
||||
long acc_2 = XXH_PRIME64_2;
|
||||
long acc_3 = XXH_PRIME64_3;
|
||||
long acc_4 = XXH_PRIME64_4;
|
||||
long acc_5 = XXH_PRIME32_2;
|
||||
long acc_6 = XXH_PRIME64_5;
|
||||
long acc_7 = XXH_PRIME32_1;
|
||||
|
||||
// hashLong_internal_loop
|
||||
final int nb_blocks = (length - 1) / block_len;
|
||||
for (int n = 0; n < nb_blocks; n++) {
|
||||
// accumulate
|
||||
final int offBlock = off + n * block_len;
|
||||
for (int s = 0; s < nbStripesPerBlock; s++) {
|
||||
// accumulate_512
|
||||
final int offStripe = offBlock + s * 64;
|
||||
final int offSec = s * 8;
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8];
|
||||
/* swap adjacent lanes */
|
||||
acc_0 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_1 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 2);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 3);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 2];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 3];
|
||||
/* swap adjacent lanes */
|
||||
acc_2 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_3 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 4);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 5);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 4];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 5];
|
||||
/* swap adjacent lanes */
|
||||
acc_4 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_5 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 6);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 7);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 6];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 7];
|
||||
/* swap adjacent lanes */
|
||||
acc_6 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_7 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
}
|
||||
|
||||
// scrambleAcc_scalar
|
||||
final int offSec = 192 - 64;
|
||||
acc_0 = (acc_0 ^ (acc_0 >>> 47) ^ secretLong[offSec]) * XXH_PRIME32_1;
|
||||
acc_1 = (acc_1 ^ (acc_1 >>> 47) ^ secretLong[offSec + 8]) * XXH_PRIME32_1;
|
||||
acc_2 = (acc_2 ^ (acc_2 >>> 47) ^ secretLong[offSec + 8 * 2]) * XXH_PRIME32_1;
|
||||
acc_3 = (acc_3 ^ (acc_3 >>> 47) ^ secretLong[offSec + 8 * 3]) * XXH_PRIME32_1;
|
||||
acc_4 = (acc_4 ^ (acc_4 >>> 47) ^ secretLong[offSec + 8 * 4]) * XXH_PRIME32_1;
|
||||
acc_5 = (acc_5 ^ (acc_5 >>> 47) ^ secretLong[offSec + 8 * 5]) * XXH_PRIME32_1;
|
||||
acc_6 = (acc_6 ^ (acc_6 >>> 47) ^ secretLong[offSec + 8 * 6]) * XXH_PRIME32_1;
|
||||
acc_7 = (acc_7 ^ (acc_7 >>> 47) ^ secretLong[offSec + 8 * 7]) * XXH_PRIME32_1;
|
||||
}
|
||||
|
||||
/* last partial block */
|
||||
final long nbStripes = ((length - 1) - (block_len * nb_blocks)) / 64;
|
||||
final int offBlock = off + block_len * nb_blocks;
|
||||
for (int s = 0; s < nbStripes; s++) {
|
||||
// accumulate_512
|
||||
final int offStripe = offBlock + s * 64;
|
||||
final int offSec = s * 8;
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8];
|
||||
/* swap adjacent lanes */
|
||||
acc_0 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_1 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 2);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 3);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 2];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 3];
|
||||
/* swap adjacent lanes */
|
||||
acc_2 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_3 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 4);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 5);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 4];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 5];
|
||||
/* swap adjacent lanes */
|
||||
acc_4 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_5 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 6);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 7);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 6];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 7];
|
||||
/* swap adjacent lanes */
|
||||
acc_6 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_7 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
}
|
||||
|
||||
/* last stripe */
|
||||
// accumulate_512
|
||||
final int offStripe = off + length - 64;
|
||||
final int offSec = 192 - 64 - 7;
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8];
|
||||
/* swap adjacent lanes */
|
||||
acc_0 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_1 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 2);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 3);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 2];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 3];
|
||||
/* swap adjacent lanes */
|
||||
acc_2 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_3 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 4);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 5);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 4];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 5];
|
||||
/* swap adjacent lanes */
|
||||
acc_4 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_5 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
{
|
||||
final long data_val_0 = access.i64(input, offStripe + 8 * 6);
|
||||
final long data_val_1 = access.i64(input, offStripe + 8 * 7);
|
||||
final long data_key_0 = data_val_0 ^ secretLong[offSec + 8 * 6];
|
||||
final long data_key_1 = data_val_1 ^ secretLong[offSec + 8 * 7];
|
||||
/* swap adjacent lanes */
|
||||
acc_6 += data_val_1 + (0xFFFFFFFFL & data_key_0) * (data_key_0 >>> 32);
|
||||
acc_7 += data_val_0 + (0xFFFFFFFFL & data_key_1) * (data_key_1 >>> 32);
|
||||
}
|
||||
|
||||
// merge accumulators
|
||||
final long result64 = length * XXH_PRIME64_1
|
||||
+ mix2Accs(acc_0, acc_1, 11)
|
||||
+ mix2Accs(acc_2, acc_3, 11 + 16)
|
||||
+ mix2Accs(acc_4, acc_5, 11 + 16 * 2)
|
||||
+ mix2Accs(acc_6, acc_7, 11 + 16 * 3);
|
||||
|
||||
return avalanche(result64);
|
||||
}
|
||||
|
||||
private static long XXH64_avalanche(long h64) {
|
||||
h64 ^= h64 >>> 33;
|
||||
h64 *= XXH_PRIME64_2;
|
||||
h64 ^= h64 >>> 29;
|
||||
h64 *= XXH_PRIME64_3;
|
||||
return h64 ^ (h64 >>> 32);
|
||||
}
|
||||
|
||||
private static long avalanche(long h64) {
|
||||
h64 ^= h64 >>> 37;
|
||||
h64 *= 0x165667919E3779F9L;
|
||||
return h64 ^ (h64 >>> 32);
|
||||
}
|
||||
|
||||
@SuppressWarnings("SpellCheckingInspection")
|
||||
static long rrmxmx(long h64, final long length) {
|
||||
h64 ^= Long.rotateLeft(h64, 49) ^ Long.rotateLeft(h64, 24);
|
||||
h64 *= 0x9FB21C651E98DF25L;
|
||||
h64 ^= (h64 >>> 35) + length;
|
||||
h64 *= 0x9FB21C651E98DF25L;
|
||||
return h64 ^ (h64 >>> 28);
|
||||
}
|
||||
|
||||
private static <T> long mix16B(long seed, final T input, final Access<T> access, final int offIn, final int offSec) {
|
||||
final long input_lo = access.i64(input, offIn);
|
||||
final long input_hi = access.i64(input, offIn + 8);
|
||||
return unsignedLongMulXorFold(
|
||||
input_lo ^ (secretLong[offSec] + seed),
|
||||
input_hi ^ (secretLong[offSec + 8] - seed)
|
||||
);
|
||||
}
|
||||
|
||||
@SuppressWarnings("SpellCheckingInspection")
|
||||
private static long mix2Accs(final long acc_lh, final long acc_rh, final int offSec) {
|
||||
return unsignedLongMulXorFold(
|
||||
acc_lh ^ secretLong[offSec],
|
||||
acc_rh ^ secretLong[offSec + 8]
|
||||
);
|
||||
}
|
||||
|
||||
private static long unsignedLongMulXorFold(final long lhs, final long rhs) {
|
||||
final long upper = multiplyHigh(lhs, rhs) + ((lhs >> 63) & rhs) + ((rhs >> 63) & lhs);
|
||||
final long lower = lhs * rhs;
|
||||
return lower ^ upper;
|
||||
}
|
||||
|
||||
private static long unsignedInt(int i) {
|
||||
return i & 0xFFFFFFFFL;
|
||||
}
|
||||
|
||||
// from the JDK (Math.multiplyHigh is only available since Java 9)
|
||||
private static long multiplyHigh(long x, long y) {
|
||||
if (x < 0 || y < 0) {
|
||||
// Use technique from section 8-2 of Henry S. Warren, Jr.,
|
||||
// Hacker's Delight (2nd ed.) (Addison Wesley, 2013), 173-174.
|
||||
long x1 = x >> 32;
|
||||
long x2 = x & 0xFFFFFFFFL;
|
||||
long y1 = y >> 32;
|
||||
long y2 = y & 0xFFFFFFFFL;
|
||||
long z2 = x2 * y2;
|
||||
long t = x1 * y2 + (z2 >>> 32);
|
||||
long z1 = t & 0xFFFFFFFFL;
|
||||
long z0 = t >> 32;
|
||||
z1 += x2 * y1;
|
||||
return x1 * y1 + z0 + (z1 >> 32);
|
||||
}
|
||||
else {
|
||||
// Use Karatsuba technique with two base 2^32 digits.
|
||||
long x1 = x >>> 32;
|
||||
long y1 = y >>> 32;
|
||||
long x2 = x & 0xFFFFFFFFL;
|
||||
long y2 = y & 0xFFFFFFFFL;
|
||||
long A = x1 * y1;
|
||||
long B = x2 * y2;
|
||||
long C = (x1 + x2) * (y1 + y2);
|
||||
long K = C - A - B;
|
||||
return (((B >>> 32) + K) >>> 32) + A;
|
||||
}
|
||||
}
|
||||
}
|
||||
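The private multiplyHigh above is a fallback for Math.multiplyHigh, which only exists since Java 9 (hence the rt-java8 module). A sketch, assuming Java 9+, of the unsigned multiply-and-XOR-fold primitive that mix16B and mix2Accs build on, expressed with the JDK intrinsic:

static long unsignedMulXorFold(long lhs, long rhs) {
  // Math.multiplyHigh returns the signed high 64 bits of the 128-bit product;
  // the two correction terms turn it into the unsigned high half.
  long upper = Math.multiplyHigh(lhs, rhs) + ((lhs >> 63) & rhs) + ((rhs >> 63) & lhs);
  // The low 64 bits are identical for signed and unsigned multiplication.
  long lower = lhs * rhs;
  return lower ^ upper;
}

Folding the high and low halves of the 128-bit product with XOR keeps the full mixing while staying in 64-bit arithmetic, which is the central mixing step of XXH3.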
@@ -6,6 +6,7 @@ import com.intellij.psi.codeStyle.MinusculeMatcher;
|
||||
import com.intellij.util.XmlDomReader;
|
||||
import org.jetbrains.annotations.ApiStatus;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.xxh3.Xx3UnencodedString;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
@@ -44,6 +45,7 @@ public final class ClassPathUtil {
|
||||
MinusculeMatcher.class, // module 'intellij.platform.util.text.matching'
|
||||
com.intellij.openapi.util.SystemInfoRt.class, // module 'intellij.platform.util.rt'
|
||||
com.intellij.util.lang.UrlClassLoader.class, // module 'intellij.platform.util.classLoader'
|
||||
Xx3UnencodedString.class, // intellij.platform.util.rt.java8 (required for classLoader)
|
||||
org.intellij.lang.annotations.Flow.class, // jetbrains-annotations-java5
|
||||
org.jdom.Document.class, // jDOM
|
||||
org.apache.log4j.Appender.class, // Log4J
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.openapi.application;
|
||||
|
||||
import com.intellij.openapi.util.SystemInfoRt;
|
||||
@@ -9,7 +9,10 @@ import org.jetbrains.annotations.Contract;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.io.*;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.lang.invoke.MethodHandles;
|
||||
import java.lang.invoke.MethodType;
|
||||
import java.net.URISyntaxException;
|
||||
@@ -203,10 +206,10 @@ public final class PathManager {
|
||||
* Looks for a file in all possible bin directories.
|
||||
*
|
||||
* @return first that exists.
|
||||
* @throws FileNotFoundException if nothing found.
|
||||
* @throws RuntimeException if nothing found.
|
||||
* @see #findBinFile(String)
|
||||
*/
|
||||
public static @NotNull Path findBinFileWithException(@NotNull String fileName) throws FileNotFoundException {
|
||||
public static @NotNull Path findBinFileWithException(@NotNull String fileName) {
|
||||
Path file = findBinFile(fileName);
|
||||
if (file != null) {
|
||||
return file;
|
||||
@@ -217,7 +220,7 @@ public final class PathManager {
|
||||
for (Path directory : getBinDirectories()) {
|
||||
message.append('\n').append(directory);
|
||||
}
|
||||
throw new FileNotFoundException(message.toString());
|
||||
throw new RuntimeException(message.toString());
|
||||
}
|
||||
|
||||
public static @NotNull String getLibPath() {
|
||||
@@ -686,7 +689,7 @@ public final class PathManager {
|
||||
if (!Files.exists(artifactsDir)) {
|
||||
// running IDE or tests in IDE
|
||||
artifactsDir = outClassesDir.resolve("artifacts");
|
||||
} // otherwise running tests via build scripts
|
||||
} // otherwise, running tests via build scripts
|
||||
return artifactsDir.resolve(artifactDirNameInBuildLayout).resolve(artifactFileName);
|
||||
}
|
||||
|
||||
|
||||
@@ -328,7 +328,7 @@ public final class JDOMUtil {
|
||||
try {
|
||||
XMLStreamReader2 xmlStreamReader = StaxFactory.createXmlStreamReader(data);
|
||||
try {
|
||||
return SafeStAXStreamBuilder.build(xmlStreamReader, true, true, null == null ? SafeStAXStreamBuilder.FACTORY : null);
|
||||
return SafeStAXStreamBuilder.build(xmlStreamReader, true, true, SafeStAXStreamBuilder.FACTORY);
|
||||
}
|
||||
finally {
|
||||
xmlStreamReader.close();
|
||||
|
||||
@@ -1,272 +0,0 @@
|
||||
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
|
||||
package com.intellij.util.lang;
|
||||
|
||||
import com.intellij.openapi.util.SystemInfo;
|
||||
import com.intellij.openapi.util.io.FileUtil;
|
||||
import com.intellij.openapi.util.io.StreamUtil;
|
||||
import com.intellij.util.io.DigestUtil;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Test;
|
||||
|
||||
import javax.crypto.KeyAgreement;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.InputStream;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLClassLoader;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.Provider;
|
||||
import java.security.Security;
|
||||
import java.util.*;
|
||||
import java.util.jar.Attributes;
|
||||
import java.util.jar.Manifest;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
import java.util.zip.ZipOutputStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class SecureUrlClassLoaderTest {
|
||||
/**
|
||||
* IDEA's UrlClassLoader should verify JAR signatures and checksums if they exist,
* but only if the JAR URL is specified in {@link UrlClassLoader.Builder#pathsWithProtectionDomain}.
|
||||
*/
|
||||
@Test
|
||||
public void testSignedJars() throws Exception {
|
||||
String className = "org.bouncycastle.jce.provider.BouncyCastleProvider";
|
||||
URL classUrl = getJarUrl(className);
|
||||
|
||||
ClassLoader classLoader;
|
||||
Exception error;
|
||||
|
||||
classLoader = new URLClassLoader(new URL[]{classUrl}, null);
|
||||
error = codeThatRegistersSecurityProvider(classLoader, className);
|
||||
assertNull(error);
|
||||
|
||||
classLoader = UrlClassLoader.build()
|
||||
.files(Collections.singletonList(Paths.get(classUrl.toURI())))
|
||||
.get();
|
||||
error = codeThatRegistersSecurityProvider(classLoader, className);
|
||||
|
||||
// Oracle JRE prevents instantiating a key exchange algorithm from an unsigned JAR.
// OpenJDK-based runtimes, including IntelliJ JDK, do not prevent this.
// Other JREs were not tested.
|
||||
if (!SystemInfo.isOracleJvm) {
|
||||
assertNull(error);
|
||||
}
|
||||
else {
|
||||
assertThat(error).isNotNull();
|
||||
assertEquals(SecurityException.class, error.getClass());
|
||||
}
|
||||
|
||||
classLoader = UrlClassLoader.build()
|
||||
.files(Collections.singletonList(Paths.get(classUrl.toURI())))
|
||||
.urlsWithProtectionDomain(Collections.singleton(Paths.get(classUrl.toURI())))
|
||||
.get();
|
||||
error = codeThatRegistersSecurityProvider(classLoader, className);
|
||||
assertNull(error);
|
||||
}
|
||||
|
||||
@NotNull
|
||||
private static URL getJarUrl(String className) throws MalformedURLException {
|
||||
URL classUrl = SecureUrlClassLoaderTest.class.getClassLoader().getResource(classNameToJarEntryName(className));
|
||||
assertEquals("jar", Objects.requireNonNull(classUrl).getProtocol());
|
||||
classUrl = new URL(classUrl.toExternalForm().split("[!]", 2)[0].substring("jar:".length()));
|
||||
return classUrl;
|
||||
}
|
||||
|
||||
@NotNull
|
||||
private static String classNameToJarEntryName(@NotNull String className) {
|
||||
return className.replace('.', '/') + ".class";
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static SecurityException codeThatRegistersSecurityProvider(ClassLoader classLoader, String className) {
|
||||
Class<?> providerClass;
|
||||
Provider provider;
|
||||
try {
|
||||
providerClass = classLoader.loadClass(className);
|
||||
provider = (Provider)providerClass.newInstance();
|
||||
}
|
||||
catch (Exception error) {
|
||||
throw new IllegalStateException(error);
|
||||
}
|
||||
Security.addProvider(provider);
|
||||
|
||||
try {
|
||||
KeyAgreement.getInstance("DH", provider);
|
||||
return null;
|
||||
}
|
||||
catch (SecurityException error) {
|
||||
return error;
|
||||
}
|
||||
catch (NoSuchAlgorithmException error) {
|
||||
throw new IllegalStateException(error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method {@link #doTestLoadJarWithMaliciousThings(boolean, boolean, boolean, String)}
|
||||
* should create a correct JAR when all change parameters are set to false.
|
||||
*/
|
||||
@Test
|
||||
public void testLoadCorrectJar() throws Exception {
|
||||
String className = "org.bouncycastle.jce.provider.BouncyCastleProvider";
|
||||
SecurityException err = doTestLoadJarWithMaliciousThings(false, false, false, className);
|
||||
assertNull(err);
|
||||
}
|
||||
|
||||
/**
|
||||
* Should not load a class from a JAR when the class file was altered but no files in META-INF were changed.
|
||||
*/
|
||||
@Test
|
||||
public void testLoadJarWithMaliciousClass() throws Exception {
|
||||
String className = "org.bouncycastle.jce.provider.BouncyCastleProvider";
|
||||
SecurityException err = doTestLoadJarWithMaliciousThings(true, false, false, className);
|
||||
assertNotNull(err);
|
||||
assertEquals("SHA-256 digest error for " + classNameToJarEntryName(className), err.getMessage());
|
||||
}
|
||||
|
||||
/**
|
||||
* Should not load a class from a JAR when the manifest file has an incorrect digest for some class
* but META-INF/*.SF contains the correct digest for that class.
|
||||
*/
|
||||
@Test
|
||||
public void testLoadJarWithMaliciousManifest() throws Exception {
|
||||
SecurityException err = doTestLoadJarWithMaliciousThings(false, true, false, "org.bouncycastle.jce.provider.BouncyCastleProvider");
|
||||
assertThat(err).hasMessageMatching(
|
||||
// This error message differs in Java 1.8 and Java 11.
|
||||
"[Ii]nvalid .*signature file digest for (Manifest main attributes|org/bouncycastle/jce/provider/BouncyCastleProvider.class)");
|
||||
}
|
||||
|
||||
/**
|
||||
* Should not load a class from a JAR when the class file was altered and its new digest
* was written to the manifest file.
|
||||
*/
|
||||
@Test
|
||||
public void testLoadJarWithMaliciousClassAndManifest() throws Exception {
|
||||
SecurityException err = doTestLoadJarWithMaliciousThings(true, true, true, "org.bouncycastle.jce.provider.BouncyCastleProvider");
|
||||
assertNotNull(err);
|
||||
assertEquals("cannot verify signature block file META-INF/BC1024KE", err.getMessage());
|
||||
}
|
||||
|
||||
/**
|
||||
* Should not load a class from a JAR when the class file was altered and its new digest
* was written to the manifest file and to all signature files.
|
||||
*/
|
||||
@Test
|
||||
public void testLoadJarWithMaliciousClassAndManifestAndSignature() throws Exception {
|
||||
SecurityException err = doTestLoadJarWithMaliciousThings(true, true, true, "org.bouncycastle.jce.provider.BouncyCastleProvider");
|
||||
assertNotNull(err);
|
||||
assertEquals("cannot verify signature block file META-INF/BC1024KE", err.getMessage());
|
||||
}
|
||||
|
||||
private static SecurityException doTestLoadJarWithMaliciousThings(boolean changeClass,
|
||||
boolean changeManifest,
|
||||
boolean changeSignatureFile,
|
||||
String className) throws Exception {
|
||||
File root = FileUtil.createTempDirectory("testLoadJarWithMaliciousClass", "");
|
||||
try {
|
||||
File hackedClassJar = new File(root, "hacked-bouncycastle.jar");
|
||||
URL classUrl = getJarUrl(className);
|
||||
Path hackedJarPath = hackedClassJar.toPath();
|
||||
|
||||
createHackedJar(hackedClassJar, classUrl, className, changeClass, changeManifest, changeSignatureFile);
|
||||
|
||||
ClassLoader classLoader = UrlClassLoader.build()
|
||||
.files(Collections.singletonList(hackedJarPath))
|
||||
.urlsWithProtectionDomain(Collections.singleton(hackedJarPath))
|
||||
.get();
|
||||
|
||||
try {
|
||||
classLoader.loadClass(className);
|
||||
return null;
|
||||
}
|
||||
catch (SecurityException err) {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
FileUtil.delete(root);
|
||||
}
|
||||
}
|
||||
|
||||
private static void createHackedJar(File destination,
|
||||
URL classUrl,
|
||||
String className,
|
||||
boolean changeClass,
|
||||
boolean changeManifest,
|
||||
boolean changeSignatureFile) throws Exception {
|
||||
String pathInJar = classNameToJarEntryName(className);
|
||||
try (ZipFile sourceZipFile = new ZipFile(new File(classUrl.getFile()), ZipFile.OPEN_READ)) {
|
||||
ByteArrayOutputStream hackedClassBytes = new ByteArrayOutputStream();
|
||||
Enumeration<? extends ZipEntry> entries = sourceZipFile.entries();
|
||||
while (entries.hasMoreElements()) {
|
||||
ZipEntry entry = entries.nextElement();
|
||||
if (entry.getName().equals(pathInJar)) {
|
||||
StreamUtil.copy(sourceZipFile.getInputStream(entry), hackedClassBytes);
|
||||
break;
|
||||
}
|
||||
}
|
||||
Assume.assumeTrue(hackedClassBytes.size() > 0);
|
||||
|
||||
// Adding one new byte is enough to change the hash digest of the file.
// It doesn't matter that the class becomes invalid, because
// loading it should never even be attempted.
|
||||
hackedClassBytes.write(0);
|
||||
|
||||
try (ZipOutputStream zipOutputStream = new ZipOutputStream(new FileOutputStream(destination))) {
|
||||
entries = sourceZipFile.entries();
|
||||
while (entries.hasMoreElements()) {
|
||||
ZipEntry entry = entries.nextElement();
|
||||
|
||||
String upperEntryName = entry.getName().toUpperCase(Locale.US);
|
||||
boolean isManifest = upperEntryName.equals("META-INF/MANIFEST.MF");
|
||||
boolean isSignatureFile = upperEntryName.startsWith("META-INF/")
|
||||
&& upperEntryName.indexOf('/', "META-INF/".length() + 1) < 0
|
||||
&& upperEntryName.endsWith(".SF");
|
||||
boolean isDesiredClass = entry.getName().equals(pathInJar);
|
||||
|
||||
if (isManifest && changeManifest || isSignatureFile && changeSignatureFile) {
|
||||
// A signature or manifest file must not be re-serialized unless we intend
// to change it, even if its contents are unchanged: `Manifest.write(OutputStream)`
// iterates over a hashtable, so entries would be written
// in an unexpected order, which leads to a false-positive
// digital signature check failure.
|
||||
Manifest manifest;
|
||||
try (InputStream stream = sourceZipFile.getInputStream(entry)) {
|
||||
manifest = new Manifest(stream);
|
||||
}
|
||||
hackManifest(manifest, pathInJar, hackedClassBytes);
|
||||
zipOutputStream.putNextEntry(new ZipEntry(entry.getName()));
|
||||
manifest.write(zipOutputStream);
|
||||
}
|
||||
else if (isDesiredClass && changeClass) {
|
||||
zipOutputStream.putNextEntry(new ZipEntry(entry.getName()));
|
||||
hackedClassBytes.writeTo(zipOutputStream);
|
||||
}
|
||||
else {
|
||||
zipOutputStream.putNextEntry(entry);
|
||||
StreamUtil.copy(sourceZipFile.getInputStream(entry), zipOutputStream);
|
||||
}
|
||||
zipOutputStream.closeEntry();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void hackManifest(Manifest manifest, String pathInJar, ByteArrayOutputStream hackedClassBytes) {
|
||||
Attributes newAttributes = new Attributes();
|
||||
byte[] digest = DigestUtil.sha256().digest(hackedClassBytes.toByteArray());
|
||||
newAttributes.putValue("SHA-256-Digest", Base64.getEncoder().encodeToString(digest));
|
||||
Assume.assumeTrue(manifest.getEntries().containsKey(pathInJar));
|
||||
manifest.getEntries().put(pathInJar, newAttributes);
|
||||
}
|
||||
}
|
||||
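The deleted test above checked that UrlClassLoader rejects tampered signed JARs (the protection-domain option it relied on is removed elsewhere in this commit). For reference, a hedged sketch of how the same property can be observed with the plain JDK API; the jar path and entry name are placeholder assumptions:

static void verifySignedEntry(java.io.File signedJar, String entryName) throws java.io.IOException {
  // JarFile verifies per-entry digests while the entry bytes are read;
  // a tampered entry surfaces as a SecurityException, which is essentially
  // what the deleted test asserted for UrlClassLoader.
  try (java.util.jar.JarFile jar = new java.util.jar.JarFile(signedJar, true)) {
    java.util.jar.JarEntry entry = jar.getJarEntry(entryName);
    try (java.io.InputStream in = jar.getInputStream(entry)) {
      byte[] buffer = new byte[8192];
      while (in.read(buffer) != -1) {
        // reading to the end triggers digest verification of the entry
      }
    }
    // non-null only after the entry has been fully read and verified
    java.security.CodeSigner[] signers = entry.getCodeSigners();
  }
}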
@@ -7,11 +7,11 @@ import com.intellij.diagnostic.StartUpMeasurer
|
||||
import com.intellij.openapi.diagnostic.Logger
|
||||
import com.intellij.ui.icons.IconLoadMeasurer
|
||||
import com.intellij.util.ImageLoader
|
||||
import net.openshift.hash.XxHash3
|
||||
import org.jetbrains.annotations.ApiStatus
|
||||
import org.jetbrains.mvstore.MVMap
|
||||
import org.jetbrains.mvstore.MVStore
|
||||
import org.jetbrains.mvstore.type.FixedByteArrayDataType
|
||||
import org.jetbrains.xxh3.Xxh3
|
||||
import sun.awt.image.SunWritableRaster
|
||||
import java.awt.Image
|
||||
import java.awt.Point
|
||||
@@ -129,8 +129,8 @@ class SvgCacheManager(dbFile: Path) {
|
||||
private val ZERO_POINT = Point(0, 0)
|
||||
|
||||
private fun getCacheKey(themeDigest: ByteArray, imageBytes: ByteArray): ByteArray {
|
||||
val xx3 = XxHash3.INSTANCE
|
||||
val contentDigest = xx3.hashLongs(longArrayOf(xx3.hashBytes(imageBytes), xx3.hashBytes(themeDigest)))
|
||||
val contentDigest = Xxh3.hashLongs(longArrayOf(
|
||||
Xxh3.hash(imageBytes), Xxh3.hash(themeDigest)))
|
||||
|
||||
val buffer = ByteBuffer.allocate(IMAGE_KEY_SIZE)
|
||||
// add content size to key to reduce chance of hash collision (write as medium int)
|
||||
|
||||
@@ -6,7 +6,7 @@ import com.intellij.ui.icons.IconLoadMeasurer
|
||||
import com.intellij.util.ImageLoader
|
||||
import org.jetbrains.annotations.ApiStatus
|
||||
import org.jetbrains.ikv.Ikv
|
||||
import org.minperf.UniversalHash
|
||||
import org.jetbrains.ikv.UniversalHash
|
||||
import java.awt.Image
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.file.Path
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.