JPS cache - merge timestamp and hash storage, disable PHM compression for JPS cache (compressing already-compressed data is inefficient)

GitOrigin-RevId: ff88f355747967c711c20cd024bee117e931965e
This commit is contained in:
Vladimir Krivosheev
2024-09-07 16:44:21 +02:00
committed by intellij-monorepo-bot
parent 8b1df13be3
commit d1674e7fa7
7 changed files with 87 additions and 110 deletions

View File

@@ -2596,6 +2596,7 @@ f:org.jetbrains.jps.incremental.relativizer.PathRelativizerService
- reportUnhandledPaths():V
- toFull(java.lang.String):java.lang.String
- toRelative(java.lang.String):java.lang.String
- toRelative(java.nio.file.Path):java.lang.String
c:org.jetbrains.jps.incremental.resources.ResourcesBuilder
- org.jetbrains.jps.incremental.TargetBuilder
- <init>():V
@@ -2609,16 +2610,16 @@ a:org.jetbrains.jps.incremental.storage.AbstractStateStorage
- pf:dataLock:java.lang.Object
- <init>(java.io.File,com.intellij.util.io.KeyDescriptor,com.intellij.util.io.DataExternalizer):V
- appendData(java.lang.Object,java.lang.Object):V
- clean():V
- close():V
- f:clean():V
- f:close():V
- flush(Z):V
- force():V
- f:force():V
- getKeys():java.util.Collection
- getKeysIterator():java.util.Iterator
- getState(java.lang.Object):java.lang.Object
- remove(java.lang.Object):V
- update(java.lang.Object,java.lang.Object):V
- wipe():Z
- f:wipe():Z
f:org.jetbrains.jps.incremental.storage.BuildDataManager
- sf:PROCESS_CONSTANTS_NON_INCREMENTAL_PROPERTY:java.lang.String
- <init>(org.jetbrains.jps.builders.storage.BuildDataPaths,org.jetbrains.jps.incremental.storage.BuildTargetsState,org.jetbrains.jps.incremental.relativizer.PathRelativizerService):V

View File

@@ -18,6 +18,7 @@ import org.jetbrains.jps.model.serialization.JpsModelSerializationDataService;
import org.jetbrains.jps.util.JpsPathUtil;
import java.io.File;
import java.nio.file.Path;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
@@ -70,6 +71,10 @@ public final class PathRelativizerService {
myRelativizers.add(new GradlePathRelativizer());
}
public @NotNull String toRelative(@NotNull Path path) {
return toRelative(path.toString());
}
/**
* @param path absolute path which should be converted.
* It may use forward or backward slashes as separators, so there is no need to convert it before passing to the method

View File

@@ -1,17 +1,16 @@
// Copyright 2000-2024 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.jps.incremental.storage;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.util.CommonProcessors;
import com.intellij.util.io.AppendablePersistentMap;
import com.intellij.util.io.DataExternalizer;
import com.intellij.util.io.KeyDescriptor;
import com.intellij.util.io.PersistentHashMap;
import com.intellij.util.io.*;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
@@ -19,45 +18,51 @@ import java.util.List;
public abstract class AbstractStateStorage<Key, T> implements StorageOwner {
protected final Object dataLock = new Object();
private final File baseFile;
private final KeyDescriptor<Key> keyDescriptor;
private final DataExternalizer<T> stateExternalizer;
private PersistentHashMap<Key, T> map;
private @NotNull final PersistentMapBuilder<Key, T> mapBuilder;
private @NotNull PersistentMapImpl<Key, T> map;
private final boolean isCompressed;
public AbstractStateStorage(File storePath, KeyDescriptor<Key> keyDescriptor, DataExternalizer<T> stateExternalizer) throws IOException {
baseFile = storePath;
this.keyDescriptor = keyDescriptor;
this.stateExternalizer = stateExternalizer;
map = createMap(storePath);
this(PersistentMapBuilder.newBuilder(storePath.toPath(), keyDescriptor, stateExternalizer),
Boolean.parseBoolean(System.getProperty("jps.storage.do.compression", "true")));
}
public void force() {
@ApiStatus.Internal
protected AbstractStateStorage(@NotNull PersistentMapBuilder<Key, T> mapBuilder, boolean isCompressed) throws IOException {
this.isCompressed = isCompressed;
this.mapBuilder = mapBuilder;
map = createMap();
}
public final void force() {
synchronized (dataLock) {
map.force();
try {
map.force();
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
@Override
public void close() throws IOException {
public final void close() throws IOException {
synchronized (dataLock) {
map.close();
}
}
@Override
public void clean() throws IOException {
public final void clean() throws IOException {
wipe();
}
public boolean wipe() {
@SuppressWarnings("UnusedReturnValue")
public final boolean wipe() {
synchronized (dataLock) {
map.closeAndDelete();
try {
map.closeAndClean();
}
catch (IOException ignored) {
}
try {
map = createMap(baseFile);
map = createMap();
}
catch (IOException ignored) {
return false;
@@ -79,7 +84,7 @@ public abstract class AbstractStateStorage<Key, T> implements StorageOwner {
public void appendData(final Key key, final T data) throws IOException {
synchronized (dataLock) {
map.appendData(key, (AppendablePersistentMap.ValueDataAppender)out -> stateExternalizer.save(out, data));
map.appendData(key, out -> mapBuilder.getValueExternalizer().save(out, data));
}
}
@@ -98,7 +103,7 @@ public abstract class AbstractStateStorage<Key, T> implements StorageOwner {
public Collection<Key> getKeys() throws IOException {
synchronized (dataLock) {
List<Key> result = new ArrayList<>();
map.processKeysWithExistingMapping(new CommonProcessors.CollectProcessor<>(result));
map.processExistingKeys(new CommonProcessors.CollectProcessor<>(result));
return result;
}
}
@@ -106,14 +111,14 @@ public abstract class AbstractStateStorage<Key, T> implements StorageOwner {
public Iterator<Key> getKeysIterator() throws IOException {
synchronized (dataLock) {
List<Key> result = new ArrayList<>();
map.processKeysWithExistingMapping(new CommonProcessors.CollectProcessor<>(result));
map.processExistingKeys(new CommonProcessors.CollectProcessor<>(result));
return result.iterator();
}
}
private PersistentHashMap<Key, T> createMap(@NotNull File file) throws IOException {
FileUtilRt.createIfNotExists(file); //todo assert
return new PersistentHashMap<>(file, keyDescriptor, stateExternalizer);
private @NotNull PersistentMapImpl<Key, T> createMap() throws IOException {
Files.createDirectories(mapBuilder.getFile().getParent());
return new PersistentMapImpl<>(mapBuilder, new PersistentHashMapValueStorage.CreationTimeOptions(false, false, false, isCompressed));
}
@Override

View File

@@ -272,12 +272,11 @@ public final class BuildTargetSourcesState implements BuildListener {
}
@Override
public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
File file = path.toFile();
if (!buildRootIndex.isFileAccepted(file, rootDescriptor)) {
public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) {
if (!buildRootIndex.isFileAccepted(path.toFile(), rootDescriptor)) {
return FileVisitResult.CONTINUE;
}
getFileHash(target, file, path, rootFile, hash, hashToReuse);
getFileHash(target, path, rootFile, hash, hashToReuse);
return FileVisitResult.CONTINUE;
}
});
@@ -307,15 +306,14 @@ public final class BuildTargetSourcesState implements BuildListener {
}
private void getFileHash(@NotNull BuildTarget<?> target,
@NotNull File file,
@NotNull Path path,
@NotNull Path rootFile,
@NotNull LongArrayList hash,
@NotNull HashStream64 hashToReuse) throws IOException {
@NotNull HashStream64 hashToReuse) {
StampsStorage<? extends StampsStorage.Stamp> storage = projectStamps.getStampStorage();
assert storage instanceof HashStampStorage;
HashStampStorage fileStampStorage = (HashStampStorage)storage;
Long fileHash = fileStampStorage.getStoredFileHash(file, target);
Long fileHash = fileStampStorage.getStoredFileHash(path, target);
if (fileHash == null) {
return;
}

View File

@@ -5,41 +5,37 @@ import com.dynatrace.hash4j.hashing.Hashing
import com.intellij.util.ArrayUtil
import com.intellij.util.io.DataExternalizer
import com.intellij.util.io.EnumeratorStringDescriptor
import com.intellij.util.io.PersistentMapBuilder
import org.jetbrains.jps.builders.BuildTarget
import org.jetbrains.jps.incremental.FSOperations
import org.jetbrains.jps.incremental.FileHashUtil
import org.jetbrains.jps.incremental.relativizer.PathRelativizerService
import org.jetbrains.jps.incremental.storage.FileTimestampStorage.FileTimestamp
import java.io.DataInput
import java.io.DataOutput
import java.io.File
import java.nio.file.Path
import java.nio.file.attribute.BasicFileAttributes
internal class HashStampStorage(
dataStorageRoot: Path,
private val relativizer: PathRelativizerService,
targetsState: BuildTargetsState,
) : AbstractStateStorage<String?, Array<HashStampPerTarget>>(
calcStorageRoot(dataStorageRoot).resolve("data").toFile(),
JpsCachePathStringDescriptor,
StateExternalizer,
), StampsStorage<HashStamp?> {
private val timestampStorage = FileTimestampStorage(dataStorageRoot, targetsState)
private val targetState = targetsState
private val fileStampRoot = calcStorageRoot(dataStorageRoot)
private val targetState: BuildTargetsState,
) : AbstractStateStorage<String, Array<HashStampPerTarget>>(
PersistentMapBuilder.newBuilder(getStorageRoot(dataStorageRoot).resolve("data"), JpsCachePathStringDescriptor, StateExternalizer)
.withVersion(2),
false,
), StampsStorage<HashStamp> {
private val fileStampRoot = getStorageRoot(dataStorageRoot)
override fun getStorageRoot(): Path = fileStampRoot
override fun saveStamp(file: Path, buildTarget: BuildTarget<*>, stamp: HashStamp) {
timestampStorage.saveStamp(file, buildTarget, FileTimestamp.fromLong(stamp.timestamp))
val targetId = targetState.getBuildTargetId(buildTarget)
val path = relativizer.toRelative(file.toString())
val path = relativizer.toRelative(file)
update(path, updateFilesStamp(oldState = getState(path), targetId = targetId, stamp = stamp))
}
override fun removeStamp(file: Path, buildTarget: BuildTarget<*>) {
timestampStorage.removeStamp(file, buildTarget)
val path = relativizer.toRelative(file.toString())
val path = relativizer.toRelative(file)
val state = getState(path) ?: return
val targetId = targetState.getBuildTargetId(buildTarget)
for (i in state.indices) {
@@ -57,34 +53,30 @@ internal class HashStampStorage(
}
override fun getPreviousStamp(file: Path, target: BuildTarget<*>): HashStamp? {
val previousTimestamp = timestampStorage.getPreviousStamp(file, target) ?: return null
val state = getState(relativizer.toRelative(file.toString())) ?: return null
val state = getState(relativizer.toRelative(file)) ?: return null
val targetId = targetState.getBuildTargetId(target)
return state
.firstOrNull { it.targetId == targetId }
?.let { HashStamp(it.hash, previousTimestamp.asLong()) }
?.let { HashStamp(hash = it.hash, timestamp = it.timestamp) }
}
fun getStoredFileHash(file: File, target: BuildTarget<*>): Long? {
val state = getState(relativizer.toRelative(file.absolutePath)) ?: return null
fun getStoredFileHash(file: Path, target: BuildTarget<*>): Long? {
val state = getState(relativizer.toRelative(file)) ?: return null
val targetId = targetState.getBuildTargetId(target)
return state.firstOrNull { it.targetId == targetId }?.hash
}
override fun getCurrentStamp(file: Path): HashStamp {
val currentTimestamp = timestampStorage.getCurrentStamp(file)
return HashStamp(FileHashUtil.getFileHash(file), currentTimestamp.asLong())
return HashStamp(hash = FileHashUtil.getFileHash(file), timestamp = FSOperations.lastModified(file))
}
override fun isDirtyStamp(stamp: StampsStorage.Stamp, file: Path): Boolean {
if (stamp !is HashStamp) {
return true
}
if (!timestampStorage.isDirtyStamp(FileTimestamp.fromLong(stamp.timestamp), file)) {
if (stamp.timestamp == FSOperations.lastModified(file)) {
return false
}
return stamp.hash != FileHashUtil.getFileHash(file)
}
@@ -93,41 +85,25 @@ internal class HashStampStorage(
return true
}
if (!timestampStorage.isDirtyStamp(FileTimestamp.fromLong(stamp.timestamp), file, attrs)) {
// If equal, then non-dirty.
// If not equal, then we check the hash to avoid marking the file as `dirty` only because of a different timestamp.
// We cannot rely solely on the hash, as getting the last-modified timestamp is much cheaper than computing the file hash.
if ((if (attrs.isRegularFile) attrs.lastModifiedTime().toMillis() else FSOperations.lastModified(file)) == stamp.timestamp) {
return false
}
return stamp.hash != FileHashUtil.getFileHash(file)
}
override fun force() {
super.force()
timestampStorage.force()
}
override fun clean() {
super.clean()
timestampStorage.clean()
}
override fun wipe(): Boolean {
return super.wipe() && timestampStorage.wipe()
}
override fun close() {
super.close()
timestampStorage.close()
}
}
internal class HashStampPerTarget(@JvmField val targetId: Int, @JvmField val hash: Long)
internal class HashStampPerTarget(@JvmField val targetId: Int, @JvmField val hash: Long, @JvmField val timestamp: Long)
internal data class HashStamp(@JvmField val hash: Long, @JvmField val timestamp: Long) : StampsStorage.Stamp
internal data class HashStamp(@JvmField val hash: Long, @JvmField val timestamp: Long) : StampsStorage.Stamp
private fun calcStorageRoot(dataStorageRoot: Path): Path = dataStorageRoot.resolve("hashes")
private fun getStorageRoot(dataStorageRoot: Path): Path = dataStorageRoot.resolve("hashes")
private fun updateFilesStamp(oldState: Array<HashStampPerTarget>?, targetId: Int, stamp: HashStamp): Array<HashStampPerTarget> {
val newItem = HashStampPerTarget(targetId = targetId, hash = stamp.hash)
val newItem = HashStampPerTarget(targetId = targetId, hash = stamp.hash, timestamp = stamp.timestamp)
if (oldState == null) {
return arrayOf(newItem)
}
@@ -150,6 +126,7 @@ private object StateExternalizer : DataExternalizer<Array<HashStampPerTarget>> {
for (target in value) {
out.writeInt(target.targetId)
out.writeLong(target.hash)
out.writeLong(target.timestamp)
}
}
@@ -158,7 +135,8 @@ private object StateExternalizer : DataExternalizer<Array<HashStampPerTarget>> {
return Array(size) {
val id = `in`.readInt()
val hash = `in`.readLong()
HashStampPerTarget(targetId = id, hash = hash)
val timestamp = `in`.readLong()
HashStampPerTarget(targetId = id, hash = hash, timestamp = timestamp)
}
}
}

View File

@@ -172,6 +172,11 @@ class CompilationContextImpl private constructor(
enableCoroutinesDump: Boolean = true,
customBuildPaths: BuildPaths? = null,
): CompilationContextImpl {
if (!options.useCompiledClassesFromProjectOutput) {
// disable compression - otherwise, our zstd/zip cannot compress efficiently
System.setProperty("jps.storage.do.compression", "false")
}
check(sequenceOf("platform/build-scripts", "bin/idea.properties", "build.txt").all {
Files.exists(COMMUNITY_ROOT.communityRoot.resolve(it))
}) {

View File

@@ -17,7 +17,6 @@ import org.jetbrains.annotations.ApiStatus.Internal;
import org.jetbrains.annotations.*;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
@@ -246,9 +245,7 @@ public final class PersistentMapImpl<Key, Value> implements PersistentMapBase<Ke
* @return empty map with exactly the same configuration as this map was created with, but based on the given path
*/
public PersistentMapImpl<Key, Value> deriveEmptyMap(@NotNull Path path) throws IOException {
return myOptions.with(() -> {
return new PersistentMapImpl<>(myBuilder.copyWithFile(path));
});
return myOptions.with(() -> new PersistentMapImpl<>(myBuilder.copyWithFile(path)));
}
@Override
@@ -597,12 +594,7 @@ public final class PersistentMapImpl<Key, Value> implements PersistentMapBase<Ke
getReadLock().lock();
try {
flushAppendCache();
return myEnumerator.processAllDataObject(processor, new PersistentEnumeratorBase.DataFilter() {
@Override
public boolean accept(final int id) throws IOException {
return readValueId(id) != NULL_ADDR;
}
});
return myEnumerator.processAllDataObject(processor, id -> readValueId(id) != NULL_ADDR);
}
catch (ClosedStorageException ex) {
throw ex;
@@ -691,12 +683,7 @@ public final class PersistentMapImpl<Key, Value> implements PersistentMapBase<Ke
}
if (myValueStorage.performChunksCompaction(readResult.chunksCount)) {
long newValueOffset = myValueStorage.compactChunks(new AppendablePersistentMap.ValueDataAppender() {
@Override
public void append(@NotNull DataOutput out) throws IOException {
myValueExternalizer.save(out, valueRead);
}
}, readResult);
long newValueOffset = myValueStorage.compactChunks(out -> myValueExternalizer.save(out, valueRead), readResult);
myEnumerator.lockStorageWrite();
try {
@@ -1022,9 +1009,7 @@ public final class PersistentMapImpl<Key, Value> implements PersistentMapBase<Ke
if (parentFile == null) return ArrayUtil.EMPTY_FILE_ARRAY;
Path fileName = fileFromDirectory.getFileName();
try (Stream<Path> children = Files.list(parentFile)) {
return children.filter(p -> {
return p.getFileName().toString().startsWith(fileName.toString());
}).map(p -> p.toFile()).toArray(File[]::new);
return children.filter(p -> p.getFileName().toString().startsWith(fileName.toString())).map(Path::toFile).toArray(File[]::new);
}
}