From 7d52eab3a5f62eccc44b5bcd5d320ef1fa525e90 Mon Sep 17 00:00:00 2001 From: Martin Lippert Date: Fri, 13 Dec 2024 12:54:55 +0100 Subject: [PATCH] delta-based index cache: added compacting of cache store --- .../cache/IndexCacheOnDiscDeltaBased.java | 46 +++++++++++++--- .../test/IndexCacheOnDiscDeltaBasedTest.java | 52 +++++++++++++++++++ 2 files changed, 92 insertions(+), 6 deletions(-) diff --git a/headless-services/spring-boot-language-server/src/main/java/org/springframework/ide/vscode/boot/index/cache/IndexCacheOnDiscDeltaBased.java b/headless-services/spring-boot-language-server/src/main/java/org/springframework/ide/vscode/boot/index/cache/IndexCacheOnDiscDeltaBased.java index df06fb7d2a..fb94bc082e 100644 --- a/headless-services/spring-boot-language-server/src/main/java/org/springframework/ide/vscode/boot/index/cache/IndexCacheOnDiscDeltaBased.java +++ b/headless-services/spring-boot-language-server/src/main/java/org/springframework/ide/vscode/boot/index/cache/IndexCacheOnDiscDeltaBased.java @@ -74,6 +74,10 @@ public class IndexCacheOnDiscDeltaBased implements IndexCache { private final File cacheDirectory; private final Map> timestamps; + private final Map compactingCounter; + private final int compactingCounterBoundary; + + private static final int DEFAULT_COMPACTING_TRIGGER = 20; private static final Logger log = LoggerFactory.getLogger(IndexCacheOnDiscDeltaBased.class); @@ -89,6 +93,8 @@ public IndexCacheOnDiscDeltaBased(File cacheDirectory) { } this.timestamps = new ConcurrentHashMap<>(); + this.compactingCounter = new ConcurrentHashMap<>(); + this.compactingCounterBoundary = DEFAULT_COMPACTING_TRIGGER; } @Override @@ -116,6 +122,9 @@ public void store(IndexCacheKey cacheKey, String[] fi ConcurrentMap timestampMap = timestampedFiles.entrySet().stream() .collect(Collectors.toConcurrentMap(e -> InternalFileIdentifier.fromPath(e.getKey()), e -> e.getValue())); this.timestamps.put(cacheKey, timestampMap); + + this.compactingCounter.put(cacheKey, 0); + 
deleteOutdatedCacheFiles(cacheKey); } @SuppressWarnings("unchecked") @@ -124,7 +133,8 @@ public Pair> retrieve(I File cacheStore = new File(cacheDirectory, cacheKey.toString() + ".json"); if (cacheStore.exists()) { - IndexCacheStore store = retrieveStoreFromIncrementalStorage(cacheKey, type); + Pair, Integer> result = retrieveStoreFromIncrementalStorage(cacheKey, type); + IndexCacheStore store = result.getLeft(); SortedMap timestampedFiles = Arrays.stream(files) .filter(file -> new File(file).exists()) @@ -153,6 +163,8 @@ public Pair> retrieve(I ConcurrentMap timestampMap = timestampedFiles.entrySet().stream() .collect(Collectors.toConcurrentMap(e -> InternalFileIdentifier.fromPath(e.getKey()), e -> e.getValue())); this.timestamps.put(cacheKey, timestampMap); + this.compactingCounter.put(cacheKey, result.getRight()); + compact(cacheKey, type); return Pair.of( (T[]) symbols.toArray((T[]) Array.newInstance(type, symbols.size())), @@ -179,6 +191,9 @@ public void removeFiles(IndexCacheKey cacheKey, Strin timestampsMap.remove(InternalFileIdentifier.fromPath(file)); } } + + this.compactingCounter.merge(cacheKey, 1, Integer::sum); + compact(cacheKey, type); } @Override @@ -190,6 +205,7 @@ public void remove(IndexCacheKey cacheKey) { // update local timestamp cache this.timestamps.remove(cacheKey); + this.compactingCounter.remove(cacheKey); } @Override @@ -212,6 +228,8 @@ public void update(IndexCacheKey cacheKey, String fil // update local timestamp cache Map timestampsMap = this.timestamps.computeIfAbsent(cacheKey, (s) -> new ConcurrentHashMap<>()); timestampsMap.put(InternalFileIdentifier.fromPath(file), lastModified); + this.compactingCounter.merge(cacheKey, 1, Integer::sum); + compact(cacheKey, type); } @Override @@ -238,6 +256,8 @@ public void update(IndexCacheKey cacheKey, String[] f for (int i = 0; i < files.length; i++) { timestampsMap.put(InternalFileIdentifier.fromPath(files[i]), lastModified[i]); } + this.compactingCounter.merge(cacheKey, 1, Integer::sum); + 
compact(cacheKey, type); } @Override @@ -254,6 +274,10 @@ public long getModificationTimestamp(IndexCacheKey cacheKey, String file) { return 0; } + + public int getCompactingCounterBoundary() { + return compactingCounterBoundary; + } private boolean isFileMatch(SortedMap files1, SortedMap files2) { if (files1.size() != files2.size()) return false; @@ -265,8 +289,18 @@ private boolean isFileMatch(SortedMap files1, SortedMap void compact(IndexCacheKey cacheKey, Class type) { + if (this.compactingCounter.get(cacheKey) > this.compactingCounterBoundary) { + IndexCacheStore compactedData = retrieveStoreFromIncrementalStorage(cacheKey, type).getLeft(); + persist(cacheKey, new DeltaSnapshot(compactedData), false); + this.compactingCounter.put(cacheKey, 0); + + deleteOutdatedCacheFiles(cacheKey); + } + } - private void cleanupCache(IndexCacheKey cacheKey) { + private void deleteOutdatedCacheFiles(IndexCacheKey cacheKey) { File[] cacheFiles = this.cacheDirectory.listFiles(); for (int i = 0; i < cacheFiles.length; i++) { @@ -298,16 +332,15 @@ private void persist(IndexCacheKey cacheKey, DeltaEle gson.toJson(deltaStorage, writer); writer.write("\n"); - - cleanupCache(cacheKey); } catch (Exception e) { log.error("cannot write symbol cache", e); } } - private IndexCacheStore retrieveStoreFromIncrementalStorage(IndexCacheKey cacheKey, Class type) { + private Pair, Integer> retrieveStoreFromIncrementalStorage(IndexCacheKey cacheKey, Class type) { IndexCacheStore store = new IndexCacheStore<>(new TreeMap<>(), new ArrayList(), new HashMap<>(), type); + int deltaCounter = 0; File cacheStore = new File(cacheDirectory, cacheKey.toString() + ".json"); if (cacheStore.exists()) { @@ -319,6 +352,7 @@ private IndexCacheStore retrieveStoreFromIncrement while (reader.peek() != JsonToken.END_DOCUMENT) { DeltaStorage delta = gson.fromJson(reader, DeltaStorage.class); store = delta.storedElement.apply(store); + deltaCounter++; } } @@ -326,7 +360,7 @@ private IndexCacheStore 
retrieveStoreFromIncrement log.error("error reading cached symbols", e); } } - return store; + return Pair.of(store, deltaCounter); } diff --git a/headless-services/spring-boot-language-server/src/test/java/org/springframework/ide/vscode/boot/index/cache/test/IndexCacheOnDiscDeltaBasedTest.java b/headless-services/spring-boot-language-server/src/test/java/org/springframework/ide/vscode/boot/index/cache/test/IndexCacheOnDiscDeltaBasedTest.java index ab3a942bbb..d23183fe15 100644 --- a/headless-services/spring-boot-language-server/src/test/java/org/springframework/ide/vscode/boot/index/cache/test/IndexCacheOnDiscDeltaBasedTest.java +++ b/headless-services/spring-boot-language-server/src/test/java/org/springframework/ide/vscode/boot/index/cache/test/IndexCacheOnDiscDeltaBasedTest.java @@ -333,6 +333,58 @@ void testSymbolAddedToExistingFile() throws Exception { assertEquals(timeFile1.toMillis() + 2000, cache.getModificationTimestamp(CACHE_KEY_VERSION_1, file1.toString())); } + @Test + void testStorageFileIncrementallyUpdatedAndCompacted() throws Exception { + Path file1 = Paths.get(tempDir.toAbsolutePath().toString(), "tempFile1"); + Files.createFile(file1); + + FileTime timeFile1 = Files.getLastModifiedTime(file1); + String[] files = {file1.toAbsolutePath().toString()}; + + String doc1URI = UriUtil.toUri(file1.toFile()).toString(); + + List generatedSymbols1 = new ArrayList<>(); + WorkspaceSymbol symbol1 = new WorkspaceSymbol("symbol1", SymbolKind.Field, Either.forLeft(new Location("docURI", new Range(new Position(3, 10), new Position(3, 20))))); + EnhancedSymbolInformation enhancedSymbol1 = new EnhancedSymbolInformation(symbol1, null); + generatedSymbols1.add(new CachedSymbol(doc1URI, timeFile1.toMillis(), enhancedSymbol1)); + + cache.store(CACHE_KEY_VERSION_1, files, generatedSymbols1, null, CachedSymbol.class); + + Path path = tempDir.resolve(Paths.get(CACHE_KEY_VERSION_1.toString() + STORAGE_FILE_EXTENSION)); + long initialCacheStorageSize = Files.size(path); + 
long lastCacheStorageSize = initialCacheStorageSize; + + int compactingBoundary = cache.getCompactingCounterBoundary(); + + for (int i = 0; i < compactingBoundary; i++) { + cache.update(CACHE_KEY_VERSION_1, file1.toAbsolutePath().toString(), timeFile1.toMillis() + (100 * i), generatedSymbols1, null, CachedSymbol.class); + + // check storage size (to see if updates are stored incrementally) + long updatedCacheStorageSize = Files.size(path); + assertTrue(updatedCacheStorageSize > lastCacheStorageSize, "cache storage size in iteration: " + i); + + lastCacheStorageSize = updatedCacheStorageSize; + + // check internal timestamp updates + long newModificationTimestamp = cache.getModificationTimestamp(CACHE_KEY_VERSION_1, file1.toString()); + assertEquals(timeFile1.toMillis() + (100 * i), newModificationTimestamp); + + } + + // test compacting after trigger boundary + cache.update(CACHE_KEY_VERSION_1, file1.toAbsolutePath().toString(), timeFile1.toMillis() + (100 * compactingBoundary), generatedSymbols1, null, CachedSymbol.class); + + // check storage size (to see if the cache store got compacted) + long updatedCacheStorageSize = Files.size(path); + assertTrue(updatedCacheStorageSize < lastCacheStorageSize, "cache storage size after compacting"); + + lastCacheStorageSize = updatedCacheStorageSize; + + // check internal timestamp updates + long newModificationTimestamp = cache.getModificationTimestamp(CACHE_KEY_VERSION_1, file1.toString()); + assertEquals(timeFile1.toMillis() + (100 * compactingBoundary), newModificationTimestamp); + } + @Test void testSymbolsAddedToMultipleFiles() throws Exception {