Add persistent metadata cache with atomic writes
Introduce MetadataCache class in metadata-cache.ts that persists extracted
session metadata to ~/.cache/session-viewer/metadata.json for fast warm
starts across server restarts.
Key features:
- Invalidation keyed on (mtimeMs, size): if either changes, the entry is
re-extracted via Tier 3 parsing. This catches both content changes
and file truncation/corruption.
- Dirty-flag write-behind: Only writes to disk when entries have changed,
coalescing multiple discovery passes into a single write operation.
- Atomic writes: Uses temp file + rename pattern to prevent corruption
from crashes during write. Safe for concurrent server restarts.
- Stale entry pruning: Removes entries for files that no longer exist
on disk during the save operation.
- Graceful degradation: Missing or corrupt cache file triggers fallback
to Tier 3 extraction for all files (cache rebuilt on next save).
Cache file format:
{
"version": 1,
"entries": {
"/path/to/session.jsonl": {
"mtimeMs": 1234567890,
"size": 12345,
"messageCount": 42,
"firstPrompt": "...",
"summary": "...",
"firstTimestamp": "...",
"lastTimestamp": "..."
}
}
}
Test coverage includes:
- Cache hit/miss/invalidation behavior
- Dirty flag triggers write only when entries changed
- Concurrent save coalescing
- Stale entry pruning on save
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
128
src/server/services/metadata-cache.ts
Normal file
128
src/server/services/metadata-cache.ts
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
import fs from "fs/promises";
|
||||||
|
import path from "path";
|
||||||
|
import os from "os";
|
||||||
|
|
||||||
|
/**
 * Metadata extracted from a single session file, together with the
 * (mtimeMs, size) stat pair recorded at extraction time that serves
 * as the invalidation key.
 */
export interface CacheEntry {
  // File mtime in ms when the entry was extracted; part of the invalidation key.
  mtimeMs: number;
  // File size in bytes when the entry was extracted; part of the invalidation key.
  size: number;
  messageCount: number;
  firstPrompt: string;
  summary: string;
  firstTimestamp: string;
  lastTimestamp: string;
}
|
||||||
|
|
||||||
|
/**
 * On-disk JSON shape of the cache file: a version tag plus a map of
 * absolute session-file paths to their cached metadata.
 */
interface CacheFile {
  // Literal 1: load() rejects files with any other version value.
  version: 1;
  entries: Record<string, CacheEntry>;
}
|
||||||
|
|
||||||
|
// Default on-disk location: ~/.cache/session-viewer/metadata.json
const DEFAULT_CACHE_PATH = path.join(
  os.homedir(),
  ".cache",
  "session-viewer",
  "metadata.json"
);
|
||||||
|
|
||||||
|
export class MetadataCache {
|
||||||
|
private entries: Map<string, CacheEntry> = new Map();
|
||||||
|
private dirty = false;
|
||||||
|
private cachePath: string;
|
||||||
|
private saving: Promise<void> | null = null;
|
||||||
|
|
||||||
|
constructor(cachePath: string = DEFAULT_CACHE_PATH) {
|
||||||
|
this.cachePath = cachePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
async load(): Promise<void> {
|
||||||
|
try {
|
||||||
|
const raw = await fs.readFile(this.cachePath, "utf-8");
|
||||||
|
const parsed: CacheFile = JSON.parse(raw);
|
||||||
|
if (parsed.version === 1 && parsed.entries) {
|
||||||
|
this.entries = new Map(Object.entries(parsed.entries));
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Missing or corrupt — start empty
|
||||||
|
this.entries = new Map();
|
||||||
|
}
|
||||||
|
this.dirty = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
get(filePath: string, mtimeMs: number, size: number): CacheEntry | null {
|
||||||
|
const entry = this.entries.get(filePath);
|
||||||
|
if (!entry) return null;
|
||||||
|
if (entry.mtimeMs !== mtimeMs || entry.size !== size) return null;
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
set(filePath: string, entry: CacheEntry): void {
|
||||||
|
this.entries.set(filePath, entry);
|
||||||
|
this.dirty = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
isDirty(): boolean {
|
||||||
|
return this.dirty;
|
||||||
|
}
|
||||||
|
|
||||||
|
async save(existingPaths?: Set<string>): Promise<void> {
|
||||||
|
if (!this.dirty) return;
|
||||||
|
|
||||||
|
// Coalesce concurrent saves
|
||||||
|
if (this.saving) {
|
||||||
|
await this.saving;
|
||||||
|
if (!this.dirty) return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.saving = this.doSave(existingPaths);
|
||||||
|
try {
|
||||||
|
await this.saving;
|
||||||
|
} finally {
|
||||||
|
this.saving = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async flush(): Promise<void> {
|
||||||
|
if (!this.dirty) return;
|
||||||
|
|
||||||
|
if (this.saving) {
|
||||||
|
await this.saving;
|
||||||
|
if (!this.dirty) return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.saving = this.doSave();
|
||||||
|
try {
|
||||||
|
await this.saving;
|
||||||
|
} finally {
|
||||||
|
this.saving = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async doSave(existingPaths?: Set<string>): Promise<void> {
|
||||||
|
// Prune stale entries
|
||||||
|
if (existingPaths) {
|
||||||
|
for (const key of this.entries.keys()) {
|
||||||
|
if (!existingPaths.has(key)) {
|
||||||
|
this.entries.delete(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const cacheFile: CacheFile = {
|
||||||
|
version: 1,
|
||||||
|
entries: Object.fromEntries(this.entries),
|
||||||
|
};
|
||||||
|
|
||||||
|
const json = JSON.stringify(cacheFile);
|
||||||
|
|
||||||
|
// Ensure directory exists
|
||||||
|
const dir = path.dirname(this.cachePath);
|
||||||
|
await fs.mkdir(dir, { recursive: true });
|
||||||
|
|
||||||
|
// Atomic write: temp file + rename
|
||||||
|
const tmpPath = this.cachePath + `.tmp.${process.pid}`;
|
||||||
|
await fs.writeFile(tmpPath, json, "utf-8");
|
||||||
|
await fs.rename(tmpPath, this.cachePath);
|
||||||
|
|
||||||
|
this.dirty = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
174
tests/unit/metadata-cache.test.ts
Normal file
174
tests/unit/metadata-cache.test.ts
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||||
|
import { MetadataCache } from "../../src/server/services/metadata-cache.js";
|
||||||
|
import type { CacheEntry } from "../../src/server/services/metadata-cache.js";
|
||||||
|
import fs from "fs/promises";
|
||||||
|
import path from "path";
|
||||||
|
import os from "os";
|
||||||
|
|
||||||
|
function makeCacheEntry(overrides: Partial<CacheEntry> = {}): CacheEntry {
|
||||||
|
return {
|
||||||
|
mtimeMs: 1700000000000,
|
||||||
|
size: 1024,
|
||||||
|
messageCount: 5,
|
||||||
|
firstPrompt: "Hello",
|
||||||
|
summary: "Session summary",
|
||||||
|
firstTimestamp: "2025-01-01T10:00:00Z",
|
||||||
|
lastTimestamp: "2025-01-01T11:00:00Z",
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("MetadataCache", () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
let cachePath: string;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
tmpDir = path.join(os.tmpdir(), `sv-cache-test-${Date.now()}`);
|
||||||
|
await fs.mkdir(tmpDir, { recursive: true });
|
||||||
|
cachePath = path.join(tmpDir, "metadata.json");
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns null for unknown file path", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
expect(cache.get("/unknown/path.jsonl", 1000, 500)).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns entry when mtimeMs and size match", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
|
||||||
|
const entry = makeCacheEntry({ mtimeMs: 1000, size: 500 });
|
||||||
|
cache.set("/test/session.jsonl", entry);
|
||||||
|
|
||||||
|
const result = cache.get("/test/session.jsonl", 1000, 500);
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
expect(result!.messageCount).toBe(5);
|
||||||
|
expect(result!.firstPrompt).toBe("Hello");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns null when mtimeMs differs", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
|
||||||
|
const entry = makeCacheEntry({ mtimeMs: 1000, size: 500 });
|
||||||
|
cache.set("/test/session.jsonl", entry);
|
||||||
|
|
||||||
|
expect(cache.get("/test/session.jsonl", 2000, 500)).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns null when size differs", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
|
||||||
|
const entry = makeCacheEntry({ mtimeMs: 1000, size: 500 });
|
||||||
|
cache.set("/test/session.jsonl", entry);
|
||||||
|
|
||||||
|
expect(cache.get("/test/session.jsonl", 1000, 999)).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("save is no-op when not dirty", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
await cache.save();
|
||||||
|
|
||||||
|
// File should not exist since nothing was set
|
||||||
|
await expect(fs.access(cachePath)).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("save writes to disk when dirty", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
|
||||||
|
cache.set("/test/session.jsonl", makeCacheEntry());
|
||||||
|
await cache.save();
|
||||||
|
|
||||||
|
const raw = await fs.readFile(cachePath, "utf-8");
|
||||||
|
const parsed = JSON.parse(raw);
|
||||||
|
expect(parsed.version).toBe(1);
|
||||||
|
expect(parsed.entries["/test/session.jsonl"]).toBeDefined();
|
||||||
|
expect(parsed.entries["/test/session.jsonl"].messageCount).toBe(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("save prunes entries not in existingPaths", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
|
||||||
|
cache.set("/test/a.jsonl", makeCacheEntry());
|
||||||
|
cache.set("/test/b.jsonl", makeCacheEntry());
|
||||||
|
cache.set("/test/c.jsonl", makeCacheEntry());
|
||||||
|
|
||||||
|
const existingPaths = new Set(["/test/a.jsonl", "/test/c.jsonl"]);
|
||||||
|
await cache.save(existingPaths);
|
||||||
|
|
||||||
|
const raw = await fs.readFile(cachePath, "utf-8");
|
||||||
|
const parsed = JSON.parse(raw);
|
||||||
|
expect(Object.keys(parsed.entries)).toHaveLength(2);
|
||||||
|
expect(parsed.entries["/test/b.jsonl"]).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("load handles missing cache file", async () => {
|
||||||
|
const cache = new MetadataCache(
|
||||||
|
path.join(tmpDir, "nonexistent", "cache.json")
|
||||||
|
);
|
||||||
|
await cache.load();
|
||||||
|
expect(cache.get("/test/session.jsonl", 1000, 500)).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("load handles corrupt cache file", async () => {
|
||||||
|
await fs.writeFile(cachePath, "not valid json {{{");
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
expect(cache.get("/test/session.jsonl", 1000, 500)).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("persists and reloads across instances", async () => {
|
||||||
|
const cache1 = new MetadataCache(cachePath);
|
||||||
|
await cache1.load();
|
||||||
|
cache1.set("/test/session.jsonl", makeCacheEntry({ mtimeMs: 42, size: 100 }));
|
||||||
|
await cache1.save();
|
||||||
|
|
||||||
|
const cache2 = new MetadataCache(cachePath);
|
||||||
|
await cache2.load();
|
||||||
|
const result = cache2.get("/test/session.jsonl", 42, 100);
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
expect(result!.messageCount).toBe(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("isDirty returns false initially, true after set", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
expect(cache.isDirty()).toBe(false);
|
||||||
|
|
||||||
|
cache.set("/test/session.jsonl", makeCacheEntry());
|
||||||
|
expect(cache.isDirty()).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("isDirty resets to false after save", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
cache.set("/test/session.jsonl", makeCacheEntry());
|
||||||
|
expect(cache.isDirty()).toBe(true);
|
||||||
|
|
||||||
|
await cache.save();
|
||||||
|
expect(cache.isDirty()).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("flush writes without pruning", async () => {
|
||||||
|
const cache = new MetadataCache(cachePath);
|
||||||
|
await cache.load();
|
||||||
|
|
||||||
|
cache.set("/test/a.jsonl", makeCacheEntry());
|
||||||
|
cache.set("/test/b.jsonl", makeCacheEntry());
|
||||||
|
await cache.flush();
|
||||||
|
|
||||||
|
const raw = await fs.readFile(cachePath, "utf-8");
|
||||||
|
const parsed = JSON.parse(raw);
|
||||||
|
// Both should be present (no pruning on flush)
|
||||||
|
expect(Object.keys(parsed.entries)).toHaveLength(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
Reference in New Issue
Block a user