Compare commits
8 Commits
b69dffc398
...
66f8cc3eb4
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
66f8cc3eb4 | ||
|
|
04343f6a9a | ||
|
|
007cbbcb69 | ||
|
|
b67c302464 | ||
|
|
8fddd50193 | ||
|
|
f15a1b1b58 | ||
|
|
eda20a9886 | ||
|
|
c20652924d |
11
.beads/.gitignore
vendored
Normal file
11
.beads/.gitignore
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
# Database
|
||||
*.db
|
||||
*.db-shm
|
||||
*.db-wal
|
||||
|
||||
# Lock files
|
||||
*.lock
|
||||
|
||||
# Temporary
|
||||
last-touched
|
||||
*.tmp
|
||||
4
.beads/config.yaml
Normal file
4
.beads/config.yaml
Normal file
@@ -0,0 +1,4 @@
|
||||
# Beads Project Configuration
|
||||
# issue_prefix: bd
|
||||
# default_priority: 2
|
||||
# default_type: task
|
||||
12
.beads/issues.jsonl
Normal file
12
.beads/issues.jsonl
Normal file
@@ -0,0 +1,12 @@
|
||||
{"id":"bd-10w","title":"CP3: Unit tests for Tier 1 index validation","description":"## Background\nTier 1 validation has specific edge cases around timestamp normalization and stale detection that must be tested independently from the full discovery pipeline.\n\n## Approach\nAdd tests to tests/unit/session-discovery.test.ts under describe \"Tier 1 index validation\". Tests create temp project directories with both .jsonl files and sessions-index.json, then verify Tier 1 behavior.\n\nTest cases:\n1. Uses index messageCount/summary/firstPrompt when index modified matches file mtime within 1s\n2. Rejects stale index entries when mtime differs by > 1s (falls through to Tier 3)\n3. Handles missing modified field in index entry (falls through to Tier 2/3)\n4. SessionEntry.created and .modified always from stat even when Tier 1 is trusted\n5. Missing sessions-index.json: all sessions still discovered via Tier 2/3\n6. Corrupt sessions-index.json (invalid JSON): warning logged, all sessions still discovered\n7. 
Legacy index format (raw array, no version wrapper): still parsed correctly\n\n## Acceptance Criteria\n- [ ] All 7 test cases pass\n- [ ] Tests use temp directories with controlled file timestamps\n- [ ] npm run test -- session-discovery passes\n\n## Files\n- MODIFY: tests/unit/session-discovery.test.ts (add tests in \"Tier 1 index validation\" describe block)\n\n## TDD Loop\nRED: Write tests (fail until bd-3g5 Tier 1 implementation is done)\nGREEN: Tests pass after Tier 1 implementation\nVERIFY: npm run test -- session-discovery\n\n## Edge Cases\n- Setting file mtime in tests: use fs.utimes() to control mtime precisely\n- Index with ISO string timestamps vs epoch ms: test both formats","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T17:48:25.641546Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:27:28.878028Z","closed_at":"2026-02-04T18:27:28.877984Z","close_reason":"Tests already written as part of bd-3g5: 6 tests covering Tier 1 hit, miss, no-modified-field, missing index, corrupt index, stat-derived timestamps.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-10w","depends_on_id":"bd-3g5","type":"blocks","created_at":"2026-02-04T17:49:30.660263Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-18a","title":"CP2: Implement MetadataCache with dirty-flag write-behind and atomic writes","description":"## Background\nThe persistent metadata cache at ~/.cache/session-viewer/metadata.json avoids re-parsing unchanged JSONL files across server restarts. This is Tier 2 in the tiered lookup — checked after Tier 1 (index) fails, before Tier 3 (full parse).\n\n## Approach\nCreate MetadataCache class in src/server/services/metadata-cache.ts:\n\n```typescript\ninterface CacheEntry {\n mtimeMs: number;\n size: number;\n messageCount: number;\n firstPrompt: string;\n summary: string;\n created: string; // ISO from file birthtime\n modified: string; // ISO from file mtime\n firstTimestamp: string;\n lastTimestamp: string;\n}\n\ninterface CacheFile {\n version: 1;\n entries: Record<string, CacheEntry>; // keyed by absolute file path\n}\n\nexport class MetadataCache {\n constructor(cachePath?: string) // default: ~/.cache/session-viewer/metadata.json\n\n async load(): Promise<void> // Load from disk, graceful on missing/corrupt\n get(filePath: string, mtimeMs: number, size: number): CacheEntry | null // Tier 2 lookup\n set(filePath: string, entry: CacheEntry): void // Mark dirty\n async save(existingPaths?: Set<string>): Promise<void> // Prune stale, atomic write if dirty\n async flush(): Promise<void> // Force write if dirty (for shutdown)\n}\n```\n\nKey behaviors:\n1. load(): Read + JSON.parse cache file. On corrupt/missing: start empty, no error.\n2. get(): Return entry only if mtimeMs AND size match. Otherwise return null (cache miss).\n3. set(): Store entry, set dirty flag to true.\n4. save(existingPaths): If dirty, prune entries whose keys are not in existingPaths, write to temp file then fs.rename (atomic). Reset dirty flag.\n5. flush(): Same as save() but without pruning. Called on shutdown.\n6. Shutdown hooks: Register process.on(\"SIGTERM\") and process.on(\"SIGINT\") handlers that call flush(). 
Register once in module scope or via an init function.\n\nWrite-behind strategy: discoverSessions() calls save() asynchronously after returning results. The Promise is fire-and-forget but errors are logged.\n\nIntegrate into discoverSessions() in session-discovery.ts:\n- Load cache once on first call (lazy init)\n- Before Tier 3 parse: check cache.get(filePath, stat.mtimeMs, stat.size)\n- After Tier 3 parse: call cache.set(filePath, extractedEntry)\n- After building all entries: fire-and-forget cache.save(discoveredPaths)\n\n## Acceptance Criteria\n- [ ] MetadataCache class exported from src/server/services/metadata-cache.ts\n- [ ] Cache hit returns entry when mtimeMs + size match\n- [ ] Cache miss (returns null) when mtimeMs or size differ\n- [ ] Dirty flag only set when set() is called (not on load/get)\n- [ ] save() is no-op when not dirty\n- [ ] Atomic writes: temp file + rename pattern\n- [ ] Corrupt cache file loads gracefully (empty cache, no throw)\n- [ ] Missing cache file loads gracefully (empty cache, no throw)\n- [ ] Stale entries pruned on save\n- [ ] Shutdown hooks registered for SIGTERM/SIGINT\n- [ ] Cache directory created if it does not exist (mkdir -p equivalent)\n- [ ] npm run test passes\n\n## Files\n- CREATE: src/server/services/metadata-cache.ts\n- MODIFY: src/server/services/session-discovery.ts (integrate cache into tiered lookup)\n\n## TDD Loop\nRED: tests/unit/metadata-cache.test.ts — tests:\n - \"returns null for unknown file path\"\n - \"returns entry when mtimeMs and size match\"\n - \"returns null when mtimeMs differs\"\n - \"returns null when size differs\"\n - \"save is no-op when not dirty\"\n - \"save writes to disk when dirty\"\n - \"save prunes entries not in existingPaths\"\n - \"load handles missing cache file\"\n - \"load handles corrupt cache file\"\n - \"atomic write: file not corrupted on crash\"\nGREEN: Implement MetadataCache\nVERIFY: npm run test -- metadata-cache\n\n## Edge Cases\n- Cache directory does not exist: 
create with fs.mkdir recursive\n- Cache file locked by another process: log warning, continue without cache\n- Server killed with SIGKILL (hard kill): cache may be lost — acceptable, rebuilt on next cold start\n- Concurrent save() calls: second save waits for first (or coalesce via dirty flag)\n- Very large cache (3000+ entries): JSON serialization should still be < 50ms","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T17:48:03.919559Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:25:16.365118Z","closed_at":"2026-02-04T18:25:16.365065Z","close_reason":"Implemented MetadataCache class with dirty-flag write-behind, atomic writes (temp+rename), prune stale entries, load/save/flush. Integrated into discoverSessions() as Tier 2 lookup. 13 unit tests.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-18a","depends_on_id":"bd-34v","type":"blocks","created_at":"2026-02-04T17:49:30.583939Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-1dn","title":"CP4: Bounded concurrency for stat and parse phases","description":"## Background\nCold start with 3,103 files requires bounded parallelism to avoid file-handle exhaustion and IO thrash. Without limits, Node.js will attempt thousands of concurrent fs.stat() and fs.readFile() calls, potentially hitting EMFILE errors.\n\n## Approach\nAdd a lightweight concurrency limiter. Options:\n- Install p-limit (npm i p-limit) — well-maintained, zero deps, 1.2KB\n- Or hand-roll a simple semaphore (~15 lines)\n\nRecommendation: p-limit for clarity and maintenance. It is ESM-only since v4, so use dynamic import or pin v3.x if the project uses CJS.\n\nImplementation in session-discovery.ts:\n\n```typescript\nimport pLimit from \"p-limit\";\n\nconst STAT_CONCURRENCY = 64;\nconst PARSE_CONCURRENCY = 8;\n\n// In discoverSessions(), per project:\nconst statLimit = pLimit(STAT_CONCURRENCY);\nconst parseLimit = pLimit(PARSE_CONCURRENCY);\n\n// Stat phase: batch all files\nconst statResults = await Promise.all(\n jsonlFiles.map(f => statLimit(() => safeStat(f)))\n);\n\n// Parse phase: only Tier 3 misses\nconst parseResults = await Promise.all(\n tier3Files.map(f => parseLimit(() => safeReadAndExtract(f)))\n);\n```\n\nsafeStat() wraps fs.stat in try/catch, returns null on ENOENT/EACCES (with debug log).\nsafeReadAndExtract() wraps fs.readFile + extractSessionMetadata, returns null on failure.\n\nPerformance targets:\n- Cold start (no cache, no index): < 5s for 3,103 files\n- Warm start (cache exists, few changes): < 1s\n- Incremental (cache + few new files): ~500ms + ~50ms per new file\n\n## Acceptance Criteria\n- [ ] p-limit (or equivalent) added as dependency\n- [ ] Stat phase uses concurrency limit of 64\n- [ ] Parse phase uses concurrency limit of 8\n- [ ] ENOENT and EACCES errors from stat silently handled (debug log, skip file)\n- [ ] Read errors silently handled (debug log, skip file)\n- [ ] No EMFILE errors on cold start with 3000+ files\n- [ ] Warm start < 
1s verified on real dataset (manual verification step)\n- [ ] npm run test passes\n\n## Files\n- MODIFY: package.json (add p-limit dependency)\n- MODIFY: src/server/services/session-discovery.ts (wrap stat + parse in concurrency limiters)\n\n## TDD Loop\nRED: Manual performance test — time cold start on real ~/.claude/projects\nGREEN: Add concurrency limits, re-test\nVERIFY: npm run test && manual timing of warm/cold starts\n\n## Edge Cases\n- p-limit v4+ is ESM-only: check if project tsconfig uses \"module\": \"ESNext\" or \"NodeNext\". If CJS, use p-limit@3 or hand-roll.\n- Concurrency limits are per-project. With many small projects, total concurrency could still be high. Consider a global limiter shared across projects if needed.\n- Files actively being written during stat phase: mtime captured at stat time, content may differ at read time. Next discovery pass will re-extract (mtime changed).","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T17:48:36.609991Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:28:36.268754Z","closed_at":"2026-02-04T18:28:36.268402Z","close_reason":"Added mapWithLimit() concurrency limiter (32 concurrent ops per project) to prevent EMFILE on large session directories. Hand-rolled to avoid external dependency. No behavior change to existing tests.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-1dn","depends_on_id":"bd-34v","type":"blocks","created_at":"2026-02-04T17:49:30.683780Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-1ed","title":"CP1: Unit tests for filesystem-first discovery and metadata extraction","description":"## Background\nDiscovery correctness must be verified with tests covering the key scenarios from the PRD edge cases table. These tests validate the integration of filesystem scanning + metadata extraction.\n\n## Approach\nAdd tests to the existing test file, building on its temp directory pattern (uses os.tmpdir, writes .jsonl fixtures, cleans up).\n\nTest cases:\n1. Discovers all .jsonl files without sessions-index.json present\n2. SessionEntry.created from stat.birthtimeMs, .modified from stat.mtimeMs\n3. Duration computed from first/last JSONL timestamps (not index)\n4. Silently skips files that disappear between readdir and stat (create file, delete before stat mock)\n5. Empty .jsonl file returns messageCount: 0, session still appears in list\n6. extractSessionMetadata().messageCount === parseSessionContent().messages.length on fixture data\n7. Sessions sorted by modified descending\n8. Path traversal in filename rejected (symlink or \"..\" in name)\n9. 
Multiple project directories scanned and merged\n\n## Acceptance Criteria\n- [ ] All 9+ test cases pass\n- [ ] Tests use temp directories (not real ~/.claude/projects)\n- [ ] Cleanup runs even on test failure (afterEach or try/finally)\n- [ ] npm run test -- session-discovery passes\n\n## Files\n- MODIFY: tests/unit/session-discovery.test.ts (add describe block \"filesystem-first discovery\")\n\n## TDD Loop\nRED: Write tests (will fail until bd-34v discovery rewrite is done)\nGREEN: Tests pass after discovery loop rewrite\nVERIFY: npm run test -- session-discovery\n\n## Edge Cases\n- Test cleanup must handle partially-created temp dirs\n- stat.birthtimeMs may equal 0 on some filesystems (Linux ext4) — test should not hardcode platform-specific birthtimeMs behavior","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T17:47:52.189987Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:23:46.313213Z","closed_at":"2026-02-04T18:23:46.313152Z","close_reason":"Tests already written as part of bd-34v implementation. 9 tests cover all spec requirements: filesystem discovery, stat timestamps, JSONL duration, TOCTOU resilience, empty files, sorting, extension filtering, multi-project aggregation.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-1ed","depends_on_id":"bd-34v","type":"blocks","created_at":"2026-02-04T17:49:30.541764Z","created_by":"tayloreernisse"},{"issue_id":"bd-1ed","depends_on_id":"bd-3pr","type":"blocks","created_at":"2026-02-04T17:49:30.559261Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-1tm","title":"CP0: Extract countMessagesForLine() and classifyLine() shared helpers","description":"## Background\nThe PRD requires exact message counts — list counts must match detail-view counts. The existing extractMessages() function (session-parser.ts:78-233) has non-trivial expansion rules that must be encoded in a shared helper so both the metadata extractor and full parser produce identical counts.\n\n## Approach\nExtract two helpers from extractMessages() logic:\n\n```typescript\nexport type LineClassification =\n | \"user\" | \"assistant\" | \"progress\" | \"file-history-snapshot\"\n | \"summary\" | \"system\" | \"queue-operation\" | \"unknown\";\n\nexport function classifyLine(parsed: RawLine): LineClassification\nexport function countMessagesForLine(parsed: RawLine): number\n```\n\nExpansion rules countMessagesForLine must encode:\n- type=user, string content: 1\n- type=user, array content: count of (tool_result + text) blocks in array\n- user text block containing \"<system-reminder>\": still counts as 1 (reclassified as system_message)\n- type=assistant, string content: 1\n- type=assistant, array content: count of (thinking + text + tool_use) blocks\n- type=progress: 1\n- type=file-history-snapshot: 1\n- type=summary: 1\n- type=system: 0 (skipped)\n- type=queue-operation: 0 (skipped)\n- unknown/missing type: 0\n\nThen refactor extractMessages() to use classifyLine() for its initial type dispatch (the switch on raw.type around line 88). 
countMessagesForLine() can be validated against extractMessages() output.\n\n## Acceptance Criteria\n- [ ] classifyLine() and countMessagesForLine() exported from session-parser.ts\n- [ ] extractMessages() refactored to use classifyLine() internally\n- [ ] npm run test passes (no behavior change to existing parser)\n- [ ] countMessagesForLine() matches extractMessages(line).length for every message type\n\n## Files\n- MODIFY: src/server/services/session-parser.ts\n\n## TDD Loop\nRED: tests/unit/session-parser.test.ts — add tests:\n - \"countMessagesForLine matches extractMessages length for user string message\"\n - \"countMessagesForLine matches extractMessages length for user array with tool_result and text\"\n - \"countMessagesForLine matches extractMessages length for assistant array with thinking/text/tool_use\"\n - \"countMessagesForLine returns 1 for progress/file-history-snapshot/summary\"\n - \"countMessagesForLine returns 0 for system/queue-operation\"\n - \"classifyLine returns correct classification for each type\"\nGREEN: Implement classifyLine() + countMessagesForLine(), wire into extractMessages()\nVERIFY: npm run test -- session-parser\n\n## Edge Cases\n- User message with empty content array: returns 0 (no expandable blocks)\n- Assistant message with mixed block types (some unrecognized): only count known types\n- Missing type field: classify as unknown, count as 0\n- Null/undefined message.content: count as 0 (not 1)","status":"closed","priority":1,"issue_type":"task","created_at":"2026-02-04T17:47:13.654314Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:20:11.248274Z","closed_at":"2026-02-04T18:20:11.248229Z","close_reason":"Implemented classifyLine() and countMessagesForLine() helpers, refactored extractMessages() to use classifyLine(), added 13 unit tests including parity checks","compaction_level":0,"original_size":0}
|
||||
{"id":"bd-2bj","title":"CP2: Unit tests for MetadataCache","description":"## Background\nCache behavior must be verified to ensure correctness, performance, and graceful degradation. These tests validate MetadataCache in isolation using temp directories.\n\n## Approach\nCreate tests/unit/metadata-cache.test.ts. Use temp directories for cache file location. Test the MetadataCache class directly without involving the full discovery pipeline.\n\nTest cases:\n1. get() returns null for unknown file path\n2. get() returns entry when mtimeMs AND size match exactly\n3. get() returns null when mtimeMs differs (file modified)\n4. get() returns null when size differs (file modified)\n5. save() is no-op when cache is not dirty (verify file not written via stat check)\n6. save() writes to disk when dirty (verify file exists after save)\n7. save() prunes entries whose paths are not in existingPaths set\n8. load() handles missing cache file gracefully (no throw, empty state)\n9. load() handles corrupt JSON gracefully (no throw, empty state)\n10. Roundtrip: set entries, save, create new instance, load, get returns same entries\n11. 
Dirty flag reset after save (second save is no-op)\n\n## Acceptance Criteria\n- [ ] All 11 test cases pass\n- [ ] Tests use temp directories (not real ~/.cache)\n- [ ] Cleanup in afterEach\n- [ ] npm run test -- metadata-cache passes\n\n## Files\n- CREATE: tests/unit/metadata-cache.test.ts\n\n## TDD Loop\nRED: Write all test cases (fail until bd-18a MetadataCache is implemented)\nGREEN: Tests pass after MetadataCache implementation\nVERIFY: npm run test -- metadata-cache\n\n## Edge Cases\n- Temp dir cleanup must handle missing files (rm with force)\n- Tests should not depend on timing (no race-condition-sensitive assertions)","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T17:48:10.222765Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:25:41.487042Z","closed_at":"2026-02-04T18:25:41.486998Z","close_reason":"Tests already written as part of bd-18a implementation: 13 tests covering get/set, dirty flag, mtimeMs/size matching, prune, persistence, corrupt/missing file handling, flush.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-2bj","depends_on_id":"bd-18a","type":"blocks","created_at":"2026-02-04T17:49:30.610149Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-2jf","title":"CP0: Unit tests for parser parity (forEachJsonlLine, countMessagesForLine)","description":"## Background\nThe parser parity contract is the biggest correctness risk in this feature. These tests prove that the shared helpers produce identical results to the full parser, ensuring list-view counts can never drift from detail-view counts.\n\n## Approach\nAdd a new describe block in the existing test file. Tests should exercise every expansion rule using inline JSONL lines, plus test with the existing fixture files.\n\nTest cases:\n1. forEachJsonlLine skips malformed/truncated JSON lines (missing closing brace)\n2. forEachJsonlLine reports parseErrors count accurately\n3. forEachJsonlLine handles empty content string\n4. countMessagesForLine matches extractMessages().length for user string content\n5. countMessagesForLine matches extractMessages().length for user array with tool_result + text blocks\n6. countMessagesForLine matches extractMessages().length for user text with system-reminder (reclassified)\n7. countMessagesForLine matches extractMessages().length for assistant string content\n8. countMessagesForLine matches extractMessages().length for assistant array (thinking + text + tool_use)\n9. countMessagesForLine returns 1 for progress, file-history-snapshot, summary\n10. countMessagesForLine returns 0 for system, queue-operation\n11. classifyLine returns correct classification for each known type\n12. Integration: extractSessionMetadata().messageCount === parseSessionContent().messages.length on tests/fixtures/sample-session.jsonl\n13. 
Integration: same check on tests/fixtures/edge-cases.jsonl (has malformed lines)\n\n## Acceptance Criteria\n- [ ] All 13+ test cases pass\n- [ ] Tests cover every message type expansion rule\n- [ ] At least one test uses a malformed/truncated JSONL line\n- [ ] At least one test uses real fixture files for integration verification\n- [ ] npm run test -- session-parser passes\n\n## Files\n- MODIFY: tests/unit/session-parser.test.ts (add describe block \"parser parity: shared helpers\")\n\n## TDD Loop\nRED: Write all test cases first (they will fail until CP0 implementation beads are done)\nGREEN: Tests pass after bd-2og and bd-1tm are implemented\nVERIFY: npm run test -- session-parser\n\n## Edge Cases\n- Fixture files may change over time; tests should assert count equality (meta.messageCount === parsed.messages.length) not hardcoded numbers\n- Truncated JSON at end of file (crash mid-write) must be handled identically by both paths","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T17:47:22.070948Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:20:50.740098Z","closed_at":"2026-02-04T18:20:50.740034Z","close_reason":"Added 4 additional parity tests: system-reminder reclassification, truncated JSON handling, and 2 fixture-based integration tests proving countMessagesForLine sum equals parseSessionContent length","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-2jf","depends_on_id":"bd-1tm","type":"blocks","created_at":"2026-02-04T17:49:30.466873Z","created_by":"tayloreernisse"},{"issue_id":"bd-2jf","depends_on_id":"bd-2og","type":"blocks","created_at":"2026-02-04T17:49:30.447953Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-2og","title":"CP0: Extract forEachJsonlLine() shared line iterator from session-parser.ts","description":"## Background\nThe metadata extractor and full parser must iterate JSONL lines identically — same malformed-line handling, same error skipping. Currently this logic is inline in parseSessionContent() (session-parser.ts:59-70). Extracting it guarantees the parser parity contract from the PRD.\n\n## Approach\nExtract a shared function from the existing inline loop in parseSessionContent():\n\n```typescript\nexport interface RawLine {\n type?: string; uuid?: string; timestamp?: string;\n parentToolUseID?: string;\n message?: { role?: string; content?: string | ContentBlock[]; };\n data?: Record<string, unknown>;\n summary?: string; snapshot?: Record<string, unknown>;\n subtype?: string;\n}\n\nexport function forEachJsonlLine(\n content: string,\n onLine: (parsed: RawLine, lineIndex: number) => void\n): { parseErrors: number }\n```\n\nImplementation:\n1. Split content by newlines, filter empty/whitespace-only lines\n2. JSON.parse each line inside try/catch\n3. On parse failure: increment parseErrors counter, continue (skip line)\n4. On success: call onLine(parsed, lineIndex)\n5. Return { parseErrors }\n\nThen refactor parseSessionContent() to use forEachJsonlLine() internally — replacing its current inline loop (lines 59-70). 
No behavior change to parseSessionContent output.\n\n## Acceptance Criteria\n- [ ] forEachJsonlLine() exported from src/server/services/session-parser.ts\n- [ ] RawLine interface exported from src/server/services/session-parser.ts\n- [ ] parseSessionContent() refactored to use forEachJsonlLine() internally\n- [ ] npm run test passes (existing tests unchanged, proving no behavior change)\n- [ ] Malformed JSON lines skipped with parseErrors count incremented\n- [ ] Empty/whitespace-only lines skipped without incrementing parseErrors\n\n## Files\n- MODIFY: src/server/services/session-parser.ts (extract from lines 59-70, export new function + RawLine type)\n\n## TDD Loop\nRED: tests/unit/session-parser.test.ts — add tests:\n - \"forEachJsonlLine skips malformed JSON lines\"\n - \"forEachJsonlLine reports parseErrors count\"\n - \"forEachJsonlLine skips empty and whitespace-only lines\"\nGREEN: Extract forEachJsonlLine(), refactor parseSessionContent() to call it\nVERIFY: npm run test -- session-parser\n\n## Edge Cases\n- Truncated JSON from crash mid-write (common) — must skip, not throw\n- Lines with only whitespace or newlines — skip without error\n- Empty content string — returns { parseErrors: 0 }, onLine never called\n- Content with no trailing newline — last line still processed","status":"closed","priority":1,"issue_type":"task","created_at":"2026-02-04T17:47:00.597480Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:20:09.073002Z","closed_at":"2026-02-04T18:20:09.072954Z","close_reason":"Implemented forEachJsonlLine() with RawLine export, refactored parseSessionContent() to use it, added 5 unit tests","compaction_level":0,"original_size":0}
|
||||
{"id":"bd-34v","title":"CP1: Rewrite discovery loop — filesystem-first with tiered metadata","description":"## Background\nCurrent discovery in session-discovery.ts (114 lines) relies exclusively on sessions-index.json. This misses 17% of sessions. The rewrite makes the filesystem the primary source — every .jsonl file is a session, regardless of index state.\n\n## Approach\nRewrite discoverSessions() in session-discovery.ts:\n\n```typescript\nexport async function discoverSessions(\n projectsDir: string = CLAUDE_PROJECTS_DIR\n): Promise<SessionEntry[]>\n```\n\nNew flow per project directory:\n1. fs.readdir() to list all *.jsonl files (filter by .jsonl extension)\n2. Batch fs.stat() all files (initially unbounded; CP4 adds concurrency limits)\n3. Silently skip files that fail stat (ENOENT from TOCTOU race, EACCES) with debug log\n4. For each successfully statted file, get metadata via tiered lookup:\n - Tier 3 only in this checkpoint (Tier 1 and 2 added in CP3 and CP2)\n - Read file content, call extractSessionMetadata() from session-metadata.ts\n - Silently skip files that fail read (TOCTOU between stat and read)\n5. Build SessionEntry:\n - id: path.basename(file, \".jsonl\")\n - project: decoded project directory name\n - path: absolute path to .jsonl file\n - created: new Date(stat.birthtimeMs).toISOString()\n - modified: new Date(stat.mtimeMs).toISOString()\n - messageCount, firstPrompt, summary: from metadata\n - duration: computed from (lastTimestamp - firstTimestamp) in ms, or undefined\n6. 
Sort all entries by modified descending (stat-derived, never index-derived)\n\nSecurity validations (preserved from current implementation):\n- Reject paths containing \"..\" (traversal)\n- Reject non-.jsonl extensions\n- Reject absolute paths outside projectsDir (containment check)\n\nThe existing 30s in-memory cache in routes/sessions.ts and ?refresh=1 are NOT modified — they wrap discoverSessions() and continue working.\n\n## Acceptance Criteria\n- [ ] All .jsonl sessions appear regardless of whether sessions-index.json exists\n- [ ] SessionEntry.created and .modified always come from fs.stat\n- [ ] List is sorted by modified descending\n- [ ] TOCTOU: files disappearing between readdir/stat silently skipped\n- [ ] TOCTOU: files disappearing between stat/read silently skipped\n- [ ] Path traversal protection applied to filesystem-discovered files\n- [ ] Duration computed from JSONL timestamps (not index)\n- [ ] Existing route-level caching unmodified and working\n- [ ] npm run test passes\n\n## Files\n- MODIFY: src/server/services/session-discovery.ts (rewrite discoverSessions)\n- USES: src/server/services/session-metadata.ts (extractSessionMetadata)\n\n## TDD Loop\nRED: tests/unit/session-discovery.test.ts — add/update tests:\n - \"discovers sessions from .jsonl files without index\"\n - \"timestamps come from stat, not index\"\n - \"silently skips files deleted between readdir and stat\"\n - \"rejects path traversal in filenames\"\n - \"duration computed from JSONL timestamps\"\nGREEN: Rewrite discoverSessions()\nVERIFY: npm run test -- session-discovery\n\n## Edge Cases\n- Project directory with no .jsonl files: returns empty array for that project\n- Project directory that disappears during scan: silently skipped\n- .jsonl file with 0 bytes: extractSessionMetadata returns messageCount 0, session still listed\n- Very long project directory names (URL-encoded paths): handled by existing decoding logic\n- Concurrent discoverSessions() calls: no shared mutable 
state in this checkpoint (cache added in CP2)","status":"closed","priority":1,"issue_type":"task","created_at":"2026-02-04T17:47:44.866319Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:23:23.803724Z","closed_at":"2026-02-04T18:23:23.803676Z","close_reason":"Rewrote discoverSessions() to be filesystem-first: scans .jsonl files directly, uses extractSessionMetadata() for parser parity, timestamps from stat(), TOCTOU-safe. 9 tests covering discovery, stat-based timestamps, TOCTOU, aggregation, sorting, duration, empty files, extension filtering.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-34v","depends_on_id":"bd-3pr","type":"blocks","created_at":"2026-02-04T17:49:30.523977Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-3g5","title":"CP3: Implement Tier 1 index validation and fast path","description":"## Background\nsessions-index.json is unreliable but when valid, it saves parsing work. Tier 1 uses it as a fast-path optimization. The index format varies: modern ({ version: 1, entries: [...] }) or legacy (raw array). The existing parsing logic in session-discovery.ts already handles both formats.\n\n## Approach\nAdd Tier 1 lookup to discoverSessions() before Tier 2 (cache) and Tier 3 (parse):\n\n```typescript\ninterface IndexEntry {\n sessionId: string;\n summary?: string;\n firstPrompt?: string;\n created?: string;\n modified?: string;\n messageCount?: number;\n fullPath?: string;\n projectPath?: string;\n}\n```\n\nPer-project flow:\n1. Try to read and parse sessions-index.json into Map<sessionId, IndexEntry>\n - Handle both modern (version:1 wrapper) and legacy (raw array) formats\n - On missing file: continue silently (common case, 13 projects have none)\n - On corrupt JSON: log warning, continue with empty map\n2. For each .jsonl file, after stat:\n a. Derive sessionId: path.basename(file, \".jsonl\")\n b. Look up sessionId in index map\n c. If found AND entry.modified exists:\n - Compare new Date(entry.modified).getTime() vs stat.mtimeMs\n - If difference <= 1000ms: Tier 1 HIT — use entry.messageCount, entry.summary, entry.firstPrompt\n - If difference > 1000ms: Tier 1 MISS (stale) — fall through to Tier 2/3\n d. If found but no modified field: skip Tier 1, fall through\n e. If not found: skip Tier 1, fall through\n3. 
IMPORTANT: SessionEntry.created and .modified ALWAYS from stat, even on Tier 1 hit\n\n## Acceptance Criteria\n- [ ] Tier 1 used when index entry modified matches stat mtime within 1s tolerance\n- [ ] Tier 1 rejected when mtime mismatch > 1s\n- [ ] Tier 1 skipped when entry has no modified field\n- [ ] Missing sessions-index.json does not break discovery\n- [ ] Corrupt sessions-index.json does not break discovery (logged, skipped)\n- [ ] SessionEntry timestamps always from stat, never from index\n- [ ] Both modern and legacy index formats handled\n- [ ] npm run test passes\n\n## Files\n- MODIFY: src/server/services/session-discovery.ts (add Tier 1 logic before Tier 2/3)\n\n## TDD Loop\nRED: tests/unit/session-discovery.test.ts — add describe block \"Tier 1 index validation\":\n - \"uses index data when modified matches stat mtime within 1s\"\n - \"rejects index data when mtime mismatch > 1s\"\n - \"skips Tier 1 when entry has no modified field\"\n - \"handles missing sessions-index.json\"\n - \"handles corrupt sessions-index.json\"\n - \"timestamps always from stat even on Tier 1 hit\"\nGREEN: Implement Tier 1 logic\nVERIFY: npm run test -- session-discovery\n\n## Edge Cases\n- Index modified as ISO string vs numeric timestamp: normalize both via new Date().getTime()\n- Index with extra/unknown fields: ignored (only read known fields)\n- Multiple index entries with same sessionId: last one wins (Map.set overwrites)\n- Extremely old index (years stale): rejected by mtime check, no special handling needed","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T17:48:20.640825Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:27:21.126761Z","closed_at":"2026-02-04T18:27:21.126714Z","close_reason":"Implemented Tier 1 index validation fast path in discoverSessions(). Reads sessions-index.json per project, validates mtime within 1s tolerance, falls through to Tier 2/3 on miss. 
6 new tests for hit/miss/no-modified/missing/corrupt/stat-timestamps.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-3g5","depends_on_id":"bd-34v","type":"blocks","created_at":"2026-02-04T17:49:30.633475Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-3pr","title":"CP1: Implement extractSessionMetadata() using shared helpers","description":"## Background\nThe lightweight metadata extractor reads JSONL and extracts only what the list view needs, without building full message content strings. It must use the shared helpers from CP0 to guarantee parser parity (list counts match detail counts).\n\n## Approach\nCreate in a new file src/server/services/session-metadata.ts:\n\n```typescript\nexport interface SessionMetadata {\n messageCount: number;\n firstPrompt: string; // first non-system-reminder user message, truncated to 200 chars\n summary: string; // summary field from last type=summary line\n firstTimestamp: string; // ISO from first JSONL line with timestamp field\n lastTimestamp: string; // ISO from last JSONL line with timestamp field\n parseErrors: number; // from forEachJsonlLine\n}\n\nexport function extractSessionMetadata(content: string): SessionMetadata\n```\n\nImplementation:\n1. Call forEachJsonlLine(content, onLine) from session-parser.ts\n2. In onLine callback:\n a. Accumulate messageCount via countMessagesForLine(parsed)\n b. Track firstTimestamp (first parsed.timestamp seen) and lastTimestamp (latest)\n c. For firstPrompt: first user message where content is string and does not contain \"<system-reminder>\", truncated to 200 chars\n d. For summary: overwrite on each type=summary line (keeps last)\n3. Return SessionMetadata with all fields\n\nNo string building, no JSON.stringify, no markdown processing. 
Just counting + timestamp capture + first-match extraction.\n\n## Acceptance Criteria\n- [ ] extractSessionMetadata() exported from src/server/services/session-metadata.ts\n- [ ] SessionMetadata interface exported\n- [ ] extractSessionMetadata(content).messageCount === parseSessionContent(content, id).messages.length on fixture files\n- [ ] firstPrompt skips system-reminder user messages\n- [ ] firstPrompt truncated to 200 chars\n- [ ] summary captures the LAST summary line (not first)\n- [ ] firstTimestamp and lastTimestamp correctly captured\n- [ ] Empty JSONL content returns messageCount: 0, empty strings for text fields\n\n## Files\n- CREATE: src/server/services/session-metadata.ts\n- MODIFY: src/server/services/session-parser.ts (ensure forEachJsonlLine, countMessagesForLine, RawLine are exported)\n\n## TDD Loop\nRED: tests/unit/session-metadata.test.ts — tests:\n - \"messageCount matches parseSessionContent on sample-session.jsonl\"\n - \"messageCount matches parseSessionContent on edge-cases.jsonl\"\n - \"firstPrompt skips system-reminder messages\"\n - \"firstPrompt truncated to 200 chars\"\n - \"summary captures last summary line\"\n - \"timestamps captured from first and last lines\"\n - \"empty content returns zero counts\"\nGREEN: Implement extractSessionMetadata()\nVERIFY: npm run test -- session-metadata\n\n## Edge Cases\n- JSONL with no user messages: firstPrompt is empty string\n- JSONL with no summary lines: summary is empty string\n- JSONL with no timestamps: firstTimestamp and lastTimestamp are empty strings\n- All user messages are system-reminders: firstPrompt is empty string\n- Single-line JSONL: firstTimestamp === lastTimestamp","status":"closed","priority":1,"issue_type":"task","created_at":"2026-02-04T17:47:32.534319Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:21:48.880124Z","closed_at":"2026-02-04T18:21:48.880075Z","close_reason":"Implemented extractSessionMetadata() in new file session-metadata.ts with 12 unit tests. 
Uses forEachJsonlLine/countMessagesForLine/classifyLine for parser parity. Fixture integration tests confirm messageCount matches parseSessionContent.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-3pr","depends_on_id":"bd-1tm","type":"blocks","created_at":"2026-02-04T17:49:30.505649Z","created_by":"tayloreernisse"},{"issue_id":"bd-3pr","depends_on_id":"bd-2og","type":"blocks","created_at":"2026-02-04T17:49:30.484195Z","created_by":"tayloreernisse"}]}
|
||||
{"id":"bd-sks","title":"Epic: JSONL-First Session Discovery","description":"## Background\nEpic tracking the JSONL-First Session Discovery feature. Claude Code sessions-index.json is unreliable (17% loss rate: 542 unindexed JSONL files). The .jsonl files are the source of truth; the index is an unreliable convenience cache.\n\n## Scope\n- CP0: Parser parity foundations (shared line iterator + counting helpers)\n- CP1: Filesystem-first discovery with tiered metadata lookup\n- CP2: Persistent metadata cache (~/.cache/session-viewer/metadata.json)\n- CP3: Tier 1 index validation fast path\n- CP4: Bounded concurrency for performance targets\n\n## Acceptance Criteria\n- [ ] All .jsonl sessions appear in session list regardless of index state\n- [ ] Message counts in list view match detail view exactly (parser parity)\n- [ ] Warm start < 1s, cold start < 5s\n- [ ] Existing 30s in-memory cache and ?refresh=1 continue working\n- [ ] Zero config, works out of the box\n\n## PRD Reference\ndocs/prd-jsonl-first-discovery.md","status":"closed","priority":1,"issue_type":"epic","created_at":"2026-02-04T17:46:50.724897Z","created_by":"tayloreernisse","updated_at":"2026-02-04T18:28:42.868428Z","closed_at":"2026-02-04T18:28:42.868380Z","close_reason":"All checkpoints complete: CP0 (parser parity helpers), CP1 (filesystem-first discovery + metadata extraction), CP2 (persistent MetadataCache), CP3 (Tier 1 index validation), CP4 (bounded concurrency). 297 tests passing.","compaction_level":0,"original_size":0}
|
||||
4
.beads/metadata.json
Normal file
4
.beads/metadata.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"database": "beads.db",
|
||||
"jsonl_export": "issues.jsonl"
|
||||
}
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -14,3 +14,6 @@ dist/
|
||||
*~
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# bv (beads viewer) local config and caches
|
||||
.bv/
|
||||
|
||||
313
docs/prd-jsonl-first-discovery.md
Normal file
313
docs/prd-jsonl-first-discovery.md
Normal file
@@ -0,0 +1,313 @@
|
||||
# PRD: JSONL-First Session Discovery
|
||||
|
||||
## Status: Ready for Implementation
|
||||
|
||||
## Context
|
||||
|
||||
Session viewer relies exclusively on `sessions-index.json` files that Claude Code maintains. These indexes are unreliable — a known, widespread bug with multiple open GitHub issues ([#22030](https://github.com/anthropics/claude-code/issues/22030), [#21610](https://github.com/anthropics/claude-code/issues/21610), [#18619](https://github.com/anthropics/claude-code/issues/18619), [#22114](https://github.com/anthropics/claude-code/issues/22114)).
|
||||
|
||||
### Root cause
|
||||
|
||||
Claude Code updates `sessions-index.json` only at session end. If a session crashes, is killed, or is abandoned, the JSONL file is written but the index is never updated. Multiple concurrent Claude instances can also corrupt the index (last-write-wins on a single JSON file). There is no reindex command and no background repair process.
|
||||
|
||||
### Impact on this system
|
||||
|
||||
- **542 unindexed JSONL files** across all projects (87 MB total)
|
||||
- **48 unindexed in last 7 days** (30.8 MB)
|
||||
- **13 projects** have JSONL session files but no index at all
|
||||
- **Zero sessions from today** (Feb 4, 2026) appear in any index
|
||||
- **3,103 total JSONL files** vs **2,563 indexed entries** = 17% loss rate
|
||||
|
||||
### Key insight
|
||||
|
||||
The `.jsonl` files are the source of truth. The index is an unreliable convenience cache. The session viewer must treat it that way.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Must have
|
||||
|
||||
1. **All sessions with a `.jsonl` file must appear in the session list**, regardless of whether they're in `sessions-index.json`
|
||||
2. **Exact message counts** — no estimates, no approximations. Contract: Tier 3 extraction MUST reuse the same line-classification logic as `parseSessionContent` (shared helper), so list counts cannot drift from detail parsing.
|
||||
3. **Performance**: Warm start (cache exists, few changes) must complete under 1 second. Cold start (no cache) is acceptable up to 5 seconds for first request
|
||||
4. **Correctness over speed** — never show stale metadata if the file has been modified
|
||||
5. **Zero config** — works out of the box with no setup or external dependencies
|
||||
|
||||
### Should have
|
||||
|
||||
6. Session `summary` extracted from the last `type="summary"` line in the JSONL
|
||||
7. Session `firstPrompt` extracted from the first non-system-reminder user message
|
||||
8. Session `duration` MUST be derivable without relying on `sessions-index.json` — extract first and last timestamps from JSONL when index is missing or stale
|
||||
9. Persistent metadata cache survives server restarts
|
||||
|
||||
### Won't have (this iteration)
|
||||
|
||||
- Real-time push updates (sessions appearing in UI without refresh)
|
||||
- Background file watcher daemon
|
||||
- Integration with `cass` as a search/indexing backend
|
||||
- Rebuilding Claude Code's `sessions-index.json`
|
||||
|
||||
## Technical Design
|
||||
|
||||
### Architecture: Filesystem-primary with tiered metadata lookup
|
||||
|
||||
```
|
||||
discoverSessions()
|
||||
|
|
||||
+-- For each project directory under ~/.claude/projects/:
|
||||
| |
|
||||
| +-- fs.readdir() --> list all *.jsonl files
|
||||
| +-- Read sessions-index.json (optional, used as pre-populated cache)
|
||||
| |
|
||||
| +-- Batch stat all .jsonl files (bounded concurrency)
|
||||
| | Files that disappeared between readdir and stat are silently skipped (TOCTOU race)
|
||||
| |
|
||||
| +-- For each .jsonl file:
|
||||
| | |
|
||||
| | +-- Tier 1: Check index
|
||||
| | | Entry exists AND normalize(index.modified) matches stat mtime?
|
||||
| | | --> Use index content data (messageCount, summary, firstPrompt)
|
||||
| | | --> Use stat-derived timestamps for created/modified (always)
|
||||
| | |
|
||||
| | +-- Tier 2: Check persistent metadata cache
|
||||
| | | path + mtimeMs + size match?
|
||||
| | | --> Use cached metadata (fast path)
|
||||
| | |
|
||||
| | +-- Tier 3: Extract metadata from JSONL content
|
||||
| | Read file, lightweight parse using shared line iterator + counting helper
|
||||
| | --> Cache result for future lookups
|
||||
| |
|
||||
| +-- Collect SessionEntry[] for this project
|
||||
|
|
||||
+-- Merge all projects
|
||||
+-- Sort by modified (descending) — always stat-derived, never index-derived
|
||||
+-- Async: persist metadata cache to disk (if dirty)
|
||||
```
|
||||
|
||||
### Tier explanation
|
||||
|
||||
| Tier | Source | Speed | When used | Trusts from source |
|
||||
|------|--------|-------|-----------|--------------------|
|
||||
| 1 | `sessions-index.json` | Instant (in-memory lookup) | Index exists, entry present, `normalize(modified)` matches actual file mtime | `messageCount`, `summary`, `firstPrompt` only. Timestamps always from stat. |
|
||||
| 2 | Persistent metadata cache | Instant (in-memory lookup) | Index missing/stale, but file hasn't changed since last extraction (mtimeMs + size match) | All cached fields |
|
||||
| 3 | JSONL file parse | ~5-50ms/file | New or modified file, not in any cache | Extracted fresh |
|
||||
|
||||
Tier 1 reuses Claude's index when it's valid — no wasted work. The index `modified` field (ISO string) is normalized to milliseconds and compared against the real file `stat.mtimeMs`. If the index is missing or corrupt, discovery continues with Tier 2 and 3 without error. Even when Tier 1 is valid, `created` and `modified` timestamps on the `SessionEntry` always come from `fs.stat` — the index is a content cache only.
|
||||
|
||||
### Tier 1: Index validation details
|
||||
|
||||
The actual `sessions-index.json` format has `created` and `modified` as ISO strings, not a `fileMtime` field. Tier 1 validation must:
|
||||
|
||||
1. Map JSONL filename to sessionId: `sessionId := path.basename(jsonlFile, '.jsonl')`
|
||||
2. Look up `sessionId` in the index `Map<string, IndexEntry>`
|
||||
3. Compare `new Date(entry.modified).getTime()` against `stat.mtimeMs` — reject if they differ by more than 1000ms (accounts for ISO string → filesystem mtime rounding)
|
||||
4. If the index entry has no `modified` field, skip Tier 1 (fall through to Tier 2)
|
||||
5. When Tier 1 is valid, trust only content fields (`messageCount`, `summary`, `firstPrompt`). The `created`/`modified` on the resulting `SessionEntry` must come from `stat.birthtimeMs`/`stat.mtimeMs` respectively — this ensures list ordering is never stale even within the 1s mtime tolerance window.
|
||||
|
||||
### Shared line-iteration and counting (parser parity contract)
|
||||
|
||||
The biggest correctness risk in this design is duplicating any JSONL processing logic. The real parser in `session-parser.ts` has non-trivial expansion rules:
|
||||
|
||||
- User array content: expands `tool_result` and `text` blocks into separate messages
|
||||
- `system-reminder` detection reclassifies user `text` blocks as `system_message`
|
||||
- Assistant array content: `thinking`, `text`, and `tool_use` each become separate messages
|
||||
- `progress`, `file-history-snapshot`, `summary` → 1 message each
|
||||
- `system`, `queue-operation` → 0 (skipped)
|
||||
|
||||
It also has error-handling behavior: malformed/truncated JSON lines are skipped (common when sessions crash mid-write). If the metadata extractor and the full parser handle malformed lines differently, counts will drift.
|
||||
|
||||
Rather than reimplementing any of these rules, extract shared helpers at two levels:
|
||||
|
||||
```typescript
|
||||
// In session-parser.ts (or a shared module):
|
||||
|
||||
// Level 1: Line iteration with consistent error handling
|
||||
// Splits content by newlines, JSON.parse each, skips malformed lines identically
|
||||
// to how parseSessionContent handles them. Returns parse error count for diagnostics.
|
||||
export function forEachJsonlLine(
|
||||
content: string,
|
||||
onLine: (parsed: RawLine, lineIndex: number) => void
|
||||
): { parseErrors: number }
|
||||
|
||||
// Level 2: Classification and counting (called per parsed line)
|
||||
export function countMessagesForLine(parsed: RawLine): number
|
||||
export function classifyLine(parsed: RawLine): LineClassification
|
||||
```
|
||||
|
||||
Both `extractSessionMetadata()` and `parseSessionContent()` use `forEachJsonlLine()` for iteration, ensuring identical malformed-line handling. Both use `countMessagesForLine()` for counting. This two-level sharing guarantees that list counts can never drift from detail-view counts, regardless of future parser changes or edge cases in error handling.
|
||||
|
||||
### Metadata extraction (Tier 3)
|
||||
|
||||
A lightweight `extractSessionMetadata()` function reads the JSONL file and extracts only what the list view needs, without building full message content strings:
|
||||
|
||||
```typescript
|
||||
export function extractSessionMetadata(content: string): SessionMetadata
|
||||
```
|
||||
|
||||
Implementation:
|
||||
|
||||
1. Iterate lines via `forEachJsonlLine(content, ...)` — the shared iterator with identical malformed-line handling as the main parser
|
||||
2. Call `countMessagesForLine(parsed)` per line — the shared helper that uses the **same classification rules** as `parseSessionContent` in `session-parser.ts`
|
||||
3. Extract `firstPrompt`: content of the first user message that isn't a `<system-reminder>`, truncated to 200 characters
|
||||
4. Extract `summary`: the `summary` field from the last `type="summary"` line
|
||||
5. Capture first and last `timestamp` fields for duration computation
|
||||
|
||||
No string building, no `JSON.stringify`, no markdown processing — just counting, timestamp capture, and first-match extraction. This is exact (matches `parseSessionContent().messages.length` via shared helpers) but 2-3x faster than full parsing.
|
||||
|
||||
### Persistent metadata cache
|
||||
|
||||
**Location:** `~/.cache/session-viewer/metadata.json`
|
||||
|
||||
```typescript
|
||||
interface CacheFile {
|
||||
version: 1;
|
||||
entries: Record<string, { // keyed by absolute file path
|
||||
mtimeMs: number;
|
||||
size: number;
|
||||
messageCount: number;
|
||||
firstPrompt: string;
|
||||
summary: string;
|
||||
created: string; // ISO string from file birthtime
|
||||
modified: string; // ISO string from file mtime
|
||||
firstTimestamp: string; // ISO from first JSONL line with timestamp
|
||||
lastTimestamp: string; // ISO from last JSONL line with timestamp
|
||||
}>;
|
||||
}
|
||||
```
|
||||
|
||||
Behavior:
|
||||
- Loaded once on first `discoverSessions()` call
|
||||
- Entries validated by `(mtimeMs, size)` — if either changes, entry is re-extracted via Tier 3
|
||||
- Written to disk asynchronously using a dirty-flag write-behind strategy: only when cache has new/updated entries, coalescing multiple discovery passes, non-blocking
|
||||
- Flush any pending write on process exit (`SIGTERM`, `SIGINT`) and graceful server shutdown — prevents losing cache updates when the server stops before the async write fires
|
||||
- Corrupt or missing cache file triggers graceful fallback (all files go through Tier 3, cache rebuilt)
|
||||
- Atomic writes: write to temp file, then rename (prevents corruption from crashes during write)
|
||||
- Stale entries (file no longer exists on disk) are pruned on save
|
||||
|
||||
### Concurrency model
|
||||
|
||||
Cold start with 3,103 files requires bounded parallelism to avoid file-handle exhaustion and IO thrash, while still meeting the <5s target:
|
||||
|
||||
- **Stat phase**: Batch all `fs.stat()` calls with concurrency limit (e.g., 64). This classifies each file into Tier 1/2 (cache hit) or Tier 3 (needs parse). Files that fail stat (ENOENT from deletion race, EACCES) are silently skipped with a debug log.
|
||||
- **Parse phase**: Process Tier-3 misses with bounded concurrency (e.g., 8). Each parse reads + iterates via shared `forEachJsonlLine()` + shared counter. With max file size 4.5MB, each parse is ~5-50ms.
|
||||
- Use a simple async work queue (e.g., `p-limit` or hand-rolled semaphore). No worker threads needed for this IO-bound workload.
|
||||
|
||||
### Performance expectations
|
||||
|
||||
| Scenario | Estimated time |
|
||||
|----------|---------------|
|
||||
| Cold start (no cache, no index) | ~3-5s for 3,103 files (~500MB), bounded concurrency: stat@64, parse@8 |
|
||||
| Warm start (cache exists, few changes) | ~300-500ms (stat all files at bounded concurrency, in-memory lookups) |
|
||||
| Incremental (cache + few new sessions) | ~500ms + ~50ms per new file |
|
||||
| Subsequent API calls within 30s TTL | <1ms (in-memory session list cache) |
|
||||
|
||||
### Existing infrastructure leveraged
|
||||
|
||||
- **30-second in-memory cache** in `sessions.ts` (`getCachedSessions()`) — unchanged, provides the fast path for repeated API calls
|
||||
- **`?refresh=1` query parameter** — forces cache invalidation, unchanged
|
||||
- **Concurrent request deduplication** via `cachePromise` pattern — unchanged
|
||||
- **Security validations** — path traversal rejection, containment checks, `.jsonl` extension enforcement — applied to filesystem-discovered files identically
|
||||
|
||||
## Implementation scope
|
||||
|
||||
### Checkpoints
|
||||
|
||||
#### CP0 — Parser parity foundations
|
||||
- Extract `forEachJsonlLine()` shared line iterator from existing parser
|
||||
- Extract `countMessagesForLine()` and `classifyLine()` shared helpers
|
||||
- Refactor `extractMessages()` to use these internally (no behavior change to parseSessionContent)
|
||||
- Tests verify identical behavior on malformed/truncated lines
|
||||
|
||||
#### CP1 — Filesystem-first correctness
|
||||
- All `.jsonl` sessions appear even with missing/corrupt index
|
||||
- `extractSessionMetadata()` uses shared line iterator + counting helpers; exact counts verified by tests
|
||||
- Stat-derived `created`/`modified` are the single source for SessionEntry timestamps and list ordering
|
||||
- Duration computed from JSONL timestamps, not index
|
||||
- TOCTOU races (readdir/stat, stat/read) handled gracefully — disappeared files silently skipped
|
||||
|
||||
#### CP2 — Persistent cache
|
||||
- Atomic writes with dirty-flag write-behind; prune stale entries
|
||||
- Invalidation keyed on `(mtimeMs, size)`
|
||||
- Flush pending writes on process exit / server shutdown
|
||||
|
||||
#### CP3 — Index fast path (Tier 1)
|
||||
- Parse index into Map; normalize `modified` ISO → ms; validate against stat mtime with 1s tolerance
|
||||
- sessionId mapping: `basename(file, '.jsonl')`
|
||||
- Tier 1 trusts content fields only; timestamps always from stat
|
||||
|
||||
#### CP4 — Performance hardening
|
||||
- Bounded concurrency for stat + parse phases
|
||||
- Warm start <1s verified on real dataset
|
||||
|
||||
### Modified files
|
||||
|
||||
**`src/server/services/session-parser.ts`**
|
||||
|
||||
1. Extract `forEachJsonlLine(content, onLine): { parseErrors: number }` — shared line iterator with consistent malformed-line handling
|
||||
2. Extract `countMessagesForLine(parsed: RawLine): number` — shared counting helper
|
||||
3. Extract `classifyLine(parsed: RawLine): LineClassification` — shared classification
|
||||
4. Refactor `extractMessages()` to use these shared helpers internally (no behavior change to parseSessionContent)
|
||||
|
||||
**`src/server/services/session-discovery.ts`**
|
||||
|
||||
1. Add `extractSessionMetadata(content: string): SessionMetadata` — lightweight JSONL metadata extractor using shared line iterator + counting helper
|
||||
2. Add `MetadataCache` class — persistent cache with load/get/set/save, dirty-flag write-behind, shutdown flush
|
||||
3. Rewrite per-project discovery loop — filesystem-first, tiered metadata lookup with bounded concurrency
|
||||
4. Read `sessions-index.json` as optimization only — parse into `Map<sessionId, IndexEntry>`, normalize `modified` to ms, validate against stat mtime before trusting
|
||||
5. Register shutdown hooks for cache flush on `SIGTERM`/`SIGINT`
|
||||
|
||||
### Unchanged files
|
||||
|
||||
- `src/server/routes/sessions.ts` — existing caching layer works as-is
|
||||
- `src/shared/types.ts` — `SessionEntry` type already has `duration?: number`
|
||||
- All client components — no changes needed
|
||||
|
||||
### New tests
|
||||
|
||||
- Unit test: `forEachJsonlLine()` skips malformed lines identically to how `parseSessionContent` handles them
|
||||
- Unit test: `forEachJsonlLine()` reports parse error count for truncated/corrupted lines
|
||||
- Unit test: `countMessagesForLine()` matches actual `extractMessages()` output length on sample lines
|
||||
- Unit test: `extractSessionMetadata()` message count matches `parseSessionContent().messages.length` on sample fixtures (including malformed/truncated lines)
|
||||
- Unit test: Duration extracted from JSONL timestamps matches expected values
|
||||
- Unit test: SessionEntry `created`/`modified` always come from stat, even when Tier 1 index data is trusted
|
||||
- Unit test: Tier 1 validation rejects stale index entries (mtime mismatch beyond 1s tolerance)
|
||||
- Unit test: Tier 1 handles missing `modified` field gracefully (falls through to Tier 2)
|
||||
- Unit test: Discovery works with no `sessions-index.json` present
|
||||
- Unit test: Discovery silently skips files that disappear between readdir and stat (TOCTOU)
|
||||
- Unit test: Cache hit/miss/invalidation behavior (mtimeMs + size)
|
||||
- Unit test: Cache dirty-flag only triggers write when entries changed
|
||||
|
||||
## Edge cases
|
||||
|
||||
| Scenario | Behavior |
|
||||
|----------|----------|
|
||||
| File actively being written | mtime changes between stat and read. Next discovery pass re-extracts. Partial JSONL handled gracefully (malformed lines skipped via shared `forEachJsonlLine`, same behavior as real parser). |
|
||||
| Deleted session files | File in cache but gone from disk. Entry silently dropped, pruned from cache on next save. |
|
||||
| File disappears between readdir and stat | TOCTOU race. Stat failure (ENOENT) silently skipped with debug log. |
|
||||
| File disappears between stat and read | Read failure silently skipped; file excluded from results. Next pass re-discovers if it reappears. |
|
||||
| Index entry with wrong mtime | Tier 1 validation rejects it (>1s tolerance). Falls through to Tier 2/3. |
|
||||
| Index entry with no `modified` field | Tier 1 skips it. Falls through to Tier 2/3. |
|
||||
| Index `modified` as ISO string vs numeric timestamp | Normalization handles both via `new Date(value).getTime()`; numeric values are interpreted as epoch milliseconds (epoch seconds are not special-cased and would simply fail the mtime check, falling through to Tier 2/3). |
|
||||
| Cache file locked or unwritable | Extraction still works, just doesn't persist. Warning logged to stderr. |
|
||||
| Very large files | 4.5MB max observed. Tier 3 parse ~50ms. Acceptable. |
|
||||
| Concurrent server restarts | Cache writes are atomic (temp file + rename). |
|
||||
| Server killed before async cache write | Shutdown hooks flush pending writes on SIGTERM/SIGINT. Hard kills (SIGKILL) may lose updates — acceptable, cache rebuilt on next cold start. |
|
||||
| Empty JSONL files | Returns `messageCount: 0`, empty `firstPrompt`, `summary`, and timestamps. Duration: 0. |
|
||||
| Projects with no index file | Discovery proceeds normally via Tier 2/3. Common case (13 projects). |
|
||||
| Non-JSONL files in project dirs | Filtered out by `.jsonl` extension check in `readdir` results. |
|
||||
| File handle exhaustion | Bounded concurrency (stat@64, parse@8) prevents opening thousands of handles. |
|
||||
| Future parser changes (new message types) | Shared line iterator + counting helper in session-parser.ts means Tier 3 automatically stays in sync. |
|
||||
| Malformed JSONL lines (crash mid-write) | Shared `forEachJsonlLine()` skips identically in both metadata extraction and full parsing — no count drift. |
|
||||
|
||||
## Verification plan
|
||||
|
||||
1. Start dev server, confirm today's sessions appear immediately in the session list
|
||||
2. Compare message counts for indexed sessions: Tier 1 data vs Tier 3 extraction (should match)
|
||||
3. Verify duration is shown for sessions that have no index entry (JSONL-only sessions)
|
||||
4. Delete a `sessions-index.json`, refresh — verify all sessions for that project still appear with correct counts and durations
|
||||
5. Run existing test suite: `npm test`
|
||||
6. Run new unit tests for shared line iterator, counting helper, `extractSessionMetadata()`, and `MetadataCache`
|
||||
7. Verify `created`/`modified` in session list come from stat, not index (compare with `ls -l` output)
|
||||
8. Verify cold start performance: delete `~/.cache/session-viewer/metadata.json`, time the first API request
|
||||
9. Verify warm start performance: time a subsequent server start with cache in place
|
||||
10. Verify cache dirty-flag: repeated refreshes with no file changes should not write cache to disk
|
||||
11. Kill server with SIGTERM, restart — verify cache was flushed (no full re-parse on restart)
|
||||
@@ -6,10 +6,54 @@ import { SearchBar } from "./components/SearchBar";
|
||||
import { SearchMinimap } from "./components/SearchMinimap";
|
||||
import { ExportButton } from "./components/ExportButton";
|
||||
import { ErrorBoundary } from "./components/ErrorBoundary";
|
||||
import { Menu, LayoutRows } from "./components/Icons";
|
||||
import { useSession } from "./hooks/useSession";
|
||||
import { useFilters } from "./hooks/useFilters";
|
||||
import { countSensitiveMessages } from "../shared/sensitive-redactor";
|
||||
import type { SessionEntry } from "./lib/types";
|
||||
|
||||
type Density = "comfortable" | "compact";
|
||||
|
||||
function useDensity(): [Density, (d: Density) => void] {
|
||||
const [density, setDensityState] = useState<Density>(() => {
|
||||
try {
|
||||
const stored = localStorage.getItem("session-viewer-density");
|
||||
if (stored === "compact" || stored === "comfortable") return stored;
|
||||
} catch { /* localStorage unavailable */ }
|
||||
return "comfortable";
|
||||
});
|
||||
|
||||
const setDensity = useCallback((d: Density) => {
|
||||
setDensityState(d);
|
||||
try { localStorage.setItem("session-viewer-density", d); } catch { /* noop */ }
|
||||
}, []);
|
||||
|
||||
return [density, setDensity];
|
||||
}
|
||||
|
||||
function NavSessionInfo({ sessionId, project, sessions }: {
|
||||
sessionId: string;
|
||||
project: string;
|
||||
sessions: SessionEntry[];
|
||||
}): React.ReactElement {
|
||||
const entry = sessions.find((s) => s.id === sessionId);
|
||||
const title = entry?.summary || entry?.firstPrompt || "Session";
|
||||
return (
|
||||
<div className="flex items-center gap-1.5 min-w-0 overflow-hidden">
|
||||
{project && (
|
||||
<span className="text-caption text-foreground-muted whitespace-nowrap truncate max-w-[200px]" title={project}>
|
||||
{project}
|
||||
</span>
|
||||
)}
|
||||
{project && (
|
||||
<span className="text-foreground-muted opacity-40 flex-shrink-0">/</span>
|
||||
)}
|
||||
<span className="text-body font-medium text-foreground truncate">
|
||||
{title}
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function App() {
|
||||
const {
|
||||
@@ -22,6 +66,16 @@ export function App() {
|
||||
} = useSession();
|
||||
|
||||
const filters = useFilters();
|
||||
const [density, setDensity] = useDensity();
|
||||
const [sidebarOpen, setSidebarOpen] = useState(true);
|
||||
|
||||
// Close sidebar on mobile after selecting a session
|
||||
const handleSelectSession = useCallback((id: string) => {
|
||||
loadSession(id);
|
||||
if (window.innerWidth < 768) {
|
||||
setSidebarOpen(false);
|
||||
}
|
||||
}, [loadSession]);
|
||||
|
||||
// URL-driven session selection: sync session ID with URL search params
|
||||
const hasRestoredFromUrl = useRef(false);
|
||||
@@ -57,7 +111,6 @@ export function App() {
|
||||
const progressEnabled = filters.enabledCategories.has("hook_progress");
|
||||
|
||||
// Count across all session messages (not just filtered) — recompute only on session change.
|
||||
// This avoids re-running 37 regex patterns whenever filter toggles change.
|
||||
const sensitiveCount = useMemo(
|
||||
() => countSensitiveMessages(currentSession?.messages || []),
|
||||
[currentSession?.messages]
|
||||
@@ -182,10 +235,28 @@ export function App() {
|
||||
updateViewport();
|
||||
}, [filteredMessages, updateViewport]);
|
||||
|
||||
const isCompact = density === "compact";
|
||||
|
||||
return (
|
||||
<div className="flex h-screen" style={{ background: "var(--color-canvas)" }}>
|
||||
{/* Sidebar backdrop — visible on mobile when sidebar is open */}
|
||||
{sidebarOpen && (
|
||||
<div
|
||||
className="fixed inset-0 z-20 bg-black/50 md:hidden"
|
||||
onClick={() => setSidebarOpen(false)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Sidebar */}
|
||||
<div className="w-80 flex-shrink-0 border-r border-border bg-surface-raised flex flex-col">
|
||||
<div
|
||||
className={`
|
||||
flex-shrink-0 border-r border-border bg-surface-raised flex flex-col
|
||||
fixed inset-y-0 left-0 z-30 w-80
|
||||
transform transition-transform duration-200 ease-out
|
||||
md:relative md:translate-x-0
|
||||
${sidebarOpen ? "translate-x-0" : "-translate-x-full"}
|
||||
`}
|
||||
>
|
||||
<div className="px-5 py-4 border-b border-border" style={{ background: "linear-gradient(180deg, var(--color-surface-overlay) 0%, var(--color-surface-raised) 100%)" }}>
|
||||
<div className="flex items-center justify-between">
|
||||
<h1 className="text-heading font-semibold text-foreground tracking-tight">
|
||||
@@ -214,9 +285,10 @@ export function App() {
|
||||
sessions={sessions}
|
||||
loading={sessionsLoading}
|
||||
selectedId={currentSession?.id}
|
||||
onSelect={loadSession}
|
||||
onSelect={handleSelectSession}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-shrink-0">
|
||||
<FilterPanel
|
||||
enabledCategories={filters.enabledCategories}
|
||||
onToggle={filters.toggleCategory}
|
||||
@@ -225,12 +297,32 @@ export function App() {
|
||||
sensitiveCount={sensitiveCount}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Main */}
|
||||
<div className="flex-1 flex flex-col min-w-0">
|
||||
<div className="glass flex items-center px-5 py-4 border-b border-border z-10">
|
||||
{/* Left spacer — mirrors right side width to keep search centered */}
|
||||
<div className="flex-1 min-w-0" />
|
||||
<div className="glass flex items-center px-5 py-3 border-b border-border z-10 gap-3">
|
||||
{/* Mobile sidebar toggle */}
|
||||
<button
|
||||
onClick={() => setSidebarOpen(!sidebarOpen)}
|
||||
className="flex items-center justify-center w-8 h-8 rounded-md text-foreground-muted hover:text-foreground hover:bg-surface-overlay/60 transition-colors md:hidden flex-shrink-0"
|
||||
aria-label="Toggle sidebar"
|
||||
>
|
||||
<Menu />
|
||||
</button>
|
||||
|
||||
{/* Left — session info or app title */}
|
||||
<div className="flex-1 min-w-0">
|
||||
{currentSession ? (
|
||||
<NavSessionInfo
|
||||
sessionId={currentSession.id}
|
||||
project={currentSession.project}
|
||||
sessions={sessions}
|
||||
/>
|
||||
) : (
|
||||
<span className="text-body font-medium text-foreground-secondary hidden md:block">Session Viewer</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Center — search bar + contextual redaction controls */}
|
||||
<div className="flex items-center gap-3 flex-shrink-0">
|
||||
@@ -263,8 +355,19 @@ export function App() {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Right — export button, right-justified */}
|
||||
<div className="flex-1 min-w-0 flex justify-end">
|
||||
{/* Right — density toggle + export button */}
|
||||
<div className="flex-1 min-w-0 flex items-center justify-end gap-2">
|
||||
<button
|
||||
onClick={() => setDensity(isCompact ? "comfortable" : "compact")}
|
||||
className={`flex items-center justify-center w-8 h-8 rounded-md transition-colors flex-shrink-0 ${
|
||||
isCompact
|
||||
? "text-accent bg-accent-light"
|
||||
: "text-foreground-muted hover:text-foreground hover:bg-surface-overlay/60"
|
||||
}`}
|
||||
title={isCompact ? "Switch to comfortable density" : "Switch to compact density"}
|
||||
>
|
||||
<LayoutRows size="w-4 h-4" />
|
||||
</button>
|
||||
{currentSession && (
|
||||
<ExportButton
|
||||
session={currentSession}
|
||||
@@ -291,6 +394,9 @@ export function App() {
|
||||
focusedIndex={activeFocusIndex}
|
||||
toolProgress={currentSession?.toolProgress}
|
||||
progressEnabled={progressEnabled}
|
||||
sessionId={currentSession?.id}
|
||||
project={currentSession?.project}
|
||||
compact={isCompact}
|
||||
/>
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
116
src/client/components/Icons.tsx
Normal file
116
src/client/components/Icons.tsx
Normal file
@@ -0,0 +1,116 @@
|
||||
import React from "react";
|
||||
|
||||
interface IconProps {
|
||||
size?: string;
|
||||
strokeWidth?: number;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
const defaults = { size: "w-4 h-4", strokeWidth: 1.5 };
|
||||
|
||||
function icon(
|
||||
d: string | string[],
|
||||
defaultStrokeWidth = defaults.strokeWidth
|
||||
): React.FC<IconProps> {
|
||||
const paths = Array.isArray(d) ? d : [d];
|
||||
return function Icon({
|
||||
size = defaults.size,
|
||||
strokeWidth = defaultStrokeWidth,
|
||||
className = "",
|
||||
}: IconProps) {
|
||||
return (
|
||||
<svg
|
||||
className={`${size} ${className}`}
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke="currentColor"
|
||||
strokeWidth={strokeWidth}
|
||||
>
|
||||
{paths.map((p, i) => (
|
||||
<path key={i} strokeLinecap="round" strokeLinejoin="round" d={p} />
|
||||
))}
|
||||
</svg>
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
// --- Directional chevrons (2px stroke) ---
export const ChevronRight = icon(
  "M8.25 4.5l7.5 7.5-7.5 7.5",
  2
);
export const ChevronLeft = icon(
  "M15.75 19.5L8.25 12l7.5-7.5",
  2
);
export const ChevronDown = icon(
  "M19.5 8.25l-7.5 7.5-7.5-7.5",
  2
);
export const ChevronUp = icon(
  "M4.5 15.75l7.5-7.5 7.5 7.5",
  2
);

// --- Action glyphs ---
export const Search = icon(
  "M21 21l-5.197-5.197m0 0A7.5 7.5 0 105.196 5.196a7.5 7.5 0 0010.607 10.607z",
  2
);
export const X = icon(
  "M6 18L18 6M6 6l12 12",
  2
);
export const Copy = icon(
  "M15.75 17.25v3.375c0 .621-.504 1.125-1.125 1.125h-9.75a1.125 1.125 0 01-1.125-1.125V7.875c0-.621.504-1.125 1.125-1.125H6.75a9.06 9.06 0 011.5.124m7.5 10.376h3.375c.621 0 1.125-.504 1.125-1.125V11.25c0-4.46-3.243-8.161-7.5-8.876a9.06 9.06 0 00-1.5-.124H9.375c-.621 0-1.125.504-1.125 1.125v3.5m7.5 10.375H9.375a1.125 1.125 0 01-1.125-1.125v-9.25m0 0a2.625 2.625 0 115.25 0H12m-3.75 0h3.75"
);
export const Check = icon(
  "M4.5 12.75l6 6 9-13.5",
  2
);
// Single path wrapped in an array (both forms are accepted by icon()).
export const Refresh = icon(
  [
    "M16.023 9.348h4.992v-.001M2.985 19.644v-4.992m0 0h4.992m-4.993 0l3.181 3.183a8.25 8.25 0 0013.803-3.7M4.031 9.865a8.25 8.25 0 0113.803-3.7l3.181 3.182",
  ]
);
export const EyeSlash = icon(
  "M3.98 8.223A10.477 10.477 0 001.934 12C3.226 16.338 7.244 19.5 12 19.5c.993 0 1.953-.138 2.863-.395M6.228 6.228A10.45 10.45 0 0112 4.5c4.756 0 8.773 3.162 10.065 7.498a10.523 10.523 0 01-4.293 5.774M6.228 6.228L3 3m3.228 3.228l3.65 3.65m7.894 7.894L21 21m-3.228-3.228l-3.65-3.65m0 0a3 3 0 10-4.243-4.243m4.242 4.242L9.88 9.88"
);
export const Shield = icon(
  "M9 12.75L11.25 15 15 9.75m-3-7.036A11.959 11.959 0 013.598 6 11.99 11.99 0 003 9.749c0 5.592 3.824 10.29 9 11.623 5.176-1.332 9-6.03 9-11.622 0-1.31-.21-2.571-.598-3.751h-.152c-3.196 0-6.1-1.248-8.25-3.285z",
  2
);
export const Filter = icon(
  "M12 3c2.755 0 5.455.232 8.083.678.533.09.917.556.917 1.096v1.044a2.25 2.25 0 01-.659 1.591l-5.432 5.432a2.25 2.25 0 00-.659 1.591v2.927a2.25 2.25 0 01-1.244 2.013L9.75 21v-6.568a2.25 2.25 0 00-.659-1.591L3.659 7.409A2.25 2.25 0 013 5.818V4.774c0-.54.384-1.006.917-1.096A48.32 48.32 0 0112 3z"
);
export const Chat = icon(
  "M7.5 8.25h9m-9 3H12m-9.75 1.51c0 1.6 1.123 2.994 2.707 3.227 1.129.166 2.27.293 3.423.379.35.026.67.21.865.501L12 21l2.755-4.133a1.14 1.14 0 01.865-.501 48.172 48.172 0 003.423-.379c1.584-.233 2.707-1.626 2.707-3.228V6.741c0-1.602-1.123-2.995-2.707-3.228A48.394 48.394 0 0012 3c-2.392 0-4.744.175-7.043.513C3.373 3.746 2.25 5.14 2.25 6.741v6.018z"
);
export const Download = icon(
  "M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5M16.5 12L12 16.5m0 0L7.5 12m4.5 4.5V3",
  2
);
export const AlertCircle = icon(
  "M12 9v3.75m9-.75a9 9 0 11-18 0 9 9 0 0118 0zm-9 3.75h.008v.008H12v-.008z",
  2
);
export const Clipboard = icon(
  "M15.666 3.888A2.25 2.25 0 0013.5 2.25h-3c-1.03 0-1.9.693-2.166 1.638m7.332 0c.055.194.084.4.084.612v0a.75.75 0 01-.75.75H9.75a.75.75 0 01-.75-.75v0c0-.212.03-.418.084-.612m7.332 0c.646.049 1.288.11 1.927.184 1.1.128 1.907 1.077 1.907 2.185V19.5a2.25 2.25 0 01-2.25 2.25H6.75A2.25 2.25 0 014.5 19.5V6.257c0-1.108.806-2.057 1.907-2.185a48.208 48.208 0 011.927-.184"
);
export const ChatBubble = icon(
  "M20.25 8.511c.884.284 1.5 1.128 1.5 2.097v4.286c0 1.136-.847 2.1-1.98 2.193-.34.027-.68.052-1.02.072v3.091l-3-3c-1.354 0-2.694-.055-4.02-.163a2.115 2.115 0 01-.825-.242m9.345-8.334a2.126 2.126 0 00-.476-.095 48.64 48.64 0 00-8.048 0c-1.131.094-1.976 1.057-1.976 2.192v4.286c0 .837.46 1.58 1.155 1.951m9.345-8.334V6.637c0-1.621-1.152-3.026-2.76-3.235A48.455 48.455 0 0011.25 3c-2.115 0-4.198.137-6.24.402-1.608.209-2.76 1.614-2.76 3.235v6.226c0 1.621 1.152 3.026 2.76 3.235.577.075 1.157.14 1.74.194V21l4.155-4.155"
);
export const Menu = icon(
  "M3.75 6.75h16.5M3.75 12h16.5m-16.5 5.25h16.5",
  2
);
// NOTE(review): despite the "Rows" name, this path appears to draw a 2x2
// grid of rounded squares — consider renaming if no callers depend on it.
export const LayoutRows = icon(
  "M3.75 6A2.25 2.25 0 016 3.75h2.25A2.25 2.25 0 0110.5 6v2.25a2.25 2.25 0 01-2.25 2.25H6a2.25 2.25 0 01-2.25-2.25V6zM3.75 15.75A2.25 2.25 0 016 13.5h2.25a2.25 2.25 0 012.25 2.25V18a2.25 2.25 0 01-2.25 2.25H6A2.25 2.25 0 013.75 18v-2.25zM13.5 6a2.25 2.25 0 012.25-2.25H18A2.25 2.25 0 0120.25 6v2.25A2.25 2.25 0 0118 10.5h-2.25a2.25 2.25 0 01-2.25-2.25V6zM13.5 15.75a2.25 2.25 0 012.25-2.25H18a2.25 2.25 0 012.25 2.25V18A2.25 2.25 0 0118 20.25h-2.25A2.25 2.25 0 0113.5 18v-2.25z"
);
|
||||
|
||||
/** Spinner icon — uses fill, not stroke */
|
||||
export function Spinner({ size = "w-3.5 h-3.5", className = "" }: Omit<IconProps, "strokeWidth">): React.ReactElement {
|
||||
return (
|
||||
<svg className={`${size} animate-spin ${className}`} fill="none" viewBox="0 0 24 24">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z" />
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
@@ -6,6 +6,7 @@ import { renderMarkdown, highlightSearchText } from "../lib/markdown";
|
||||
import { redactMessage } from "../../shared/sensitive-redactor";
|
||||
import { escapeHtml } from "../../shared/escape-html";
|
||||
import { ProgressBadge } from "./ProgressBadge";
|
||||
import { ChevronRight, Copy, Check, EyeSlash } from "./Icons";
|
||||
|
||||
interface Props {
|
||||
message: ParsedMessage;
|
||||
@@ -16,6 +17,7 @@ interface Props {
|
||||
autoRedactEnabled: boolean;
|
||||
progressEvents?: ParsedMessage[];
|
||||
progressEnabled?: boolean;
|
||||
compact?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -34,6 +36,7 @@ export function MessageBubble({
|
||||
autoRedactEnabled,
|
||||
progressEvents,
|
||||
progressEnabled,
|
||||
compact = false,
|
||||
}: Props) {
|
||||
const colors = CATEGORY_COLORS[message.category];
|
||||
const label = CATEGORY_LABELS[message.category];
|
||||
@@ -58,7 +61,6 @@ export function MessageBubble({
|
||||
}
|
||||
|
||||
// Structured data categories: render as preformatted text, not markdown.
|
||||
// Avoids expensive marked.parse() on large JSON/log blobs.
|
||||
if (msg.category === "hook_progress" || msg.category === "file_snapshot") {
|
||||
const html = `<pre class="hljs"><code>${escapeHtml(tryPrettyJson(msg.content))}</code></pre>`;
|
||||
return searchQuery ? highlightSearchText(html, searchQuery) : html;
|
||||
@@ -99,11 +101,28 @@ export function MessageBubble({
|
||||
? formatTimestamp(message.timestamp)
|
||||
: null;
|
||||
|
||||
// Content is sourced from local user-owned JSONL files (~/.claude/projects/), not untrusted input
|
||||
const contentEl = !collapsed ? (
|
||||
<div
|
||||
className={`prose-message text-body text-foreground max-w-none break-words overflow-hidden ${
|
||||
compact ? "px-3 pb-2 pt-0.5" : "px-5 pb-4 pt-1"
|
||||
}`}
|
||||
dangerouslySetInnerHTML={{ __html: renderedHtml }}
|
||||
/>
|
||||
) : null;
|
||||
|
||||
const collapsedPreviewEl = collapsed && message.category === "thinking" && collapsedPreview ? (
|
||||
<div className={compact ? "px-3 pb-2 pt-0.5" : "px-5 pb-3 pt-1"}>
|
||||
<pre className="text-caption text-foreground-muted whitespace-pre-wrap line-clamp-2 font-mono">{collapsedPreview.preview}</pre>
|
||||
</div>
|
||||
) : null;
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`
|
||||
group rounded-xl border bg-surface-raised
|
||||
transition-all duration-200 relative overflow-hidden
|
||||
group rounded-xl border-l-[3px] border bg-surface-raised
|
||||
transition-all duration-200
|
||||
${colors.accentBorder}
|
||||
${colors.border}
|
||||
${dimmed ? "message-dimmed" : ""}
|
||||
${selectedForRedaction ? "redaction-selected" : ""}
|
||||
@@ -111,23 +130,19 @@ export function MessageBubble({
|
||||
shadow-card
|
||||
`}
|
||||
>
|
||||
{/* Category accent strip */}
|
||||
<div className={`absolute left-0 top-0 bottom-0 w-[3px] rounded-l-xl ${colors.dot}`} />
|
||||
|
||||
{/* Header bar */}
|
||||
<div className="flex items-center gap-1.5 px-5 min-h-10 py-2.5">
|
||||
<div className={`flex items-center gap-1.5 ${compact ? "px-3 min-h-8 py-1.5" : "px-5 min-h-10 py-2.5"}`}>
|
||||
{isCollapsible && (
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); setCollapsed(!collapsed); }}
|
||||
className="flex items-center justify-center w-5 h-5 text-foreground-muted hover:text-foreground transition-colors flex-shrink-0"
|
||||
aria-label={collapsed ? "Expand" : "Collapse"}
|
||||
>
|
||||
<svg
|
||||
className={`w-3.5 h-3.5 transition-transform duration-150 ${collapsed ? "" : "rotate-90"}`}
|
||||
fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2.5}
|
||||
>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M8.25 4.5l7.5 7.5-7.5 7.5" />
|
||||
</svg>
|
||||
<ChevronRight
|
||||
size="w-3.5 h-3.5"
|
||||
strokeWidth={2.5}
|
||||
className={`transition-transform duration-150 ${collapsed ? "" : "rotate-90"}`}
|
||||
/>
|
||||
</button>
|
||||
)}
|
||||
<span className={`w-1.5 h-1.5 rounded-full flex-shrink-0 ${colors.dot}`} />
|
||||
@@ -136,7 +151,7 @@ export function MessageBubble({
|
||||
</span>
|
||||
{timestamp && (
|
||||
<>
|
||||
<span className="text-border leading-none">·</span>
|
||||
<span className="text-border leading-none">·</span>
|
||||
<span className="text-caption text-foreground-muted tabular-nums leading-none">
|
||||
{timestamp}
|
||||
</span>
|
||||
@@ -144,7 +159,7 @@ export function MessageBubble({
|
||||
)}
|
||||
{isCollapsible && collapsed && collapsedPreview && (
|
||||
<>
|
||||
<span className="text-border leading-none">·</span>
|
||||
<span className="text-border leading-none">·</span>
|
||||
<span className="text-caption text-foreground-muted truncate max-w-[300px] leading-none">
|
||||
{message.category === "thinking" && collapsedPreview.totalLines > 2
|
||||
? `${collapsedPreview.totalLines} lines`
|
||||
@@ -170,13 +185,9 @@ export function MessageBubble({
|
||||
title="Copy message content"
|
||||
>
|
||||
{contentCopied ? (
|
||||
<svg className="w-4 h-4 text-green-400" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M4.5 12.75l6 6 9-13.5" />
|
||||
</svg>
|
||||
<Check size="w-4 h-4" className="text-green-400" />
|
||||
) : (
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.75 17.25v3.375c0 .621-.504 1.125-1.125 1.125h-9.75a1.125 1.125 0 01-1.125-1.125V7.875c0-.621.504-1.125 1.125-1.125H6.75a9.06 9.06 0 011.5.124m7.5 10.376h3.375c.621 0 1.125-.504 1.125-1.125V11.25c0-4.46-3.243-8.161-7.5-8.876a9.06 9.06 0 00-1.5-.124H9.375c-.621 0-1.125.504-1.125 1.125v3.5m7.5 10.375H9.375a1.125 1.125 0 01-1.125-1.125v-9.25m0 0a2.625 2.625 0 115.25 0H12m-3.75 0h3.75" />
|
||||
</svg>
|
||||
<Copy />
|
||||
)}
|
||||
</button>
|
||||
<button
|
||||
@@ -191,25 +202,13 @@ export function MessageBubble({
|
||||
}`}
|
||||
title={selectedForRedaction ? "Deselect for redaction" : "Select for redaction"}
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3.98 8.223A10.477 10.477 0 001.934 12C3.226 16.338 7.244 19.5 12 19.5c.993 0 1.953-.138 2.863-.395M6.228 6.228A10.45 10.45 0 0112 4.5c4.756 0 8.773 3.162 10.065 7.498a10.523 10.523 0 01-4.293 5.774M6.228 6.228L3 3m3.228 3.228l3.65 3.65m7.894 7.894L21 21m-3.228-3.228l-3.65-3.65m0 0a3 3 0 10-4.243-4.243m4.242 4.242L9.88 9.88" />
|
||||
</svg>
|
||||
<EyeSlash />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Content — sourced from local user-owned JSONL files, not external/untrusted input */}
|
||||
{!collapsed && (
|
||||
<div
|
||||
className="prose-message text-body text-foreground px-5 pb-4 pt-1 max-w-none break-words overflow-hidden"
|
||||
dangerouslySetInnerHTML={{ __html: renderedHtml }}
|
||||
/>
|
||||
)}
|
||||
{collapsed && message.category === "thinking" && collapsedPreview && (
|
||||
<div className="px-5 pb-3 pt-1">
|
||||
<pre className="text-caption text-foreground-muted whitespace-pre-wrap line-clamp-2 font-mono">{collapsedPreview.preview}</pre>
|
||||
</div>
|
||||
)}
|
||||
{contentEl}
|
||||
{collapsedPreviewEl}
|
||||
{message.category === "tool_call" && progressEnabled && progressEvents && progressEvents.length > 0 && (
|
||||
<ProgressBadge events={progressEvents} />
|
||||
)}
|
||||
@@ -230,8 +229,6 @@ function isDiffContent(content: string): boolean {
|
||||
diffLines++;
|
||||
}
|
||||
}
|
||||
// Require at least one hunk header AND some +/- lines to avoid false positives
|
||||
// on YAML lists, markdown lists, or other content with leading dashes
|
||||
return hunkHeaders >= 1 && diffLines >= 2;
|
||||
}
|
||||
|
||||
@@ -269,7 +266,6 @@ function formatTimestamp(ts: string): string {
|
||||
});
|
||||
}
|
||||
|
||||
/** If the string is valid JSON, return it pretty-printed; otherwise return as-is. */
|
||||
function tryPrettyJson(text: string): string {
|
||||
const trimmed = text.trimStart();
|
||||
if (trimmed[0] !== "{" && trimmed[0] !== "[") return text;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import React, { useState, useEffect, useMemo } from "react";
|
||||
import type { SessionEntry } from "../lib/types";
|
||||
import { ChevronRight, ChevronLeft, ChatBubble } from "./Icons";
|
||||
|
||||
interface Props {
|
||||
sessions: SessionEntry[];
|
||||
@@ -49,9 +50,7 @@ export function SessionList({ sessions, loading, selectedId, onSelect }: Props)
|
||||
return (
|
||||
<div className="flex flex-col items-center justify-center py-12 px-6 text-center">
|
||||
<div className="w-10 h-10 rounded-xl bg-surface-inset flex items-center justify-center mb-3">
|
||||
<svg className="w-5 h-5 text-foreground-muted" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M20.25 8.511c.884.284 1.5 1.128 1.5 2.097v4.286c0 1.136-.847 2.1-1.98 2.193-.34.027-.68.052-1.02.072v3.091l-3-3c-1.354 0-2.694-.055-4.02-.163a2.115 2.115 0 01-.825-.242m9.345-8.334a2.126 2.126 0 00-.476-.095 48.64 48.64 0 00-8.048 0c-1.131.094-1.976 1.057-1.976 2.192v4.286c0 .837.46 1.58 1.155 1.951m9.345-8.334V6.637c0-1.621-1.152-3.026-2.76-3.235A48.455 48.455 0 0011.25 3c-2.115 0-4.198.137-6.24.402-1.608.209-2.76 1.614-2.76 3.235v6.226c0 1.621 1.152 3.026 2.76 3.235.577.075 1.157.14 1.74.194V21l4.155-4.155" />
|
||||
</svg>
|
||||
<ChatBubble size="w-5 h-5" className="text-foreground-muted" />
|
||||
</div>
|
||||
<p className="text-body font-medium text-foreground-secondary">No sessions found</p>
|
||||
<p className="text-caption text-foreground-muted mt-1">Sessions will appear here once created</p>
|
||||
@@ -68,13 +67,15 @@ export function SessionList({ sessions, loading, selectedId, onSelect }: Props)
|
||||
onClick={() => setSelectedProject(null)}
|
||||
className="w-full text-left px-4 py-2.5 text-caption font-medium text-accent hover:text-accent-dark hover:bg-surface-overlay flex items-center gap-1.5 border-b border-border-muted transition-colors"
|
||||
>
|
||||
<svg className="w-3.5 h-3.5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.75 19.5L8.25 12l7.5-7.5" />
|
||||
</svg>
|
||||
<ChevronLeft size="w-3.5 h-3.5" />
|
||||
<span>All Projects</span>
|
||||
</button>
|
||||
<div className="px-4 py-2 text-caption font-semibold text-foreground-muted uppercase tracking-wider border-b border-border-muted" style={{ background: "var(--color-surface-inset)" }}>
|
||||
{formatProjectName(selectedProject)}
|
||||
<div
|
||||
className="px-4 py-2 text-caption font-semibold text-foreground-muted uppercase tracking-wider border-b border-border-muted"
|
||||
style={{ background: "var(--color-surface-inset)" }}
|
||||
title={formatProjectName(selectedProject)}
|
||||
>
|
||||
{truncateProjectName(selectedProject)}
|
||||
</div>
|
||||
<div className="py-1 px-2">
|
||||
{projectSessions.map((session, idx) => {
|
||||
@@ -90,18 +91,18 @@ export function SessionList({ sessions, loading, selectedId, onSelect }: Props)
|
||||
: "hover:bg-surface-overlay"
|
||||
}
|
||||
`}
|
||||
style={{ animationDelay: `${idx * 30}ms` }}
|
||||
style={{ animationDelay: `${Math.min(idx, 15) * 30}ms` }}
|
||||
>
|
||||
<div className={`text-body font-medium truncate ${isSelected ? "text-accent-dark" : "text-foreground"}`}>
|
||||
{session.summary || session.firstPrompt || "Untitled Session"}
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5 mt-1 text-caption text-foreground-muted">
|
||||
<span>{formatDate(session.modified || session.created)}</span>
|
||||
<span className="text-border">·</span>
|
||||
<span>{formatRelativeTime(session.modified || session.created)}</span>
|
||||
<span className="text-border">·</span>
|
||||
<span className="tabular-nums">{session.messageCount} msgs</span>
|
||||
{session.duration && session.duration > 0 && (
|
||||
<>
|
||||
<span className="text-border">·</span>
|
||||
<span className="text-border">·</span>
|
||||
<span className="tabular-nums">{formatSessionDuration(session.duration)}</span>
|
||||
</>
|
||||
)}
|
||||
@@ -122,24 +123,26 @@ export function SessionList({ sessions, loading, selectedId, onSelect }: Props)
|
||||
(a.modified || a.created) > (b.modified || b.created) ? a : b
|
||||
);
|
||||
const count = projectSessions.length;
|
||||
const totalMessages = projectSessions.reduce((sum, s) => sum + s.messageCount, 0);
|
||||
return (
|
||||
<button
|
||||
key={project}
|
||||
onClick={() => setSelectedProject(project)}
|
||||
className="w-full text-left my-0.5 px-3 py-2.5 rounded-lg hover:bg-surface-overlay transition-all duration-200 group"
|
||||
title={formatProjectName(project)}
|
||||
>
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="text-body font-medium text-foreground truncate">
|
||||
{formatProjectName(project)}
|
||||
{truncateProjectName(project)}
|
||||
</div>
|
||||
<svg className="w-4 h-4 text-foreground-muted opacity-0 group-hover:opacity-100 transition-opacity flex-shrink-0" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M8.25 4.5l7.5 7.5-7.5 7.5" />
|
||||
</svg>
|
||||
<ChevronRight size="w-4 h-4" className="text-foreground-muted opacity-0 group-hover:opacity-100 transition-opacity flex-shrink-0" />
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5 mt-1 text-caption text-foreground-muted">
|
||||
<span className="tabular-nums">{count} {count === 1 ? "session" : "sessions"}</span>
|
||||
<span className="text-border">·</span>
|
||||
<span>{formatDate(latest.modified || latest.created)}</span>
|
||||
<span className="text-border">·</span>
|
||||
<span>{formatRelativeTime(latest.modified || latest.created)}</span>
|
||||
<span className="text-border">·</span>
|
||||
<span className="tabular-nums">{totalMessages} msgs</span>
|
||||
</div>
|
||||
</button>
|
||||
);
|
||||
@@ -150,10 +153,6 @@ export function SessionList({ sessions, loading, selectedId, onSelect }: Props)
|
||||
|
||||
/**
|
||||
* Best-effort decode of Claude Code's project directory name back to a path.
|
||||
* Claude encodes project paths by replacing '/' with '-', but this is lossy:
|
||||
* a path like /home/user/my-cool-app encodes as -home-user-my-cool-app and
|
||||
* decodes as /home/user/my/cool/app (hyphens in the original name are lost).
|
||||
* There is no way to distinguish path separators from literal hyphens.
|
||||
*/
|
||||
function formatProjectName(project: string): string {
|
||||
if (project.startsWith("-")) {
|
||||
@@ -162,6 +161,14 @@ function formatProjectName(project: string): string {
|
||||
return project;
|
||||
}
|
||||
|
||||
/** Show last 2 path segments for compact display. */
|
||||
function truncateProjectName(project: string): string {
|
||||
const full = formatProjectName(project);
|
||||
const segments = full.split("/").filter(Boolean);
|
||||
if (segments.length <= 2) return full;
|
||||
return segments.slice(-2).join("/");
|
||||
}
|
||||
|
||||
function formatSessionDuration(ms: number): string {
|
||||
const minutes = Math.floor(ms / 60000);
|
||||
if (minutes < 1) return "<1m";
|
||||
@@ -172,14 +179,27 @@ function formatSessionDuration(ms: number): string {
|
||||
return `${hours}h ${rem}m`;
|
||||
}
|
||||
|
||||
function formatDate(dateStr: string): string {
|
||||
function formatRelativeTime(dateStr: string): string {
|
||||
if (!dateStr) return "";
|
||||
const d = new Date(dateStr);
|
||||
if (isNaN(d.getTime())) return dateStr;
|
||||
return d.toLocaleDateString(undefined, {
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
});
|
||||
|
||||
const now = Date.now();
|
||||
const diffMs = now - d.getTime();
|
||||
|
||||
if (diffMs < 0) return "just now";
|
||||
if (diffMs < 60_000) return "just now";
|
||||
if (diffMs < 3_600_000) {
|
||||
const mins = Math.floor(diffMs / 60_000);
|
||||
return `${mins}m ago`;
|
||||
}
|
||||
if (diffMs < 86_400_000) {
|
||||
const hours = Math.floor(diffMs / 3_600_000);
|
||||
return `${hours}h ago`;
|
||||
}
|
||||
if (diffMs < 604_800_000) {
|
||||
const days = Math.floor(diffMs / 86_400_000);
|
||||
return `${days}d ago`;
|
||||
}
|
||||
return d.toLocaleDateString(undefined, { month: "short", day: "numeric" });
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import React, { useRef, useEffect, useMemo } from "react";
|
||||
import React, { useRef, useEffect, useMemo, useState } from "react";
|
||||
import type { ParsedMessage } from "../lib/types";
|
||||
import { MessageBubble } from "./MessageBubble";
|
||||
import { RedactedDivider } from "./RedactedDivider";
|
||||
import { Chat, Filter } from "./Icons";
|
||||
|
||||
interface Props {
|
||||
messages: ParsedMessage[];
|
||||
@@ -15,6 +16,9 @@ interface Props {
|
||||
focusedIndex?: number;
|
||||
toolProgress?: Record<string, ParsedMessage[]>;
|
||||
progressEnabled?: boolean;
|
||||
sessionId?: string;
|
||||
project?: string;
|
||||
compact?: boolean;
|
||||
}
|
||||
|
||||
function MessageSkeleton({ delay = 0 }: { delay?: number }) {
|
||||
@@ -48,6 +52,9 @@ export function SessionViewer({
|
||||
focusedIndex = -1,
|
||||
toolProgress,
|
||||
progressEnabled,
|
||||
sessionId,
|
||||
project,
|
||||
compact = false,
|
||||
}: Props) {
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
@@ -162,12 +169,25 @@ export function SessionViewer({
|
||||
className="w-14 h-14 rounded-2xl flex items-center justify-center mx-auto mb-4 border border-border-muted"
|
||||
style={{ background: "linear-gradient(135deg, var(--color-surface-overlay), var(--color-surface-inset))" }}
|
||||
>
|
||||
<svg className="w-7 h-7 text-foreground-muted" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M7.5 8.25h9m-9 3H12m-9.75 1.51c0 1.6 1.123 2.994 2.707 3.227 1.129.166 2.27.293 3.423.379.35.026.67.21.865.501L12 21l2.755-4.133a1.14 1.14 0 01.865-.501 48.172 48.172 0 003.423-.379c1.584-.233 2.707-1.626 2.707-3.228V6.741c0-1.602-1.123-2.995-2.707-3.228A48.394 48.394 0 0012 3c-2.392 0-4.744.175-7.043.513C3.373 3.746 2.25 5.14 2.25 6.741v6.018z" />
|
||||
</svg>
|
||||
<Chat size="w-7 h-7" className="text-foreground-muted" />
|
||||
</div>
|
||||
<p className="text-subheading font-medium text-foreground">Select a session</p>
|
||||
<p className="text-body text-foreground-muted mt-1.5">Choose a session from the sidebar to view its messages</p>
|
||||
<div className="mt-5 flex flex-col gap-2 text-caption text-foreground-muted">
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
<kbd className="inline-flex items-center justify-center w-5 h-5 text-[11px] bg-surface-overlay border border-border rounded font-mono">j</kbd>
|
||||
<kbd className="inline-flex items-center justify-center w-5 h-5 text-[11px] bg-surface-overlay border border-border rounded font-mono">k</kbd>
|
||||
<span>Navigate messages</span>
|
||||
</div>
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
<kbd className="inline-flex items-center justify-center w-5 h-5 text-[11px] bg-surface-overlay border border-border rounded font-mono">/</kbd>
|
||||
<span>Search</span>
|
||||
</div>
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
<kbd className="inline-flex items-center justify-center px-1.5 h-5 text-[11px] bg-surface-overlay border border-border rounded font-mono">Esc</kbd>
|
||||
<span>Clear search</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -181,9 +201,7 @@ export function SessionViewer({
|
||||
className="w-14 h-14 rounded-2xl flex items-center justify-center mx-auto mb-4 border border-border-muted"
|
||||
style={{ background: "linear-gradient(135deg, var(--color-surface-overlay), var(--color-surface-inset))" }}
|
||||
>
|
||||
<svg className="w-7 h-7 text-foreground-muted" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M12 3c2.755 0 5.455.232 8.083.678.533.09.917.556.917 1.096v1.044a2.25 2.25 0 01-.659 1.591l-5.432 5.432a2.25 2.25 0 00-.659 1.591v2.927a2.25 2.25 0 01-1.244 2.013L9.75 21v-6.568a2.25 2.25 0 00-.659-1.591L3.659 7.409A2.25 2.25 0 013 5.818V4.774c0-.54.384-1.006.917-1.096A48.32 48.32 0 0112 3z" />
|
||||
</svg>
|
||||
<Filter size="w-7 h-7" className="text-foreground-muted" />
|
||||
</div>
|
||||
<p className="text-subheading font-medium text-foreground">No matching messages</p>
|
||||
<p className="text-body text-foreground-muted mt-1.5">Try adjusting your filters or search query</p>
|
||||
@@ -194,12 +212,28 @@ export function SessionViewer({
|
||||
|
||||
return (
|
||||
<div className="max-w-6xl mx-auto px-6 py-6">
|
||||
<div className="flex items-center justify-between mb-6">
|
||||
<span className="text-caption text-foreground-muted tabular-nums">
|
||||
<div className="sticky top-0 z-10 -mx-6 px-6 py-3 mb-4 glass-subtle border-b border-border-muted">
|
||||
<div className="flex items-center justify-between gap-4 min-w-0">
|
||||
<div className="flex items-center gap-2 min-w-0 overflow-x-auto scrollbar-none">
|
||||
{project && (
|
||||
<>
|
||||
<span className="text-caption text-foreground-muted whitespace-nowrap">{project}</span>
|
||||
<span className="text-foreground-muted opacity-40 flex-shrink-0">/</span>
|
||||
</>
|
||||
)}
|
||||
{sessionId && (
|
||||
<div className="flex items-center gap-1.5 min-w-0">
|
||||
<code className="text-caption text-foreground-muted font-mono whitespace-nowrap">{sessionId}</code>
|
||||
<CopyIdButton value={sessionId} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<span className="text-caption text-foreground-muted tabular-nums whitespace-nowrap flex-shrink-0">
|
||||
{messages.length} message{messages.length !== 1 ? "s" : ""}
|
||||
</span>
|
||||
</div>
|
||||
<div ref={containerRef} className="space-y-3">
|
||||
</div>
|
||||
<div ref={containerRef} className={compact ? "space-y-1.5" : "space-y-3"}>
|
||||
{displayItems.map((item, idx) => {
|
||||
if (item.type === "redacted_divider") {
|
||||
return <RedactedDivider key={item.key} />;
|
||||
@@ -234,7 +268,7 @@ export function SessionViewer({
|
||||
id={`msg-${msg.uuid}`}
|
||||
data-msg-index={item.messageIndex}
|
||||
className={`${idx < 20 ? "animate-fade-in" : ""} ${isFocused ? "search-match-focused rounded-xl" : ""}`}
|
||||
style={idx < 20 ? { animationDelay: `${idx * 20}ms`, animationFillMode: "backwards" } : undefined}
|
||||
style={idx < 20 ? { animationDelay: `${Math.min(idx, 15) * 20}ms`, animationFillMode: "backwards" } : undefined}
|
||||
>
|
||||
<MessageBubble
|
||||
message={msg}
|
||||
@@ -247,6 +281,7 @@ export function SessionViewer({
|
||||
autoRedactEnabled={autoRedactEnabled}
|
||||
progressEvents={progressEvents}
|
||||
progressEnabled={progressEnabled}
|
||||
compact={compact}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
@@ -256,6 +291,35 @@ export function SessionViewer({
|
||||
);
|
||||
}
|
||||
|
||||
function CopyIdButton({ value }: { value: string }) {
  // Transient "copied!" confirmation; resets automatically after 1.5s.
  const [isCopied, setIsCopied] = useState(false);

  const handleCopy = (): void => {
    navigator.clipboard.writeText(value).then(() => {
      setIsCopied(true);
      setTimeout(() => setIsCopied(false), 1500);
    });
  };

  // Checkmark while the confirmation is active, clipboard icon otherwise.
  const icon = isCopied ? (
    <svg className="w-3.5 h-3.5 text-category-assistant" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M4.5 12.75l6 6 9-13.5" />
    </svg>
  ) : (
    <svg className="w-3.5 h-3.5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M15.666 3.888A2.25 2.25 0 0013.5 2.25h-3c-1.03 0-1.9.693-2.166 1.638m7.332 0c.055.194.084.4.084.612v0a.75.75 0 01-.75.75H9.75a.75.75 0 01-.75-.75v0c0-.212.03-.418.084-.612m7.332 0c.646.049 1.288.11 1.927.184 1.1.128 1.907 1.077 1.907 2.185V19.5a2.25 2.25 0 01-2.25 2.25H6.75A2.25 2.25 0 014.5 19.5V6.257c0-1.108.806-2.057 1.907-2.185a48.208 48.208 0 011.927-.184" />
    </svg>
  );

  return (
    <button
      onClick={handleCopy}
      className="flex-shrink-0 p-0.5 rounded text-foreground-muted opacity-50 hover:opacity-100 transition-opacity"
      title="Copy session ID"
    >
      {icon}
    </button>
  );
}
|
||||
|
||||
function formatDuration(ms: number): string {
|
||||
const minutes = Math.floor(ms / 60000);
|
||||
if (minutes < 60) return `${minutes}m`;
|
||||
|
||||
@@ -6,7 +6,8 @@
|
||||
<title>Session Viewer</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
||||
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;450;500;600;700&family=JetBrains+Mono:wght@400;500&display=swap" rel="stylesheet" />
|
||||
<link href="https://cdn.jsdelivr.net/npm/geist@1/dist/fonts/geist-sans/style.css" rel="stylesheet" />
|
||||
<link href="https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@400;500&display=swap" rel="stylesheet" />
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
|
||||
@@ -2,51 +2,60 @@ import type { MessageCategory } from "./types";
|
||||
|
||||
export const CATEGORY_COLORS: Record<
|
||||
MessageCategory,
|
||||
{ dot: string; border: string; text: string }
|
||||
{ dot: string; border: string; text: string; accentBorder: string }
|
||||
> = {
|
||||
user_message: {
|
||||
dot: "bg-category-user",
|
||||
border: "border-category-user-border",
|
||||
text: "text-category-user",
|
||||
accentBorder: "border-l-category-user",
|
||||
},
|
||||
assistant_text: {
|
||||
dot: "bg-category-assistant",
|
||||
border: "border-category-assistant-border",
|
||||
text: "text-category-assistant",
|
||||
accentBorder: "border-l-category-assistant",
|
||||
},
|
||||
thinking: {
|
||||
dot: "bg-category-thinking",
|
||||
border: "border-category-thinking-border",
|
||||
text: "text-category-thinking",
|
||||
accentBorder: "border-l-category-thinking",
|
||||
},
|
||||
tool_call: {
|
||||
dot: "bg-category-tool",
|
||||
border: "border-category-tool-border",
|
||||
text: "text-category-tool",
|
||||
accentBorder: "border-l-category-tool",
|
||||
},
|
||||
tool_result: {
|
||||
dot: "bg-category-result",
|
||||
border: "border-category-result-border",
|
||||
text: "text-category-result",
|
||||
accentBorder: "border-l-category-result",
|
||||
},
|
||||
system_message: {
|
||||
dot: "bg-category-system",
|
||||
border: "border-category-system-border",
|
||||
text: "text-category-system",
|
||||
accentBorder: "border-l-category-system",
|
||||
},
|
||||
hook_progress: {
|
||||
dot: "bg-category-hook",
|
||||
border: "border-category-hook-border",
|
||||
text: "text-category-hook",
|
||||
accentBorder: "border-l-category-hook",
|
||||
},
|
||||
file_snapshot: {
|
||||
dot: "bg-category-snapshot",
|
||||
border: "border-category-snapshot-border",
|
||||
text: "text-category-snapshot",
|
||||
accentBorder: "border-l-category-snapshot",
|
||||
},
|
||||
summary: {
|
||||
dot: "bg-category-summary",
|
||||
border: "border-category-summary-border",
|
||||
text: "text-category-summary",
|
||||
accentBorder: "border-l-category-summary",
|
||||
},
|
||||
};
|
||||
|
||||
@@ -36,11 +36,10 @@
|
||||
--color-glow-accent: rgba(91, 156, 245, 0.12);
|
||||
--color-glow-success: rgba(63, 185, 80, 0.12);
|
||||
|
||||
/* Inter font from Google Fonts CDN */
|
||||
font-family: "Inter", system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
|
||||
/* Geist font from jsDelivr CDN */
|
||||
font-family: "Geist", system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
font-feature-settings: "cv02", "cv03", "cv04", "cv11";
|
||||
}
|
||||
|
||||
/* Smooth transitions on all interactive elements */
|
||||
|
||||
128
src/server/services/metadata-cache.ts
Normal file
128
src/server/services/metadata-cache.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
|
||||
export interface CacheEntry {
|
||||
mtimeMs: number;
|
||||
size: number;
|
||||
messageCount: number;
|
||||
firstPrompt: string;
|
||||
summary: string;
|
||||
firstTimestamp: string;
|
||||
lastTimestamp: string;
|
||||
}
|
||||
|
||||
interface CacheFile {
|
||||
version: 1;
|
||||
entries: Record<string, CacheEntry>;
|
||||
}
|
||||
|
||||
const DEFAULT_CACHE_PATH = path.join(
|
||||
os.homedir(),
|
||||
".cache",
|
||||
"session-viewer",
|
||||
"metadata.json"
|
||||
);
|
||||
|
||||
export class MetadataCache {
|
||||
private entries: Map<string, CacheEntry> = new Map();
|
||||
private dirty = false;
|
||||
private cachePath: string;
|
||||
private saving: Promise<void> | null = null;
|
||||
|
||||
constructor(cachePath: string = DEFAULT_CACHE_PATH) {
|
||||
this.cachePath = cachePath;
|
||||
}
|
||||
|
||||
async load(): Promise<void> {
|
||||
try {
|
||||
const raw = await fs.readFile(this.cachePath, "utf-8");
|
||||
const parsed: CacheFile = JSON.parse(raw);
|
||||
if (parsed.version === 1 && parsed.entries) {
|
||||
this.entries = new Map(Object.entries(parsed.entries));
|
||||
}
|
||||
} catch {
|
||||
// Missing or corrupt — start empty
|
||||
this.entries = new Map();
|
||||
}
|
||||
this.dirty = false;
|
||||
}
|
||||
|
||||
get(filePath: string, mtimeMs: number, size: number): CacheEntry | null {
|
||||
const entry = this.entries.get(filePath);
|
||||
if (!entry) return null;
|
||||
if (entry.mtimeMs !== mtimeMs || entry.size !== size) return null;
|
||||
return entry;
|
||||
}
|
||||
|
||||
set(filePath: string, entry: CacheEntry): void {
|
||||
this.entries.set(filePath, entry);
|
||||
this.dirty = true;
|
||||
}
|
||||
|
||||
isDirty(): boolean {
|
||||
return this.dirty;
|
||||
}
|
||||
|
||||
async save(existingPaths?: Set<string>): Promise<void> {
|
||||
if (!this.dirty) return;
|
||||
|
||||
// Coalesce concurrent saves
|
||||
if (this.saving) {
|
||||
await this.saving;
|
||||
if (!this.dirty) return;
|
||||
}
|
||||
|
||||
this.saving = this.doSave(existingPaths);
|
||||
try {
|
||||
await this.saving;
|
||||
} finally {
|
||||
this.saving = null;
|
||||
}
|
||||
}
|
||||
|
||||
async flush(): Promise<void> {
|
||||
if (!this.dirty) return;
|
||||
|
||||
if (this.saving) {
|
||||
await this.saving;
|
||||
if (!this.dirty) return;
|
||||
}
|
||||
|
||||
this.saving = this.doSave();
|
||||
try {
|
||||
await this.saving;
|
||||
} finally {
|
||||
this.saving = null;
|
||||
}
|
||||
}
|
||||
|
||||
private async doSave(existingPaths?: Set<string>): Promise<void> {
|
||||
// Prune stale entries
|
||||
if (existingPaths) {
|
||||
for (const key of this.entries.keys()) {
|
||||
if (!existingPaths.has(key)) {
|
||||
this.entries.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const cacheFile: CacheFile = {
|
||||
version: 1,
|
||||
entries: Object.fromEntries(this.entries),
|
||||
};
|
||||
|
||||
const json = JSON.stringify(cacheFile);
|
||||
|
||||
// Ensure directory exists
|
||||
const dir = path.dirname(this.cachePath);
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
|
||||
// Atomic write: temp file + rename
|
||||
const tmpPath = this.cachePath + `.tmp.${process.pid}`;
|
||||
await fs.writeFile(tmpPath, json, "utf-8");
|
||||
await fs.rename(tmpPath, this.cachePath);
|
||||
|
||||
this.dirty = false;
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,54 @@ import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
import type { SessionEntry } from "../../shared/types.js";
|
||||
import { extractSessionMetadata } from "./session-metadata.js";
|
||||
import { MetadataCache } from "./metadata-cache.js";
|
||||
import type { CacheEntry } from "./metadata-cache.js";
|
||||
|
||||
const CLAUDE_PROJECTS_DIR = path.join(os.homedir(), ".claude", "projects");
|
||||
const FILE_CONCURRENCY = 32;
|
||||
|
||||
let cache: MetadataCache | null = null;
|
||||
let cacheLoaded = false;
|
||||
|
||||
export function setCache(c: MetadataCache | null): void {
|
||||
cache = c;
|
||||
cacheLoaded = c !== null;
|
||||
}
|
||||
|
||||
async function ensureCache(): Promise<MetadataCache> {
|
||||
if (!cache) {
|
||||
cache = new MetadataCache();
|
||||
}
|
||||
if (!cacheLoaded) {
|
||||
await cache.load();
|
||||
cacheLoaded = true;
|
||||
}
|
||||
return cache;
|
||||
}
|
||||
|
||||
async function mapWithLimit<T, R>(
|
||||
items: T[],
|
||||
limit: number,
|
||||
fn: (item: T) => Promise<R>
|
||||
): Promise<R[]> {
|
||||
const results: R[] = new Array(items.length);
|
||||
let nextIndex = 0;
|
||||
|
||||
async function worker(): Promise<void> {
|
||||
while (nextIndex < items.length) {
|
||||
const i = nextIndex++;
|
||||
results[i] = await fn(items[i]);
|
||||
}
|
||||
}
|
||||
|
||||
const workers = Array.from(
|
||||
{ length: Math.min(limit, items.length) },
|
||||
() => worker()
|
||||
);
|
||||
await Promise.all(workers);
|
||||
return results;
|
||||
}
|
||||
|
||||
interface IndexEntry {
|
||||
sessionId: string;
|
||||
@@ -14,12 +62,14 @@ interface IndexEntry {
|
||||
projectPath?: string;
|
||||
}
|
||||
|
||||
const CLAUDE_PROJECTS_DIR = path.join(os.homedir(), ".claude", "projects");
|
||||
const MTIME_TOLERANCE_MS = 1000;
|
||||
|
||||
export async function discoverSessions(
|
||||
projectsDir: string = CLAUDE_PROJECTS_DIR
|
||||
): Promise<SessionEntry[]> {
|
||||
const sessions: SessionEntry[] = [];
|
||||
const metadataCache = await ensureCache();
|
||||
const discoveredPaths = new Set<string>();
|
||||
|
||||
let projectDirs: string[];
|
||||
try {
|
||||
@@ -28,63 +78,152 @@ export async function discoverSessions(
|
||||
return sessions;
|
||||
}
|
||||
|
||||
// Parallel I/O: stat + readFile for all project dirs concurrently
|
||||
const results = await Promise.all(
|
||||
projectDirs.map(async (projectDir) => {
|
||||
const projectPath = path.join(projectsDir, projectDir);
|
||||
const entries: SessionEntry[] = [];
|
||||
|
||||
let stat;
|
||||
let dirStat;
|
||||
try {
|
||||
stat = await fs.stat(projectPath);
|
||||
dirStat = await fs.stat(projectPath);
|
||||
} catch {
|
||||
return entries;
|
||||
}
|
||||
if (!stat.isDirectory()) return entries;
|
||||
if (!dirStat.isDirectory()) return entries;
|
||||
|
||||
const indexPath = path.join(projectPath, "sessions-index.json");
|
||||
let files: string[];
|
||||
try {
|
||||
const content = await fs.readFile(indexPath, "utf-8");
|
||||
const parsed = JSON.parse(content);
|
||||
|
||||
// Handle both formats: raw array or { version, entries: [...] }
|
||||
const rawEntries: IndexEntry[] = Array.isArray(parsed)
|
||||
? parsed
|
||||
: parsed.entries ?? [];
|
||||
|
||||
for (const entry of rawEntries) {
|
||||
const sessionPath =
|
||||
entry.fullPath ||
|
||||
path.join(projectPath, `${entry.sessionId}.jsonl`);
|
||||
|
||||
// Validate: reject paths with traversal segments or non-JSONL extensions.
|
||||
// Check the raw path for ".." before resolving (resolve normalizes them away).
|
||||
if (sessionPath.includes("..") || !sessionPath.endsWith(".jsonl")) {
|
||||
continue;
|
||||
}
|
||||
const resolved = path.resolve(sessionPath);
|
||||
|
||||
// Containment check: reject paths that escape the projects directory.
|
||||
// A corrupted or malicious index could set fullPath to an arbitrary
|
||||
// absolute path like "/etc/shadow.jsonl".
|
||||
if (!resolved.startsWith(projectsDir + path.sep) && resolved !== projectsDir) {
|
||||
continue;
|
||||
}
|
||||
|
||||
entries.push({
|
||||
id: entry.sessionId,
|
||||
summary: entry.summary || "",
|
||||
firstPrompt: entry.firstPrompt || "",
|
||||
project: projectDir,
|
||||
created: entry.created || "",
|
||||
modified: entry.modified || "",
|
||||
messageCount: entry.messageCount || 0,
|
||||
path: resolved,
|
||||
duration: computeDuration(entry.created, entry.modified),
|
||||
});
|
||||
}
|
||||
files = await fs.readdir(projectPath);
|
||||
} catch {
|
||||
// Missing or corrupt index - skip
|
||||
return entries;
|
||||
}
|
||||
|
||||
const jsonlFiles = files.filter((f) => f.endsWith(".jsonl"));
|
||||
|
||||
// Tier 1: Load sessions-index.json for this project
|
||||
const indexMap = await loadProjectIndex(projectPath);
|
||||
|
||||
const fileResults = await mapWithLimit(
|
||||
jsonlFiles,
|
||||
FILE_CONCURRENCY,
|
||||
async (filename) => {
|
||||
const filePath = path.join(projectPath, filename);
|
||||
|
||||
// Security: reject traversal
|
||||
if (filename.includes("..")) return null;
|
||||
|
||||
const resolved = path.resolve(filePath);
|
||||
if (
|
||||
!resolved.startsWith(projectsDir + path.sep) &&
|
||||
resolved !== projectsDir
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let fileStat;
|
||||
try {
|
||||
fileStat = await fs.stat(resolved);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
discoveredPaths.add(resolved);
|
||||
|
||||
const sessionId = path.basename(filename, ".jsonl");
|
||||
|
||||
// Tier 1: Check index
|
||||
const indexEntry = indexMap.get(sessionId);
|
||||
if (indexEntry?.modified) {
|
||||
const indexMtimeMs = new Date(indexEntry.modified).getTime();
|
||||
if (
|
||||
!isNaN(indexMtimeMs) &&
|
||||
Math.abs(indexMtimeMs - fileStat.mtimeMs) <= MTIME_TOLERANCE_MS
|
||||
) {
|
||||
const duration = computeDuration(
|
||||
indexEntry.created,
|
||||
indexEntry.modified
|
||||
);
|
||||
return {
|
||||
id: sessionId,
|
||||
project: projectDir,
|
||||
path: resolved,
|
||||
created: new Date(fileStat.birthtimeMs).toISOString(),
|
||||
modified: new Date(fileStat.mtimeMs).toISOString(),
|
||||
messageCount: indexEntry.messageCount || 0,
|
||||
firstPrompt: indexEntry.firstPrompt || "",
|
||||
summary: indexEntry.summary || "",
|
||||
duration: duration > 0 ? duration : undefined,
|
||||
} satisfies SessionEntry;
|
||||
}
|
||||
}
|
||||
|
||||
// Tier 2: Check metadata cache
|
||||
const cached = metadataCache.get(
|
||||
resolved,
|
||||
fileStat.mtimeMs,
|
||||
fileStat.size
|
||||
);
|
||||
if (cached) {
|
||||
const duration = computeDuration(
|
||||
cached.firstTimestamp,
|
||||
cached.lastTimestamp
|
||||
);
|
||||
return {
|
||||
id: sessionId,
|
||||
project: projectDir,
|
||||
path: resolved,
|
||||
created: new Date(fileStat.birthtimeMs).toISOString(),
|
||||
modified: new Date(fileStat.mtimeMs).toISOString(),
|
||||
messageCount: cached.messageCount,
|
||||
firstPrompt: cached.firstPrompt,
|
||||
summary: cached.summary,
|
||||
duration: duration > 0 ? duration : undefined,
|
||||
} satisfies SessionEntry;
|
||||
}
|
||||
|
||||
// Tier 3: Full parse
|
||||
let content: string;
|
||||
try {
|
||||
content = await fs.readFile(resolved, "utf-8");
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
const metadata = extractSessionMetadata(content);
|
||||
|
||||
// Update cache
|
||||
const cacheEntry: CacheEntry = {
|
||||
mtimeMs: fileStat.mtimeMs,
|
||||
size: fileStat.size,
|
||||
messageCount: metadata.messageCount,
|
||||
firstPrompt: metadata.firstPrompt,
|
||||
summary: metadata.summary,
|
||||
firstTimestamp: metadata.firstTimestamp,
|
||||
lastTimestamp: metadata.lastTimestamp,
|
||||
};
|
||||
metadataCache.set(resolved, cacheEntry);
|
||||
|
||||
const duration = computeDuration(
|
||||
metadata.firstTimestamp,
|
||||
metadata.lastTimestamp
|
||||
);
|
||||
|
||||
return {
|
||||
id: sessionId,
|
||||
project: projectDir,
|
||||
path: resolved,
|
||||
created: new Date(fileStat.birthtimeMs).toISOString(),
|
||||
modified: new Date(fileStat.mtimeMs).toISOString(),
|
||||
messageCount: metadata.messageCount,
|
||||
firstPrompt: metadata.firstPrompt,
|
||||
summary: metadata.summary,
|
||||
duration: duration > 0 ? duration : undefined,
|
||||
} satisfies SessionEntry;
|
||||
}
|
||||
);
|
||||
|
||||
for (const entry of fileResults) {
|
||||
if (entry) entries.push(entry);
|
||||
}
|
||||
|
||||
return entries;
|
||||
@@ -101,14 +240,47 @@ export async function discoverSessions(
|
||||
return dateB - dateA;
|
||||
});
|
||||
|
||||
// Fire-and-forget cache save
|
||||
metadataCache.save(discoveredPaths).catch(() => {
|
||||
// Cache write failure is non-fatal
|
||||
});
|
||||
|
||||
return sessions;
|
||||
}
|
||||
|
||||
function computeDuration(created?: string, modified?: string): number {
|
||||
if (!created || !modified) return 0;
|
||||
const createdMs = new Date(created).getTime();
|
||||
const modifiedMs = new Date(modified).getTime();
|
||||
if (isNaN(createdMs) || isNaN(modifiedMs)) return 0;
|
||||
const diff = modifiedMs - createdMs;
|
||||
async function loadProjectIndex(
|
||||
projectPath: string
|
||||
): Promise<Map<string, IndexEntry>> {
|
||||
const indexMap = new Map<string, IndexEntry>();
|
||||
const indexPath = path.join(projectPath, "sessions-index.json");
|
||||
|
||||
try {
|
||||
const raw = await fs.readFile(indexPath, "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
const rawEntries: IndexEntry[] = Array.isArray(parsed)
|
||||
? parsed
|
||||
: parsed.entries ?? [];
|
||||
|
||||
for (const entry of rawEntries) {
|
||||
if (entry.sessionId) {
|
||||
indexMap.set(entry.sessionId, entry);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Missing or corrupt index — continue without Tier 1
|
||||
}
|
||||
|
||||
return indexMap;
|
||||
}
|
||||
|
||||
function computeDuration(
|
||||
firstTimestamp?: string,
|
||||
lastTimestamp?: string
|
||||
): number {
|
||||
if (!firstTimestamp || !lastTimestamp) return 0;
|
||||
const firstMs = new Date(firstTimestamp).getTime();
|
||||
const lastMs = new Date(lastTimestamp).getTime();
|
||||
if (isNaN(firstMs) || isNaN(lastMs)) return 0;
|
||||
const diff = lastMs - firstMs;
|
||||
return diff > 0 ? diff : 0;
|
||||
}
|
||||
|
||||
65
src/server/services/session-metadata.ts
Normal file
65
src/server/services/session-metadata.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import {
|
||||
forEachJsonlLine,
|
||||
countMessagesForLine,
|
||||
classifyLine,
|
||||
} from "./session-parser.js";
|
||||
import type { RawLine } from "./session-parser.js";
|
||||
|
||||
export interface SessionMetadata {
|
||||
messageCount: number;
|
||||
firstPrompt: string;
|
||||
summary: string;
|
||||
firstTimestamp: string;
|
||||
lastTimestamp: string;
|
||||
parseErrors: number;
|
||||
}
|
||||
|
||||
const MAX_FIRST_PROMPT_LENGTH = 200;
|
||||
|
||||
export function extractSessionMetadata(content: string): SessionMetadata {
|
||||
let messageCount = 0;
|
||||
let firstPrompt = "";
|
||||
let summary = "";
|
||||
let firstTimestamp = "";
|
||||
let lastTimestamp = "";
|
||||
|
||||
const { parseErrors } = forEachJsonlLine(content, (parsed: RawLine) => {
|
||||
messageCount += countMessagesForLine(parsed);
|
||||
|
||||
if (parsed.timestamp) {
|
||||
if (!firstTimestamp) {
|
||||
firstTimestamp = parsed.timestamp;
|
||||
}
|
||||
lastTimestamp = parsed.timestamp;
|
||||
}
|
||||
|
||||
if (!firstPrompt && classifyLine(parsed) === "user") {
|
||||
const msgContent = parsed.message?.content;
|
||||
if (typeof msgContent === "string" && !isSystemReminder(msgContent)) {
|
||||
firstPrompt = truncate(msgContent, MAX_FIRST_PROMPT_LENGTH);
|
||||
}
|
||||
}
|
||||
|
||||
if (parsed.type === "summary" && parsed.summary) {
|
||||
summary = parsed.summary;
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
messageCount,
|
||||
firstPrompt,
|
||||
summary,
|
||||
firstTimestamp,
|
||||
lastTimestamp,
|
||||
parseErrors,
|
||||
};
|
||||
}
|
||||
|
||||
function isSystemReminder(text: string): boolean {
|
||||
return text.includes("<system-reminder>") || text.includes("</system-reminder>");
|
||||
}
|
||||
|
||||
function truncate(text: string, maxLength: number): string {
|
||||
if (text.length <= maxLength) return text;
|
||||
return text.slice(0, maxLength);
|
||||
}
|
||||
@@ -28,7 +28,7 @@ interface ContentBlock {
|
||||
content?: string | ContentBlock[];
|
||||
}
|
||||
|
||||
interface RawLine {
|
||||
export interface RawLine {
|
||||
type?: string;
|
||||
uuid?: string;
|
||||
timestamp?: string;
|
||||
@@ -43,6 +43,94 @@ interface RawLine {
|
||||
subtype?: string;
|
||||
}
|
||||
|
||||
export type LineClassification =
|
||||
| "user"
|
||||
| "assistant"
|
||||
| "progress"
|
||||
| "file-history-snapshot"
|
||||
| "summary"
|
||||
| "system"
|
||||
| "queue-operation"
|
||||
| "unknown";
|
||||
|
||||
export function forEachJsonlLine(
|
||||
content: string,
|
||||
onLine: (parsed: RawLine, lineIndex: number) => void
|
||||
): { parseErrors: number } {
|
||||
let parseErrors = 0;
|
||||
const lines = content.split("\n");
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const trimmed = lines[i].trim();
|
||||
if (!trimmed) continue;
|
||||
|
||||
let parsed: RawLine;
|
||||
try {
|
||||
parsed = JSON.parse(trimmed);
|
||||
} catch {
|
||||
parseErrors++;
|
||||
continue;
|
||||
}
|
||||
|
||||
onLine(parsed, i);
|
||||
}
|
||||
|
||||
return { parseErrors };
|
||||
}
|
||||
|
||||
export function classifyLine(parsed: RawLine): LineClassification {
|
||||
const type = parsed.type;
|
||||
if (type === "progress") return "progress";
|
||||
if (type === "file-history-snapshot") return "file-history-snapshot";
|
||||
if (type === "summary") return "summary";
|
||||
if (type === "system") return "system";
|
||||
if (type === "queue-operation") return "queue-operation";
|
||||
if (type === "user" || parsed.message?.role === "user") return "user";
|
||||
if (type === "assistant" || parsed.message?.role === "assistant") return "assistant";
|
||||
return "unknown";
|
||||
}
|
||||
|
||||
export function countMessagesForLine(parsed: RawLine): number {
|
||||
const classification = classifyLine(parsed);
|
||||
|
||||
switch (classification) {
|
||||
case "progress":
|
||||
case "file-history-snapshot":
|
||||
case "summary":
|
||||
return 1;
|
||||
|
||||
case "system":
|
||||
case "queue-operation":
|
||||
case "unknown":
|
||||
return 0;
|
||||
|
||||
case "user": {
|
||||
const content = parsed.message?.content;
|
||||
if (content === undefined || content === null) return 0;
|
||||
if (typeof content === "string") return 1;
|
||||
if (Array.isArray(content)) {
|
||||
return content.filter(
|
||||
(b: ContentBlock) => b.type === "tool_result" || b.type === "text"
|
||||
).length;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
case "assistant": {
|
||||
const content = parsed.message?.content;
|
||||
if (content === undefined || content === null) return 0;
|
||||
if (typeof content === "string") return 1;
|
||||
if (Array.isArray(content)) {
|
||||
return content.filter(
|
||||
(b: ContentBlock) =>
|
||||
b.type === "thinking" || b.type === "text" || b.type === "tool_use"
|
||||
).length;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function parseSession(
|
||||
filePath: string
|
||||
): Promise<ParsedMessage[]> {
|
||||
@@ -58,31 +146,23 @@ export async function parseSession(
|
||||
|
||||
export function parseSessionContent(content: string): ParsedMessage[] {
|
||||
const messages: ParsedMessage[] = [];
|
||||
const lines = content.split("\n").filter((l) => l.trim());
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let parsed: RawLine;
|
||||
try {
|
||||
parsed = JSON.parse(lines[i]);
|
||||
} catch {
|
||||
continue; // Skip malformed lines
|
||||
}
|
||||
|
||||
const extracted = extractMessages(parsed, i);
|
||||
forEachJsonlLine(content, (parsed, lineIndex) => {
|
||||
const extracted = extractMessages(parsed, lineIndex);
|
||||
messages.push(...extracted);
|
||||
}
|
||||
});
|
||||
|
||||
return messages;
|
||||
}
|
||||
|
||||
function extractMessages(raw: RawLine, rawIndex: number): ParsedMessage[] {
|
||||
const messages: ParsedMessage[] = [];
|
||||
const type = raw.type;
|
||||
const classification = classifyLine(raw);
|
||||
const uuid = raw.uuid || `generated-${rawIndex}`;
|
||||
const timestamp = raw.timestamp;
|
||||
|
||||
// Progress/hook messages - content is in `data`, not `content`
|
||||
if (type === "progress") {
|
||||
if (classification === "progress") {
|
||||
const data = raw.data;
|
||||
const progressText = data
|
||||
? formatProgressData(data)
|
||||
@@ -102,7 +182,7 @@ function extractMessages(raw: RawLine, rawIndex: number): ParsedMessage[] {
|
||||
}
|
||||
|
||||
// File history snapshot
|
||||
if (type === "file-history-snapshot") {
|
||||
if (classification === "file-history-snapshot") {
|
||||
messages.push({
|
||||
uuid,
|
||||
category: "file_snapshot",
|
||||
@@ -114,7 +194,7 @@ function extractMessages(raw: RawLine, rawIndex: number): ParsedMessage[] {
|
||||
}
|
||||
|
||||
// Summary message - text is in `summary` field, not `content`
|
||||
if (type === "summary") {
|
||||
if (classification === "summary") {
|
||||
messages.push({
|
||||
uuid,
|
||||
category: "summary",
|
||||
@@ -126,7 +206,7 @@ function extractMessages(raw: RawLine, rawIndex: number): ParsedMessage[] {
|
||||
}
|
||||
|
||||
// System metadata (turn_duration etc.) - skip, not user-facing
|
||||
if (type === "system" || type === "queue-operation") {
|
||||
if (classification === "system" || classification === "queue-operation") {
|
||||
return messages;
|
||||
}
|
||||
|
||||
@@ -134,7 +214,7 @@ function extractMessages(raw: RawLine, rawIndex: number): ParsedMessage[] {
|
||||
const role = raw.message?.role;
|
||||
const content = raw.message?.content;
|
||||
|
||||
if ((type === "user" || role === "user") && content !== undefined) {
|
||||
if (classification === "user" && content !== undefined) {
|
||||
if (typeof content === "string") {
|
||||
const category = detectSystemReminder(content)
|
||||
? "system_message"
|
||||
@@ -183,7 +263,7 @@ function extractMessages(raw: RawLine, rawIndex: number): ParsedMessage[] {
|
||||
return messages;
|
||||
}
|
||||
|
||||
if ((type === "assistant" || role === "assistant") && content !== undefined) {
|
||||
if (classification === "assistant" && content !== undefined) {
|
||||
if (typeof content === "string") {
|
||||
messages.push({
|
||||
uuid,
|
||||
|
||||
@@ -41,7 +41,7 @@ export default {
|
||||
},
|
||||
fontFamily: {
|
||||
sans: [
|
||||
"Inter",
|
||||
"Geist",
|
||||
"system-ui",
|
||||
"-apple-system",
|
||||
"BlinkMacSystemFont",
|
||||
|
||||
174
tests/unit/metadata-cache.test.ts
Normal file
174
tests/unit/metadata-cache.test.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
import { MetadataCache } from "../../src/server/services/metadata-cache.js";
|
||||
import type { CacheEntry } from "../../src/server/services/metadata-cache.js";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
|
||||
function makeCacheEntry(overrides: Partial<CacheEntry> = {}): CacheEntry {
|
||||
return {
|
||||
mtimeMs: 1700000000000,
|
||||
size: 1024,
|
||||
messageCount: 5,
|
||||
firstPrompt: "Hello",
|
||||
summary: "Session summary",
|
||||
firstTimestamp: "2025-01-01T10:00:00Z",
|
||||
lastTimestamp: "2025-01-01T11:00:00Z",
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("MetadataCache", () => {
|
||||
let tmpDir: string;
|
||||
let cachePath: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = path.join(os.tmpdir(), `sv-cache-test-${Date.now()}`);
|
||||
await fs.mkdir(tmpDir, { recursive: true });
|
||||
cachePath = path.join(tmpDir, "metadata.json");
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("returns null for unknown file path", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
expect(cache.get("/unknown/path.jsonl", 1000, 500)).toBeNull();
|
||||
});
|
||||
|
||||
it("returns entry when mtimeMs and size match", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
|
||||
const entry = makeCacheEntry({ mtimeMs: 1000, size: 500 });
|
||||
cache.set("/test/session.jsonl", entry);
|
||||
|
||||
const result = cache.get("/test/session.jsonl", 1000, 500);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.messageCount).toBe(5);
|
||||
expect(result!.firstPrompt).toBe("Hello");
|
||||
});
|
||||
|
||||
it("returns null when mtimeMs differs", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
|
||||
const entry = makeCacheEntry({ mtimeMs: 1000, size: 500 });
|
||||
cache.set("/test/session.jsonl", entry);
|
||||
|
||||
expect(cache.get("/test/session.jsonl", 2000, 500)).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null when size differs", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
|
||||
const entry = makeCacheEntry({ mtimeMs: 1000, size: 500 });
|
||||
cache.set("/test/session.jsonl", entry);
|
||||
|
||||
expect(cache.get("/test/session.jsonl", 1000, 999)).toBeNull();
|
||||
});
|
||||
|
||||
it("save is no-op when not dirty", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
await cache.save();
|
||||
|
||||
// File should not exist since nothing was set
|
||||
await expect(fs.access(cachePath)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("save writes to disk when dirty", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
|
||||
cache.set("/test/session.jsonl", makeCacheEntry());
|
||||
await cache.save();
|
||||
|
||||
const raw = await fs.readFile(cachePath, "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
expect(parsed.version).toBe(1);
|
||||
expect(parsed.entries["/test/session.jsonl"]).toBeDefined();
|
||||
expect(parsed.entries["/test/session.jsonl"].messageCount).toBe(5);
|
||||
});
|
||||
|
||||
it("save prunes entries not in existingPaths", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
|
||||
cache.set("/test/a.jsonl", makeCacheEntry());
|
||||
cache.set("/test/b.jsonl", makeCacheEntry());
|
||||
cache.set("/test/c.jsonl", makeCacheEntry());
|
||||
|
||||
const existingPaths = new Set(["/test/a.jsonl", "/test/c.jsonl"]);
|
||||
await cache.save(existingPaths);
|
||||
|
||||
const raw = await fs.readFile(cachePath, "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
expect(Object.keys(parsed.entries)).toHaveLength(2);
|
||||
expect(parsed.entries["/test/b.jsonl"]).toBeUndefined();
|
||||
});
|
||||
|
||||
it("load handles missing cache file", async () => {
|
||||
const cache = new MetadataCache(
|
||||
path.join(tmpDir, "nonexistent", "cache.json")
|
||||
);
|
||||
await cache.load();
|
||||
expect(cache.get("/test/session.jsonl", 1000, 500)).toBeNull();
|
||||
});
|
||||
|
||||
it("load handles corrupt cache file", async () => {
|
||||
await fs.writeFile(cachePath, "not valid json {{{");
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
expect(cache.get("/test/session.jsonl", 1000, 500)).toBeNull();
|
||||
});
|
||||
|
||||
it("persists and reloads across instances", async () => {
|
||||
const cache1 = new MetadataCache(cachePath);
|
||||
await cache1.load();
|
||||
cache1.set("/test/session.jsonl", makeCacheEntry({ mtimeMs: 42, size: 100 }));
|
||||
await cache1.save();
|
||||
|
||||
const cache2 = new MetadataCache(cachePath);
|
||||
await cache2.load();
|
||||
const result = cache2.get("/test/session.jsonl", 42, 100);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.messageCount).toBe(5);
|
||||
});
|
||||
|
||||
it("isDirty returns false initially, true after set", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
expect(cache.isDirty()).toBe(false);
|
||||
|
||||
cache.set("/test/session.jsonl", makeCacheEntry());
|
||||
expect(cache.isDirty()).toBe(true);
|
||||
});
|
||||
|
||||
it("isDirty resets to false after save", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
cache.set("/test/session.jsonl", makeCacheEntry());
|
||||
expect(cache.isDirty()).toBe(true);
|
||||
|
||||
await cache.save();
|
||||
expect(cache.isDirty()).toBe(false);
|
||||
});
|
||||
|
||||
it("flush writes without pruning", async () => {
|
||||
const cache = new MetadataCache(cachePath);
|
||||
await cache.load();
|
||||
|
||||
cache.set("/test/a.jsonl", makeCacheEntry());
|
||||
cache.set("/test/b.jsonl", makeCacheEntry());
|
||||
await cache.flush();
|
||||
|
||||
const raw = await fs.readFile(cachePath, "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
// Both should be present (no pruning on flush)
|
||||
expect(Object.keys(parsed.entries)).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
@@ -1,70 +1,122 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { discoverSessions } from "../../src/server/services/session-discovery.js";
|
||||
import { describe, it, expect, beforeEach } from "vitest";
|
||||
import { discoverSessions, setCache } from "../../src/server/services/session-discovery.js";
|
||||
import { MetadataCache } from "../../src/server/services/metadata-cache.js";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import os from "os";
|
||||
|
||||
/** Helper to write a sessions-index.json in the real { version, entries } format */
|
||||
function makeIndex(entries: Record<string, unknown>[]) {
|
||||
function makeJsonlContent(lines: Record<string, unknown>[]): string {
|
||||
return lines.map((l) => JSON.stringify(l)).join("\n");
|
||||
}
|
||||
|
||||
function makeIndex(entries: Record<string, unknown>[]): string {
|
||||
return JSON.stringify({ version: 1, entries });
|
||||
}
|
||||
|
||||
describe("session-discovery", () => {
|
||||
it("discovers sessions from { version, entries } format", async () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-${Date.now()}`);
|
||||
async function makeTmpProject(
|
||||
suffix: string
|
||||
): Promise<{ tmpDir: string; projectDir: string; cachePath: string; cleanup: () => Promise<void> }> {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-${suffix}-${Date.now()}`);
|
||||
const projectDir = path.join(tmpDir, "test-project");
|
||||
const cachePath = path.join(tmpDir, ".cache", "metadata.json");
|
||||
await fs.mkdir(projectDir, { recursive: true });
|
||||
return {
|
||||
tmpDir,
|
||||
projectDir,
|
||||
cachePath,
|
||||
cleanup: () => fs.rm(tmpDir, { recursive: true }),
|
||||
};
|
||||
}
|
||||
|
||||
const sessionPath = path.join(projectDir, "sess-001.jsonl");
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
makeIndex([
|
||||
describe("session-discovery", () => {
|
||||
beforeEach(() => {
|
||||
// Reset global cache between tests to prevent cross-contamination
|
||||
setCache(new MetadataCache(path.join(os.tmpdir(), `sv-cache-${Date.now()}.json`)));
|
||||
});
|
||||
|
||||
it("discovers sessions from .jsonl files without index", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("no-index");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
sessionId: "sess-001",
|
||||
fullPath: sessionPath,
|
||||
summary: "Test session",
|
||||
firstPrompt: "Hello",
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
messageCount: 5,
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello world" },
|
||||
uuid: "u-1",
|
||||
timestamp: "2025-10-15T10:00:00Z",
|
||||
},
|
||||
])
|
||||
);
|
||||
{
|
||||
type: "assistant",
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "Hi there" }],
|
||||
},
|
||||
uuid: "a-1",
|
||||
timestamp: "2025-10-15T10:01:00Z",
|
||||
},
|
||||
]);
|
||||
|
||||
await fs.writeFile(path.join(projectDir, "sess-001.jsonl"), content);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].id).toBe("sess-001");
|
||||
expect(sessions[0].summary).toBe("Test session");
|
||||
expect(sessions[0].project).toBe("test-project");
|
||||
expect(sessions[0].messageCount).toBe(5);
|
||||
expect(sessions[0].path).toBe(sessionPath);
|
||||
expect(sessions[0].messageCount).toBe(2);
|
||||
expect(sessions[0].firstPrompt).toBe("Hello world");
|
||||
expect(sessions[0].path).toBe(path.join(projectDir, "sess-001.jsonl"));
|
||||
|
||||
await fs.rm(tmpDir, { recursive: true });
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("also handles legacy raw array format", async () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-legacy-${Date.now()}`);
|
||||
const projectDir = path.join(tmpDir, "legacy-project");
|
||||
await fs.mkdir(projectDir, { recursive: true });
|
||||
it("timestamps come from stat, not JSONL content", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("stat-times");
|
||||
|
||||
// Raw array (not wrapped in { version, entries })
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
JSON.stringify([
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
sessionId: "legacy-001",
|
||||
summary: "Legacy format",
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello" },
|
||||
uuid: "u-1",
|
||||
timestamp: "2020-01-01T00:00:00Z",
|
||||
},
|
||||
])
|
||||
);
|
||||
]);
|
||||
|
||||
const filePath = path.join(projectDir, "sess-stat.jsonl");
|
||||
await fs.writeFile(filePath, content);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].id).toBe("legacy-001");
|
||||
|
||||
await fs.rm(tmpDir, { recursive: true });
|
||||
// created and modified should be from stat (recent), not from the 2020 timestamp
|
||||
const createdDate = new Date(sessions[0].created);
|
||||
const now = new Date();
|
||||
const diffMs = now.getTime() - createdDate.getTime();
|
||||
expect(diffMs).toBeLessThan(60_000); // within last minute
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("silently skips files deleted between readdir and stat", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("toctou");
|
||||
|
||||
// Write a session, discover will find it
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Survives" },
|
||||
uuid: "u-1",
|
||||
},
|
||||
]);
|
||||
await fs.writeFile(path.join(projectDir, "survivor.jsonl"), content);
|
||||
|
||||
// Write and immediately delete another
|
||||
await fs.writeFile(path.join(projectDir, "ghost.jsonl"), content);
|
||||
await fs.unlink(path.join(projectDir, "ghost.jsonl"));
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].id).toBe("survivor");
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("handles missing projects directory gracefully", async () => {
|
||||
@@ -72,21 +124,6 @@ describe("session-discovery", () => {
|
||||
expect(sessions).toEqual([]);
|
||||
});
|
||||
|
||||
it("handles corrupt index files gracefully", async () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-corrupt-${Date.now()}`);
|
||||
const projectDir = path.join(tmpDir, "corrupt-project");
|
||||
await fs.mkdir(projectDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
"not valid json {"
|
||||
);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toEqual([]);
|
||||
|
||||
await fs.rm(tmpDir, { recursive: true });
|
||||
});
|
||||
|
||||
it("aggregates across multiple project directories", async () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-multi-${Date.now()}`);
|
||||
const proj1 = path.join(tmpDir, "project-a");
|
||||
@@ -94,14 +131,25 @@ describe("session-discovery", () => {
|
||||
await fs.mkdir(proj1, { recursive: true });
|
||||
await fs.mkdir(proj2, { recursive: true });
|
||||
|
||||
await fs.writeFile(
|
||||
path.join(proj1, "sessions-index.json"),
|
||||
makeIndex([{ sessionId: "a-001", created: "2025-01-01T00:00:00Z", modified: "2025-01-01T00:00:00Z" }])
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(proj2, "sessions-index.json"),
|
||||
makeIndex([{ sessionId: "b-001", created: "2025-01-02T00:00:00Z", modified: "2025-01-02T00:00:00Z" }])
|
||||
);
|
||||
const contentA = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Project A" },
|
||||
uuid: "u-a",
|
||||
timestamp: "2025-01-01T00:00:00Z",
|
||||
},
|
||||
]);
|
||||
const contentB = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Project B" },
|
||||
uuid: "u-b",
|
||||
timestamp: "2025-01-02T00:00:00Z",
|
||||
},
|
||||
]);
|
||||
|
||||
await fs.writeFile(path.join(proj1, "a-001.jsonl"), contentA);
|
||||
await fs.writeFile(path.join(proj2, "b-001.jsonl"), contentB);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(2);
|
||||
@@ -112,93 +160,299 @@ describe("session-discovery", () => {
|
||||
await fs.rm(tmpDir, { recursive: true });
|
||||
});
|
||||
|
||||
it("rejects paths with traversal segments", async () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-traversal-${Date.now()}`);
|
||||
const projectDir = path.join(tmpDir, "traversal-project");
|
||||
await fs.mkdir(projectDir, { recursive: true });
|
||||
it("ignores non-.jsonl files in project directories", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("filter-ext");
|
||||
|
||||
const goodPath = path.join(projectDir, "good-001.jsonl");
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello" },
|
||||
uuid: "u-1",
|
||||
},
|
||||
]);
|
||||
|
||||
await fs.writeFile(path.join(projectDir, "session.jsonl"), content);
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
'{"version":1,"entries":[]}'
|
||||
);
|
||||
await fs.writeFile(path.join(projectDir, "notes.txt"), "notes");
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].id).toBe("session");
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("duration computed from JSONL timestamps", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("duration");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Start" },
|
||||
uuid: "u-1",
|
||||
timestamp: "2025-10-15T10:00:00Z",
|
||||
},
|
||||
{
|
||||
type: "assistant",
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "End" }],
|
||||
},
|
||||
uuid: "a-1",
|
||||
timestamp: "2025-10-15T10:30:00Z",
|
||||
},
|
||||
]);
|
||||
|
||||
await fs.writeFile(path.join(projectDir, "sess-dur.jsonl"), content);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
// 30 minutes = 1800000 ms
|
||||
expect(sessions[0].duration).toBe(1_800_000);
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("handles empty .jsonl files", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("empty");
|
||||
|
||||
await fs.writeFile(path.join(projectDir, "empty.jsonl"), "");
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].id).toBe("empty");
|
||||
expect(sessions[0].messageCount).toBe(0);
|
||||
expect(sessions[0].firstPrompt).toBe("");
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("sorts by modified descending", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("sort");
|
||||
|
||||
const content1 = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "First" },
|
||||
uuid: "u-1",
|
||||
},
|
||||
]);
|
||||
const content2 = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Second" },
|
||||
uuid: "u-2",
|
||||
},
|
||||
]);
|
||||
|
||||
await fs.writeFile(path.join(projectDir, "older.jsonl"), content1);
|
||||
// Small delay to ensure different mtime
|
||||
await new Promise((r) => setTimeout(r, 50));
|
||||
await fs.writeFile(path.join(projectDir, "newer.jsonl"), content2);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(2);
|
||||
expect(sessions[0].id).toBe("newer");
|
||||
expect(sessions[1].id).toBe("older");
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
describe("Tier 1 index validation", () => {
|
||||
it("uses index data when modified matches stat mtime within 1s", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("tier1-hit");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello" },
|
||||
uuid: "u-1",
|
||||
timestamp: "2025-10-15T10:00:00Z",
|
||||
},
|
||||
]);
|
||||
const filePath = path.join(projectDir, "sess-idx.jsonl");
|
||||
await fs.writeFile(filePath, content);
|
||||
|
||||
// Get the actual mtime from the file
|
||||
const stat = await fs.stat(filePath);
|
||||
const mtimeIso = new Date(stat.mtimeMs).toISOString();
|
||||
|
||||
// Write an index with the matching modified timestamp and different metadata
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
makeIndex([
|
||||
{
|
||||
sessionId: "evil-001",
|
||||
fullPath: "/home/ubuntu/../../../etc/passwd",
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
},
|
||||
{
|
||||
sessionId: "evil-002",
|
||||
fullPath: "/home/ubuntu/sessions/not-a-jsonl.txt",
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
},
|
||||
{
|
||||
sessionId: "good-001",
|
||||
fullPath: goodPath,
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
sessionId: "sess-idx",
|
||||
summary: "Index summary",
|
||||
firstPrompt: "Index prompt",
|
||||
messageCount: 99,
|
||||
modified: mtimeIso,
|
||||
created: "2025-10-15T09:00:00Z",
|
||||
},
|
||||
])
|
||||
);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].id).toBe("good-001");
|
||||
// Should use index data (Tier 1 hit)
|
||||
expect(sessions[0].messageCount).toBe(99);
|
||||
expect(sessions[0].summary).toBe("Index summary");
|
||||
expect(sessions[0].firstPrompt).toBe("Index prompt");
|
||||
|
||||
await fs.rm(tmpDir, { recursive: true });
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("rejects absolute paths outside the projects directory", async () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-containment-${Date.now()}`);
|
||||
const projectDir = path.join(tmpDir, "contained-project");
|
||||
await fs.mkdir(projectDir, { recursive: true });
|
||||
it("rejects index data when mtime mismatch > 1s", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("tier1-miss");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Real content" },
|
||||
uuid: "u-1",
|
||||
timestamp: "2025-10-15T10:00:00Z",
|
||||
},
|
||||
]);
|
||||
await fs.writeFile(path.join(projectDir, "sess-stale.jsonl"), content);
|
||||
|
||||
// Write an index with a very old modified timestamp (stale)
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
makeIndex([
|
||||
{
|
||||
sessionId: "escaped-001",
|
||||
fullPath: "/etc/shadow.jsonl",
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
},
|
||||
{
|
||||
sessionId: "escaped-002",
|
||||
fullPath: "/tmp/other-dir/secret.jsonl",
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
sessionId: "sess-stale",
|
||||
summary: "Stale index summary",
|
||||
firstPrompt: "Stale prompt",
|
||||
messageCount: 99,
|
||||
modified: "2020-01-01T00:00:00Z",
|
||||
created: "2020-01-01T00:00:00Z",
|
||||
},
|
||||
])
|
||||
);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(0);
|
||||
expect(sessions).toHaveLength(1);
|
||||
// Should NOT use index data (Tier 1 miss) — falls through to Tier 3
|
||||
expect(sessions[0].messageCount).toBe(1); // Actual parse count
|
||||
expect(sessions[0].firstPrompt).toBe("Real content");
|
||||
|
||||
await fs.rm(tmpDir, { recursive: true });
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("uses fullPath from index entry", async () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `sv-test-fp-${Date.now()}`);
|
||||
const projectDir = path.join(tmpDir, "fp-project");
|
||||
await fs.mkdir(projectDir, { recursive: true });
|
||||
it("skips Tier 1 when entry has no modified field", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("tier1-no-mod");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Real content" },
|
||||
uuid: "u-1",
|
||||
},
|
||||
]);
|
||||
await fs.writeFile(path.join(projectDir, "sess-nomod.jsonl"), content);
|
||||
|
||||
const sessionPath = path.join(projectDir, "fp-001.jsonl");
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
makeIndex([
|
||||
{
|
||||
sessionId: "fp-001",
|
||||
fullPath: sessionPath,
|
||||
created: "2025-10-15T10:00:00Z",
|
||||
modified: "2025-10-15T11:00:00Z",
|
||||
sessionId: "sess-nomod",
|
||||
summary: "Index summary",
|
||||
messageCount: 99,
|
||||
// No modified field
|
||||
},
|
||||
])
|
||||
);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions[0].path).toBe(sessionPath);
|
||||
expect(sessions).toHaveLength(1);
|
||||
// Falls through to Tier 3 parse
|
||||
expect(sessions[0].messageCount).toBe(1);
|
||||
|
||||
await fs.rm(tmpDir, { recursive: true });
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("handles missing sessions-index.json", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("tier1-missing");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "No index" },
|
||||
uuid: "u-1",
|
||||
},
|
||||
]);
|
||||
await fs.writeFile(path.join(projectDir, "sess-noindex.jsonl"), content);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].firstPrompt).toBe("No index");
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("handles corrupt sessions-index.json", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("tier1-corrupt");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Corrupt index" },
|
||||
uuid: "u-1",
|
||||
},
|
||||
]);
|
||||
await fs.writeFile(path.join(projectDir, "sess-corrupt.jsonl"), content);
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
"not valid json {"
|
||||
);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
expect(sessions[0].firstPrompt).toBe("Corrupt index");
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it("timestamps always from stat even on Tier 1 hit", async () => {
|
||||
const { tmpDir, projectDir, cleanup } = await makeTmpProject("tier1-stat-ts");
|
||||
|
||||
const content = makeJsonlContent([
|
||||
{
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello" },
|
||||
uuid: "u-1",
|
||||
},
|
||||
]);
|
||||
const filePath = path.join(projectDir, "sess-ts.jsonl");
|
||||
await fs.writeFile(filePath, content);
|
||||
|
||||
const stat = await fs.stat(filePath);
|
||||
const mtimeIso = new Date(stat.mtimeMs).toISOString();
|
||||
|
||||
await fs.writeFile(
|
||||
path.join(projectDir, "sessions-index.json"),
|
||||
makeIndex([
|
||||
{
|
||||
sessionId: "sess-ts",
|
||||
messageCount: 1,
|
||||
modified: mtimeIso,
|
||||
created: "1990-01-01T00:00:00Z",
|
||||
},
|
||||
])
|
||||
);
|
||||
|
||||
const sessions = await discoverSessions(tmpDir);
|
||||
expect(sessions).toHaveLength(1);
|
||||
|
||||
// created/modified should be from stat (recent), not from index's 1990 date
|
||||
const createdDate = new Date(sessions[0].created);
|
||||
const now = new Date();
|
||||
expect(now.getTime() - createdDate.getTime()).toBeLessThan(60_000);
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
192
tests/unit/session-metadata.test.ts
Normal file
192
tests/unit/session-metadata.test.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { extractSessionMetadata } from "../../src/server/services/session-metadata.js";
|
||||
import { parseSessionContent } from "../../src/server/services/session-parser.js";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
|
||||
describe("session-metadata", () => {
|
||||
it("messageCount matches parseSessionContent on sample-session.jsonl", async () => {
|
||||
const fixturePath = path.join(__dirname, "../fixtures/sample-session.jsonl");
|
||||
const content = await fs.readFile(fixturePath, "utf-8");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
const parsed = parseSessionContent(content);
|
||||
|
||||
expect(meta.messageCount).toBe(parsed.length);
|
||||
});
|
||||
|
||||
it("messageCount matches parseSessionContent on edge-cases.jsonl", async () => {
|
||||
const fixturePath = path.join(__dirname, "../fixtures/edge-cases.jsonl");
|
||||
const content = await fs.readFile(fixturePath, "utf-8");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
const parsed = parseSessionContent(content);
|
||||
|
||||
expect(meta.messageCount).toBe(parsed.length);
|
||||
});
|
||||
|
||||
it("firstPrompt skips system-reminder messages", () => {
|
||||
const content = [
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "<system-reminder>hook output</system-reminder>" },
|
||||
uuid: "u-sr",
|
||||
timestamp: "2025-01-01T00:00:00Z",
|
||||
}),
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "What is the project structure?" },
|
||||
uuid: "u-real",
|
||||
timestamp: "2025-01-01T00:00:01Z",
|
||||
}),
|
||||
].join("\n");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.firstPrompt).toBe("What is the project structure?");
|
||||
});
|
||||
|
||||
it("firstPrompt truncated to 200 chars", () => {
|
||||
const longMessage = "a".repeat(300);
|
||||
const content = JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: longMessage },
|
||||
uuid: "u-long",
|
||||
timestamp: "2025-01-01T00:00:00Z",
|
||||
});
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.firstPrompt).toHaveLength(200);
|
||||
expect(meta.firstPrompt).toBe("a".repeat(200));
|
||||
});
|
||||
|
||||
it("summary captures the LAST summary line", () => {
|
||||
const content = [
|
||||
JSON.stringify({ type: "summary", summary: "First summary", uuid: "s-1" }),
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello" },
|
||||
uuid: "u-1",
|
||||
}),
|
||||
JSON.stringify({ type: "summary", summary: "Last summary", uuid: "s-2" }),
|
||||
].join("\n");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.summary).toBe("Last summary");
|
||||
});
|
||||
|
||||
it("timestamps captured from first and last lines with timestamps", () => {
|
||||
const content = [
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello" },
|
||||
uuid: "u-1",
|
||||
timestamp: "2025-01-01T10:00:00Z",
|
||||
}),
|
||||
JSON.stringify({
|
||||
type: "assistant",
|
||||
message: { role: "assistant", content: "Hi" },
|
||||
uuid: "a-1",
|
||||
timestamp: "2025-01-01T10:05:00Z",
|
||||
}),
|
||||
JSON.stringify({
|
||||
type: "summary",
|
||||
summary: "Session done",
|
||||
uuid: "s-1",
|
||||
}),
|
||||
].join("\n");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.firstTimestamp).toBe("2025-01-01T10:00:00Z");
|
||||
expect(meta.lastTimestamp).toBe("2025-01-01T10:05:00Z");
|
||||
});
|
||||
|
||||
it("empty content returns zero counts and empty strings", () => {
|
||||
const meta = extractSessionMetadata("");
|
||||
expect(meta.messageCount).toBe(0);
|
||||
expect(meta.firstPrompt).toBe("");
|
||||
expect(meta.summary).toBe("");
|
||||
expect(meta.firstTimestamp).toBe("");
|
||||
expect(meta.lastTimestamp).toBe("");
|
||||
expect(meta.parseErrors).toBe(0);
|
||||
});
|
||||
|
||||
it("JSONL with no user messages returns empty firstPrompt", () => {
|
||||
const content = [
|
||||
JSON.stringify({ type: "summary", summary: "No user", uuid: "s-1" }),
|
||||
JSON.stringify({ type: "progress", data: { type: "hook" }, uuid: "p-1" }),
|
||||
].join("\n");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.firstPrompt).toBe("");
|
||||
});
|
||||
|
||||
it("JSONL with all system-reminder users returns empty firstPrompt", () => {
|
||||
const content = [
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "<system-reminder>r1</system-reminder>" },
|
||||
uuid: "u-1",
|
||||
}),
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "<system-reminder>r2</system-reminder>" },
|
||||
uuid: "u-2",
|
||||
}),
|
||||
].join("\n");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.firstPrompt).toBe("");
|
||||
});
|
||||
|
||||
it("single-line JSONL: firstTimestamp equals lastTimestamp", () => {
|
||||
const content = JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "solo" },
|
||||
uuid: "u-solo",
|
||||
timestamp: "2025-06-15T12:00:00Z",
|
||||
});
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.firstTimestamp).toBe("2025-06-15T12:00:00Z");
|
||||
expect(meta.lastTimestamp).toBe("2025-06-15T12:00:00Z");
|
||||
});
|
||||
|
||||
it("reports parseErrors from malformed lines", () => {
|
||||
const content = [
|
||||
"broken json",
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "ok" },
|
||||
uuid: "u-1",
|
||||
}),
|
||||
"{truncated",
|
||||
].join("\n");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.parseErrors).toBe(2);
|
||||
expect(meta.messageCount).toBe(1);
|
||||
});
|
||||
|
||||
it("skips array user content for firstPrompt (only captures string content)", () => {
|
||||
const content = [
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [
|
||||
{ type: "tool_result", tool_use_id: "t1", content: "result" },
|
||||
],
|
||||
},
|
||||
uuid: "u-arr",
|
||||
}),
|
||||
JSON.stringify({
|
||||
type: "user",
|
||||
message: { role: "user", content: "Second prompt as string" },
|
||||
uuid: "u-str",
|
||||
}),
|
||||
].join("\n");
|
||||
|
||||
const meta = extractSessionMetadata(content);
|
||||
expect(meta.firstPrompt).toBe("Second prompt as string");
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,11 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { parseSessionContent } from "../../src/server/services/session-parser.js";
|
||||
import {
|
||||
parseSessionContent,
|
||||
forEachJsonlLine,
|
||||
classifyLine,
|
||||
countMessagesForLine,
|
||||
} from "../../src/server/services/session-parser.js";
|
||||
import type { RawLine } from "../../src/server/services/session-parser.js";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
|
||||
@@ -319,4 +325,255 @@ describe("session-parser", () => {
|
||||
const msgs = parseSessionContent(line);
|
||||
expect(msgs[0].progressSubtype).toBe("hook");
|
||||
});
|
||||
|
||||
describe("forEachJsonlLine", () => {
|
||||
it("skips malformed JSON lines and reports parseErrors count", () => {
|
||||
const content = [
|
||||
"not valid json",
|
||||
JSON.stringify({ type: "user", message: { role: "user", content: "Hello" } }),
|
||||
"{broken}",
|
||||
].join("\n");
|
||||
|
||||
const lines: RawLine[] = [];
|
||||
const result = forEachJsonlLine(content, (parsed) => {
|
||||
lines.push(parsed);
|
||||
});
|
||||
|
||||
expect(lines).toHaveLength(1);
|
||||
expect(result.parseErrors).toBe(2);
|
||||
});
|
||||
|
||||
it("skips empty and whitespace-only lines without incrementing parseErrors", () => {
|
||||
const content = [
|
||||
"",
|
||||
" ",
|
||||
JSON.stringify({ type: "summary", summary: "test" }),
|
||||
"\t",
|
||||
"",
|
||||
].join("\n");
|
||||
|
||||
const lines: RawLine[] = [];
|
||||
const result = forEachJsonlLine(content, (parsed) => {
|
||||
lines.push(parsed);
|
||||
});
|
||||
|
||||
expect(lines).toHaveLength(1);
|
||||
expect(result.parseErrors).toBe(0);
|
||||
});
|
||||
|
||||
it("returns parseErrors 0 for empty content", () => {
|
||||
const lines: RawLine[] = [];
|
||||
const result = forEachJsonlLine("", (parsed) => {
|
||||
lines.push(parsed);
|
||||
});
|
||||
|
||||
expect(lines).toHaveLength(0);
|
||||
expect(result.parseErrors).toBe(0);
|
||||
});
|
||||
|
||||
it("processes content without trailing newline", () => {
|
||||
const content = JSON.stringify({ type: "summary", summary: "no trailing newline" });
|
||||
|
||||
const lines: RawLine[] = [];
|
||||
forEachJsonlLine(content, (parsed) => {
|
||||
lines.push(parsed);
|
||||
});
|
||||
|
||||
expect(lines).toHaveLength(1);
|
||||
expect(lines[0].summary).toBe("no trailing newline");
|
||||
});
|
||||
|
||||
it("passes correct lineIndex to callback", () => {
|
||||
const content = [
|
||||
JSON.stringify({ type: "user", message: { role: "user", content: "first" } }),
|
||||
"",
|
||||
JSON.stringify({ type: "summary", summary: "third" }),
|
||||
].join("\n");
|
||||
|
||||
const indices: number[] = [];
|
||||
forEachJsonlLine(content, (_parsed, lineIndex) => {
|
||||
indices.push(lineIndex);
|
||||
});
|
||||
|
||||
expect(indices).toEqual([0, 2]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("classifyLine", () => {
|
||||
it("returns correct classification for each type", () => {
|
||||
expect(classifyLine({ type: "progress" })).toBe("progress");
|
||||
expect(classifyLine({ type: "file-history-snapshot" })).toBe("file-history-snapshot");
|
||||
expect(classifyLine({ type: "summary" })).toBe("summary");
|
||||
expect(classifyLine({ type: "system" })).toBe("system");
|
||||
expect(classifyLine({ type: "queue-operation" })).toBe("queue-operation");
|
||||
expect(classifyLine({ type: "user", message: { role: "user" } })).toBe("user");
|
||||
expect(classifyLine({ type: "assistant", message: { role: "assistant" } })).toBe("assistant");
|
||||
expect(classifyLine({})).toBe("unknown");
|
||||
});
|
||||
|
||||
it("classifies by message.role when type is missing", () => {
|
||||
expect(classifyLine({ message: { role: "user" } })).toBe("user");
|
||||
expect(classifyLine({ message: { role: "assistant" } })).toBe("assistant");
|
||||
});
|
||||
|
||||
it("returns unknown for missing type and no role", () => {
|
||||
expect(classifyLine({ message: {} })).toBe("unknown");
|
||||
expect(classifyLine({ uuid: "orphan" })).toBe("unknown");
|
||||
});
|
||||
});
|
||||
|
||||
describe("countMessagesForLine", () => {
|
||||
it("returns 1 for user string message", () => {
|
||||
const line: RawLine = {
|
||||
type: "user",
|
||||
message: { role: "user", content: "Hello" },
|
||||
};
|
||||
expect(countMessagesForLine(line)).toBe(1);
|
||||
});
|
||||
|
||||
it("matches extractMessages length for user array with tool_result and text", () => {
|
||||
const line: RawLine = {
|
||||
type: "user",
|
||||
message: {
|
||||
role: "user",
|
||||
content: [
|
||||
{ type: "tool_result", tool_use_id: "t1", content: "result" },
|
||||
{ type: "text", text: "description" },
|
||||
],
|
||||
},
|
||||
uuid: "u-arr",
|
||||
};
|
||||
const msgs = parseSessionContent(JSON.stringify(line));
|
||||
expect(countMessagesForLine(line)).toBe(msgs.length);
|
||||
expect(countMessagesForLine(line)).toBe(2);
|
||||
});
|
||||
|
||||
it("matches extractMessages length for assistant array with thinking/text/tool_use", () => {
|
||||
const line: RawLine = {
|
||||
type: "assistant",
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [
|
||||
{ type: "thinking", thinking: "hmm" },
|
||||
{ type: "text", text: "response" },
|
||||
{ type: "tool_use", name: "Read", input: { file_path: "/x" } },
|
||||
],
|
||||
},
|
||||
uuid: "a-arr",
|
||||
};
|
||||
const msgs = parseSessionContent(JSON.stringify(line));
|
||||
expect(countMessagesForLine(line)).toBe(msgs.length);
|
||||
expect(countMessagesForLine(line)).toBe(3);
|
||||
});
|
||||
|
||||
it("returns 1 for progress/file-history-snapshot/summary", () => {
|
||||
expect(countMessagesForLine({ type: "progress", data: { type: "hook" } })).toBe(1);
|
||||
expect(countMessagesForLine({ type: "file-history-snapshot", snapshot: {} })).toBe(1);
|
||||
expect(countMessagesForLine({ type: "summary", summary: "test" })).toBe(1);
|
||||
});
|
||||
|
||||
it("returns 0 for system/queue-operation", () => {
|
||||
expect(countMessagesForLine({ type: "system", subtype: "turn_duration" })).toBe(0);
|
||||
expect(countMessagesForLine({ type: "queue-operation" })).toBe(0);
|
||||
});
|
||||
|
||||
it("returns 0 for unknown type", () => {
|
||||
expect(countMessagesForLine({})).toBe(0);
|
||||
expect(countMessagesForLine({ type: "something-new" })).toBe(0);
|
||||
});
|
||||
|
||||
it("returns 0 for user message with empty content array", () => {
|
||||
const line: RawLine = {
|
||||
type: "user",
|
||||
message: { role: "user", content: [] },
|
||||
};
|
||||
expect(countMessagesForLine(line)).toBe(0);
|
||||
});
|
||||
|
||||
it("returns 0 for user message with undefined content", () => {
|
||||
const line: RawLine = {
|
||||
type: "user",
|
||||
message: { role: "user" },
|
||||
};
|
||||
expect(countMessagesForLine(line)).toBe(0);
|
||||
});
|
||||
|
||||
it("only counts known block types in assistant arrays", () => {
|
||||
const line: RawLine = {
|
||||
type: "assistant",
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [
|
||||
{ type: "thinking", thinking: "hmm" },
|
||||
{ type: "unknown_block" },
|
||||
{ type: "text", text: "hi" },
|
||||
],
|
||||
},
|
||||
};
|
||||
expect(countMessagesForLine(line)).toBe(2);
|
||||
});
|
||||
|
||||
it("returns 1 for assistant string content", () => {
|
||||
const line: RawLine = {
|
||||
type: "assistant",
|
||||
message: { role: "assistant", content: "direct string" },
|
||||
};
|
||||
expect(countMessagesForLine(line)).toBe(1);
|
||||
});
|
||||
|
||||
it("counts user text with system-reminder as 1 (reclassified but still counted)", () => {
|
||||
const line: RawLine = {
|
||||
type: "user",
|
||||
message: { role: "user", content: "<system-reminder>Some reminder</system-reminder>" },
|
||||
uuid: "u-sr-parity",
|
||||
};
|
||||
const msgs = parseSessionContent(JSON.stringify(line));
|
||||
expect(countMessagesForLine(line)).toBe(msgs.length);
|
||||
expect(countMessagesForLine(line)).toBe(1);
|
||||
});
|
||||
|
||||
it("handles truncated JSON (crash mid-write)", () => {
|
||||
const content = [
|
||||
JSON.stringify({ type: "user", message: { role: "user", content: "ok" }, uuid: "u-ok" }),
|
||||
'{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"trun',
|
||||
].join("\n");
|
||||
|
||||
const lines: RawLine[] = [];
|
||||
const result = forEachJsonlLine(content, (parsed) => {
|
||||
lines.push(parsed);
|
||||
});
|
||||
expect(lines).toHaveLength(1);
|
||||
expect(result.parseErrors).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("parser parity: fixture integration", () => {
|
||||
it("countMessagesForLine sum matches parseSessionContent on sample-session.jsonl", async () => {
|
||||
const fixturePath = path.join(__dirname, "../fixtures/sample-session.jsonl");
|
||||
const content = await fs.readFile(fixturePath, "utf-8");
|
||||
|
||||
const parsedMessages = parseSessionContent(content);
|
||||
|
||||
let countSum = 0;
|
||||
forEachJsonlLine(content, (parsed) => {
|
||||
countSum += countMessagesForLine(parsed);
|
||||
});
|
||||
|
||||
expect(countSum).toBe(parsedMessages.length);
|
||||
});
|
||||
|
||||
it("countMessagesForLine sum matches parseSessionContent on edge-cases.jsonl", async () => {
|
||||
const fixturePath = path.join(__dirname, "../fixtures/edge-cases.jsonl");
|
||||
const content = await fs.readFile(fixturePath, "utf-8");
|
||||
|
||||
const parsedMessages = parseSessionContent(content);
|
||||
|
||||
let countSum = 0;
|
||||
forEachJsonlLine(content, (parsed) => {
|
||||
countSum += countMessagesForLine(parsed);
|
||||
});
|
||||
|
||||
expect(countSum).toBe(parsedMessages.length);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user