feat(server): add conversation mtime tracking for real-time updates

Add conversation_mtime_ns field to session state that tracks the actual
modification time of conversation files. This enables more responsive
dashboard updates by detecting changes that occur between hook events
(e.g., during streaming tool execution).

Changes:
- state.py: Add _get_conversation_mtime() to stat conversation files
  and include mtime_ns in session payloads when available
- conversation.py: Add stable message IDs (claude-{session_id[:8]}-{n} and
  codex-{session_id[:8]}-{n} format) for React key stability and message
  deduplication
- control.py: Fix FIFO eviction for dismissed_codex_ids - set.pop()
  removes arbitrary element, now uses dict with insertion-order iteration
- context.py: Update dismissed_codex_ids type from set to dict

The mtime approach complements existing last_event_at tracking:
- last_event_at: Changes on hook events (session boundaries)
- conversation_mtime_ns: Changes on every file write (real-time)

Dashboard can now detect mid-session conversation updates without
waiting for the next hook event.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
teernisse
2026-02-26 11:40:42 -05:00
parent fa1ad4b22b
commit dcbaf12f07
4 changed files with 61 additions and 4 deletions

View File

@@ -16,10 +16,11 @@ class SessionControlMixin:
safe_id = os.path.basename(session_id)
session_file = SESSIONS_DIR / f"{safe_id}.json"
# Track dismissed Codex sessions to prevent re-discovery
# Evict oldest entries if set is full (prevents unbounded growth)
# Evict oldest entries via FIFO (dict maintains insertion order in Python 3.7+)
while len(_dismissed_codex_ids) >= _DISMISSED_MAX:
_dismissed_codex_ids.pop()
_dismissed_codex_ids.add(safe_id)
oldest_key = next(iter(_dismissed_codex_ids))
del _dismissed_codex_ids[oldest_key]
_dismissed_codex_ids[safe_id] = True
session_file.unlink(missing_ok=True)
self._send_json(200, {"ok": True})

View File

@@ -39,6 +39,7 @@ class ConversationMixin:
def _parse_claude_conversation(self, session_id, project_dir):
"""Parse Claude Code JSONL conversation format."""
messages = []
msg_id = 0
conv_file = self._get_claude_conversation_file(session_id, project_dir)
@@ -58,10 +59,12 @@ class ConversationMixin:
# Only include actual human messages (strings), not tool results (arrays)
if content and isinstance(content, str):
messages.append({
"id": f"claude-{session_id[:8]}-{msg_id}",
"role": "user",
"content": content,
"timestamp": entry.get("timestamp", ""),
})
msg_id += 1
elif msg_type == "assistant":
# Assistant messages have structured content
@@ -90,6 +93,7 @@ class ConversationMixin:
text_parts.append(part)
if text_parts or tool_calls or thinking_parts:
msg = {
"id": f"claude-{session_id[:8]}-{msg_id}",
"role": "assistant",
"content": "\n".join(text_parts) if text_parts else "",
"timestamp": entry.get("timestamp", ""),
@@ -99,6 +103,7 @@ class ConversationMixin:
if thinking_parts:
msg["thinking"] = "\n\n".join(thinking_parts)
messages.append(msg)
msg_id += 1
except json.JSONDecodeError:
continue
@@ -117,6 +122,7 @@ class ConversationMixin:
"""
messages = []
pending_tool_calls = [] # Accumulate tool calls to attach to next assistant message
msg_id = 0
conv_file = self._find_codex_transcript_file(session_id)
@@ -161,19 +167,23 @@ class ConversationMixin:
# Flush any pending tool calls first
if pending_tool_calls:
messages.append({
"id": f"codex-{session_id[:8]}-{msg_id}",
"role": "assistant",
"content": "",
"tool_calls": pending_tool_calls,
"timestamp": timestamp,
})
msg_id += 1
pending_tool_calls = []
# Add thinking as assistant message
messages.append({
"id": f"codex-{session_id[:8]}-{msg_id}",
"role": "assistant",
"content": "",
"thinking": "\n".join(thinking_text),
"timestamp": timestamp,
})
msg_id += 1
continue
# Handle message (user/assistant text)
@@ -208,19 +218,24 @@ class ConversationMixin:
# Flush any pending tool calls before user message
if pending_tool_calls:
messages.append({
"id": f"codex-{session_id[:8]}-{msg_id}",
"role": "assistant",
"content": "",
"tool_calls": pending_tool_calls,
"timestamp": timestamp,
})
msg_id += 1
pending_tool_calls = []
messages.append({
"id": f"codex-{session_id[:8]}-{msg_id}",
"role": "user",
"content": "\n".join(text_parts),
"timestamp": timestamp,
})
msg_id += 1
elif role == "assistant":
msg = {
"id": f"codex-{session_id[:8]}-{msg_id}",
"role": "assistant",
"content": "\n".join(text_parts) if text_parts else "",
"timestamp": timestamp,
@@ -231,6 +246,7 @@ class ConversationMixin:
pending_tool_calls = []
if text_parts or msg.get("tool_calls"):
messages.append(msg)
msg_id += 1
except json.JSONDecodeError:
continue
@@ -238,6 +254,7 @@ class ConversationMixin:
# Flush any remaining pending tool calls
if pending_tool_calls:
messages.append({
"id": f"codex-{session_id[:8]}-{msg_id}",
"role": "assistant",
"content": "",
"tool_calls": pending_tool_calls,

View File

@@ -3,6 +3,7 @@ import json
import subprocess
import time
from datetime import datetime, timezone
from pathlib import Path
from amc_server.context import (
EVENTS_DIR,
@@ -119,6 +120,11 @@ class StateMixin:
if context_usage:
data["context_usage"] = context_usage
# Track conversation file mtime for real-time update detection
conv_mtime = self._get_conversation_mtime(data)
if conv_mtime:
data["conversation_mtime_ns"] = conv_mtime
sessions.append(data)
except (json.JSONDecodeError, OSError):
continue
@@ -166,6 +172,38 @@ class StateMixin:
return None # Return None on error (don't clean up if we can't verify)
def _get_conversation_mtime(self, session_data):
"""Get the conversation file's mtime for real-time change detection."""
agent = session_data.get("agent")
if agent == "claude":
conv_file = self._get_claude_conversation_file(
session_data.get("session_id", ""),
session_data.get("project_dir", ""),
)
if conv_file:
try:
return conv_file.stat().st_mtime_ns
except OSError:
pass
elif agent == "codex":
transcript_path = session_data.get("transcript_path", "")
if transcript_path:
try:
return Path(transcript_path).stat().st_mtime_ns
except OSError:
pass
# Fallback to discovery
transcript_file = self._find_codex_transcript_file(session_data.get("session_id", ""))
if transcript_file:
try:
return transcript_file.stat().st_mtime_ns
except OSError:
pass
return None
def _cleanup_stale(self, sessions):
"""Remove orphan event logs >24h and stale 'starting' sessions >1h."""
active_ids = {s.get("session_id") for s in sessions if s.get("session_id")}