refactor(server): extract context.py into focused modules
Split the monolithic context.py (117 lines) into five purpose-specific modules following single-responsibility principle: - config.py: Server-level constants (DATA_DIR, SESSIONS_DIR, PORT, STALE_EVENT_AGE, _state_lock) - agents.py: Agent-specific paths and caches (CLAUDE_PROJECTS_DIR, CODEX_SESSIONS_DIR, discovery caches) - auth.py: Authentication token generation/validation for spawn endpoint - spawn_config.py: Spawn feature configuration (PENDING_SPAWNS_DIR, rate limiting, projects watcher thread) - zellij.py: Zellij binary resolution and session management constants This refactoring improves: - Code navigation: Find relevant constants by domain, not alphabetically - Testing: Each module can be tested in isolation - Import clarity: Mixins import only what they need - Future maintenance: Changes to one domain don't risk breaking others All mixins updated to import from new module locations. Tests updated to use new import paths. Includes PROPOSED_CODE_FILE_REORGANIZATION_PLAN.md documenting the rationale and mapping from old to new locations. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
28
amc_server/agents.py
Normal file
28
amc_server/agents.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""Agent-specific paths, caches, and constants for Claude/Codex discovery."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
# Claude Code conversation directory
|
||||
CLAUDE_PROJECTS_DIR = Path.home() / ".claude" / "projects"
|
||||
|
||||
# Codex conversation directory
|
||||
CODEX_SESSIONS_DIR = Path.home() / ".codex" / "sessions"
|
||||
|
||||
# Only discover recently-active Codex sessions (10 minutes)
|
||||
CODEX_ACTIVE_WINDOW = 600
|
||||
|
||||
# Cache for Codex pane info (avoid running pgrep/ps/lsof on every request)
|
||||
_codex_pane_cache = {"pid_info": {}, "cwd_map": {}, "expires": 0}
|
||||
|
||||
# Cache for parsed context usage by transcript file path + mtime/size
|
||||
_context_usage_cache = {}
|
||||
_CONTEXT_CACHE_MAX = 100
|
||||
|
||||
# Cache mapping Codex session IDs to transcript paths (or None when missing)
|
||||
_codex_transcript_cache = {}
|
||||
_CODEX_CACHE_MAX = 200
|
||||
|
||||
# Codex sessions dismissed during this server lifetime (prevents re-discovery)
|
||||
# Uses dict (not set) for O(1) lookup + FIFO eviction via insertion order (Python 3.7+)
|
||||
_dismissed_codex_ids = {}
|
||||
_DISMISSED_MAX = 500
|
||||
18
amc_server/auth.py
Normal file
18
amc_server/auth.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Auth token generation and validation for spawn endpoint security."""
|
||||
|
||||
import secrets
|
||||
|
||||
# Auth token for spawn endpoint
|
||||
_auth_token: str = ''
|
||||
|
||||
|
||||
def generate_auth_token():
|
||||
"""Generate a one-time auth token for this server instance."""
|
||||
global _auth_token
|
||||
_auth_token = secrets.token_urlsafe(32)
|
||||
return _auth_token
|
||||
|
||||
|
||||
def validate_auth_token(request_token: str) -> bool:
|
||||
"""Validate the Authorization header token."""
|
||||
return request_token == f'Bearer {_auth_token}'
|
||||
20
amc_server/config.py
Normal file
20
amc_server/config.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""Server-level constants: paths, port, timeouts, state lock."""
|
||||
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
# Runtime data lives in XDG data dir
|
||||
DATA_DIR = Path.home() / ".local" / "share" / "amc"
|
||||
SESSIONS_DIR = DATA_DIR / "sessions"
|
||||
EVENTS_DIR = DATA_DIR / "events"
|
||||
|
||||
# Source files live in project directory (relative to this module)
|
||||
PROJECT_DIR = Path(__file__).resolve().parent.parent
|
||||
DASHBOARD_DIR = PROJECT_DIR / "dashboard"
|
||||
|
||||
PORT = 7400
|
||||
STALE_EVENT_AGE = 86400 # 24 hours in seconds
|
||||
STALE_STARTING_AGE = 3600 # 1 hour - sessions stuck in "starting" are orphans
|
||||
|
||||
# Serialize state collection because it mutates session files/caches.
|
||||
_state_lock = threading.Lock()
|
||||
@@ -1,117 +0,0 @@
|
||||
import secrets
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
import threading
|
||||
|
||||
# Claude Code conversation directory
|
||||
CLAUDE_PROJECTS_DIR = Path.home() / ".claude" / "projects"
|
||||
|
||||
# Codex conversation directory
|
||||
CODEX_SESSIONS_DIR = Path.home() / ".codex" / "sessions"
|
||||
|
||||
# Plugin path for zellij-send-keys
|
||||
ZELLIJ_PLUGIN = Path.home() / ".config" / "zellij" / "plugins" / "zellij-send-keys.wasm"
|
||||
|
||||
|
||||
def _resolve_zellij_bin():
|
||||
"""Resolve zellij binary even when PATH is minimal (eg launchctl)."""
|
||||
from_path = shutil.which("zellij")
|
||||
if from_path:
|
||||
return from_path
|
||||
|
||||
common_paths = (
|
||||
"/opt/homebrew/bin/zellij", # Apple Silicon Homebrew
|
||||
"/usr/local/bin/zellij", # Intel Homebrew
|
||||
"/usr/bin/zellij",
|
||||
)
|
||||
for candidate in common_paths:
|
||||
p = Path(candidate)
|
||||
if p.exists() and p.is_file():
|
||||
return str(p)
|
||||
return "zellij" # Fallback for explicit error reporting by subprocess
|
||||
|
||||
|
||||
ZELLIJ_BIN = _resolve_zellij_bin()
|
||||
|
||||
# Runtime data lives in XDG data dir
|
||||
DATA_DIR = Path.home() / ".local" / "share" / "amc"
|
||||
SESSIONS_DIR = DATA_DIR / "sessions"
|
||||
EVENTS_DIR = DATA_DIR / "events"
|
||||
|
||||
# Source files live in project directory (relative to this module)
|
||||
PROJECT_DIR = Path(__file__).resolve().parent.parent
|
||||
DASHBOARD_DIR = PROJECT_DIR / "dashboard"
|
||||
|
||||
PORT = 7400
|
||||
STALE_EVENT_AGE = 86400 # 24 hours in seconds
|
||||
STALE_STARTING_AGE = 3600 # 1 hour - sessions stuck in "starting" are orphans
|
||||
CODEX_ACTIVE_WINDOW = 600 # 10 minutes - only discover recently-active Codex sessions
|
||||
|
||||
# Cache for Zellij session list (avoid calling zellij on every request)
|
||||
_zellij_cache = {"sessions": None, "expires": 0}
|
||||
|
||||
# Cache for Codex pane info (avoid running pgrep/ps/lsof on every request)
|
||||
_codex_pane_cache = {"pid_info": {}, "cwd_map": {}, "expires": 0}
|
||||
|
||||
# Cache for parsed context usage by transcript file path + mtime/size
|
||||
# Limited to prevent unbounded memory growth
|
||||
_context_usage_cache = {}
|
||||
_CONTEXT_CACHE_MAX = 100
|
||||
|
||||
# Cache mapping Codex session IDs to transcript paths (or None when missing)
|
||||
_codex_transcript_cache = {}
|
||||
_CODEX_CACHE_MAX = 200
|
||||
|
||||
# Codex sessions dismissed during this server lifetime (prevents re-discovery)
|
||||
# Uses dict (not set) for O(1) lookup + FIFO eviction via insertion order (Python 3.7+)
|
||||
_dismissed_codex_ids = {}
|
||||
_DISMISSED_MAX = 500
|
||||
|
||||
# Serialize state collection because it mutates session files/caches.
|
||||
_state_lock = threading.Lock()
|
||||
|
||||
# Projects directory for spawning agents
|
||||
PROJECTS_DIR = Path.home() / 'projects'
|
||||
|
||||
# Default Zellij session for spawning
|
||||
ZELLIJ_SESSION = 'infra'
|
||||
|
||||
# Lock for serializing spawn operations (prevents Zellij race conditions)
|
||||
_spawn_lock = threading.Lock()
|
||||
|
||||
# Rate limiting: track last spawn time per project (prevents spam)
|
||||
_spawn_timestamps: dict[str, float] = {}
|
||||
SPAWN_COOLDOWN_SEC = 10.0
|
||||
|
||||
# Auth token for spawn endpoint
|
||||
_auth_token: str = ''
|
||||
|
||||
|
||||
def generate_auth_token():
|
||||
"""Generate a one-time auth token for this server instance."""
|
||||
global _auth_token
|
||||
_auth_token = secrets.token_urlsafe(32)
|
||||
return _auth_token
|
||||
|
||||
|
||||
def validate_auth_token(request_token: str) -> bool:
|
||||
"""Validate the Authorization header token."""
|
||||
return request_token == f'Bearer {_auth_token}'
|
||||
|
||||
|
||||
def start_projects_watcher():
|
||||
"""Start background thread to refresh projects cache every 5 minutes."""
|
||||
import logging
|
||||
from amc_server.mixins.spawn import load_projects_cache
|
||||
|
||||
def _watch_loop():
|
||||
import time
|
||||
while True:
|
||||
try:
|
||||
time.sleep(300)
|
||||
load_projects_cache()
|
||||
except Exception:
|
||||
logging.exception('Projects cache refresh failed')
|
||||
|
||||
thread = threading.Thread(target=_watch_loop, daemon=True)
|
||||
thread.start()
|
||||
@@ -3,7 +3,9 @@ import os
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from amc_server.context import SESSIONS_DIR, ZELLIJ_BIN, ZELLIJ_PLUGIN, _DISMISSED_MAX, _dismissed_codex_ids
|
||||
from amc_server.agents import _DISMISSED_MAX, _dismissed_codex_ids
|
||||
from amc_server.config import SESSIONS_DIR
|
||||
from amc_server.zellij import ZELLIJ_BIN, ZELLIJ_PLUGIN
|
||||
from amc_server.logging_utils import LOGGER
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
from amc_server.context import EVENTS_DIR
|
||||
from amc_server.config import EVENTS_DIR
|
||||
|
||||
|
||||
class ConversationMixin:
|
||||
|
||||
@@ -5,18 +5,99 @@ import subprocess
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from amc_server.context import (
|
||||
from amc_server.agents import (
|
||||
CODEX_ACTIVE_WINDOW,
|
||||
CODEX_SESSIONS_DIR,
|
||||
SESSIONS_DIR,
|
||||
_CODEX_CACHE_MAX,
|
||||
_codex_pane_cache,
|
||||
_codex_transcript_cache,
|
||||
_dismissed_codex_ids,
|
||||
)
|
||||
from amc_server.config import SESSIONS_DIR
|
||||
from amc_server.spawn_config import PENDING_SPAWNS_DIR
|
||||
from amc_server.logging_utils import LOGGER
|
||||
|
||||
|
||||
def _parse_session_timestamp(session_ts):
|
||||
"""Parse Codex session timestamp to Unix time. Returns None on failure."""
|
||||
if not session_ts:
|
||||
return None
|
||||
try:
|
||||
# Codex uses ISO format, possibly with Z suffix or +00:00
|
||||
ts_str = session_ts.replace('Z', '+00:00')
|
||||
dt = datetime.fromisoformat(ts_str)
|
||||
return dt.timestamp()
|
||||
except (ValueError, TypeError, AttributeError):
|
||||
return None
|
||||
|
||||
|
||||
def _match_pending_spawn(session_cwd, session_start_ts):
|
||||
"""Match a Codex session to a pending spawn by CWD and timestamp.
|
||||
|
||||
Args:
|
||||
session_cwd: The CWD of the Codex session
|
||||
session_start_ts: The session's START timestamp (ISO string from Codex metadata)
|
||||
IMPORTANT: Must be session start time, not file mtime, to avoid false
|
||||
matches with pre-existing sessions that were recently active.
|
||||
|
||||
Returns:
|
||||
spawn_id if matched (and deletes the pending file), None otherwise
|
||||
"""
|
||||
if not PENDING_SPAWNS_DIR.exists():
|
||||
return None
|
||||
|
||||
normalized_cwd = os.path.normpath(session_cwd) if session_cwd else ""
|
||||
if not normalized_cwd:
|
||||
return None
|
||||
|
||||
# Parse session start time - if we can't parse it, we can't safely match
|
||||
session_start_unix = _parse_session_timestamp(session_start_ts)
|
||||
if session_start_unix is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
for pending_file in PENDING_SPAWNS_DIR.glob('*.json'):
|
||||
try:
|
||||
data = json.loads(pending_file.read_text())
|
||||
if not isinstance(data, dict):
|
||||
continue
|
||||
|
||||
# Check agent type (only match codex to codex)
|
||||
if data.get('agent_type') != 'codex':
|
||||
continue
|
||||
|
||||
# Check CWD match
|
||||
pending_path = os.path.normpath(data.get('project_path', ''))
|
||||
if normalized_cwd != pending_path:
|
||||
continue
|
||||
|
||||
# Check timing: session must have STARTED after spawn was initiated
|
||||
# Using session start time (not mtime) prevents false matches with
|
||||
# pre-existing sessions that happen to be recently active
|
||||
spawn_ts = data.get('timestamp', 0)
|
||||
if session_start_unix < spawn_ts:
|
||||
continue
|
||||
|
||||
# Match found - claim the spawn_id and delete the pending file
|
||||
spawn_id = data.get('spawn_id')
|
||||
try:
|
||||
pending_file.unlink()
|
||||
except OSError:
|
||||
pass
|
||||
LOGGER.info(
|
||||
'Matched Codex session (cwd=%s) to pending spawn_id=%s',
|
||||
session_cwd, spawn_id,
|
||||
)
|
||||
return spawn_id
|
||||
|
||||
except (json.JSONDecodeError, OSError):
|
||||
continue
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class SessionDiscoveryMixin:
|
||||
def _discover_active_codex_sessions(self):
|
||||
"""Find active Codex sessions and create/update session files with Zellij pane info."""
|
||||
@@ -131,6 +212,13 @@ class SessionDiscoveryMixin:
|
||||
session_ts = payload.get("timestamp", "")
|
||||
last_event_at = datetime.fromtimestamp(mtime, tz=timezone.utc).isoformat()
|
||||
|
||||
# Check for spawn_id: preserve existing, or match to pending spawn
|
||||
# Use session_ts (start time) not mtime to avoid false matches
|
||||
# with pre-existing sessions that were recently active
|
||||
spawn_id = existing.get("spawn_id")
|
||||
if not spawn_id:
|
||||
spawn_id = _match_pending_spawn(cwd, session_ts)
|
||||
|
||||
session_data = {
|
||||
"session_id": session_id,
|
||||
"agent": "codex",
|
||||
@@ -145,6 +233,8 @@ class SessionDiscoveryMixin:
|
||||
"zellij_pane": zellij_pane or existing.get("zellij_pane", ""),
|
||||
"transcript_path": str(jsonl_file),
|
||||
}
|
||||
if spawn_id:
|
||||
session_data["spawn_id"] = spawn_id
|
||||
if context_usage:
|
||||
session_data["context_usage"] = context_usage
|
||||
elif existing.get("context_usage"):
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import json
|
||||
import urllib.parse
|
||||
|
||||
import amc_server.context as ctx
|
||||
from amc_server.context import DASHBOARD_DIR
|
||||
import amc_server.auth as auth
|
||||
from amc_server.config import DASHBOARD_DIR
|
||||
from amc_server.logging_utils import LOGGER
|
||||
|
||||
|
||||
@@ -148,10 +148,10 @@ class HttpMixin:
|
||||
content_type = content_types.get(ext, "application/octet-stream")
|
||||
|
||||
# Inject auth token into index.html for spawn endpoint security
|
||||
if file_path == "index.html" and ctx._auth_token:
|
||||
if file_path == "index.html" and auth._auth_token:
|
||||
content = content.replace(
|
||||
b"<!-- AMC_AUTH_TOKEN -->",
|
||||
f'<script>window.AMC_AUTH_TOKEN = "{ctx._auth_token}";</script>'.encode(),
|
||||
f'<script>window.AMC_AUTH_TOKEN = "{auth._auth_token}";</script>'.encode(),
|
||||
)
|
||||
|
||||
# No caching during development
|
||||
|
||||
@@ -2,7 +2,7 @@ import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from amc_server.context import (
|
||||
from amc_server.agents import (
|
||||
CLAUDE_PROJECTS_DIR,
|
||||
CODEX_SESSIONS_DIR,
|
||||
_CODEX_CACHE_MAX,
|
||||
|
||||
@@ -37,7 +37,7 @@ class SkillsMixin:
|
||||
|
||||
Checks SKILL.md (canonical) first, then falls back to skill.md,
|
||||
prompt.md, README.md for description extraction. Parses YAML
|
||||
frontmatter if present to extract the description field.
|
||||
frontmatter if present to extract name and description fields.
|
||||
|
||||
Returns:
|
||||
List of {name: str, description: str} dicts.
|
||||
@@ -53,35 +53,41 @@ class SkillsMixin:
|
||||
if not skill_dir.is_dir() or skill_dir.name.startswith("."):
|
||||
continue
|
||||
|
||||
description = ""
|
||||
# Check files in priority order
|
||||
meta = {"name": "", "description": ""}
|
||||
# Check files in priority order, accumulating metadata
|
||||
# (earlier files take precedence for each field)
|
||||
for md_name in ["SKILL.md", "skill.md", "prompt.md", "README.md"]:
|
||||
md_file = skill_dir / md_name
|
||||
if md_file.exists():
|
||||
try:
|
||||
content = md_file.read_text()
|
||||
description = self._extract_description(content)
|
||||
if description:
|
||||
parsed = self._parse_frontmatter(content)
|
||||
if not meta["name"] and parsed["name"]:
|
||||
meta["name"] = parsed["name"]
|
||||
if not meta["description"] and parsed["description"]:
|
||||
meta["description"] = parsed["description"]
|
||||
if meta["description"]:
|
||||
break
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
skills.append({
|
||||
"name": skill_dir.name,
|
||||
"description": description or f"Skill: {skill_dir.name}",
|
||||
"name": meta["name"] or skill_dir.name,
|
||||
"description": meta["description"] or f"Skill: {skill_dir.name}",
|
||||
})
|
||||
|
||||
return skills
|
||||
|
||||
def _extract_description(self, content: str) -> str:
|
||||
"""Extract description from markdown content.
|
||||
def _parse_frontmatter(self, content: str) -> dict:
|
||||
"""Extract name and description from markdown YAML frontmatter.
|
||||
|
||||
Handles YAML frontmatter (looks for 'description:' field) and
|
||||
falls back to first meaningful line after frontmatter.
|
||||
Returns:
|
||||
Dict with 'name' and 'description' keys (both str, may be empty).
|
||||
"""
|
||||
result = {"name": "", "description": ""}
|
||||
lines = content.splitlines()
|
||||
if not lines:
|
||||
return ""
|
||||
return result
|
||||
|
||||
# Check for YAML frontmatter
|
||||
frontmatter_end = 0
|
||||
@@ -91,33 +97,37 @@ class SkillsMixin:
|
||||
if stripped == "---":
|
||||
frontmatter_end = i + 1
|
||||
break
|
||||
# Look for description field in frontmatter
|
||||
if stripped.startswith("description:"):
|
||||
# Extract value after colon
|
||||
desc = stripped[len("description:"):].strip()
|
||||
# Remove quotes if present
|
||||
if desc.startswith('"') and desc.endswith('"'):
|
||||
desc = desc[1:-1]
|
||||
elif desc.startswith("'") and desc.endswith("'"):
|
||||
desc = desc[1:-1]
|
||||
# Handle YAML multi-line indicators (>- or |-)
|
||||
if desc in (">-", "|-", ">", "|", ""):
|
||||
# Multi-line: read the next indented line
|
||||
if i + 1 < len(lines):
|
||||
next_line = lines[i + 1].strip()
|
||||
if next_line and not next_line.startswith("---"):
|
||||
return next_line[:100]
|
||||
elif desc:
|
||||
return desc[:100]
|
||||
# Check each known frontmatter field
|
||||
for field in ("name", "description"):
|
||||
if stripped.startswith(f"{field}:"):
|
||||
val = stripped[len(field) + 1:].strip()
|
||||
# Remove quotes if present
|
||||
if val.startswith('"') and val.endswith('"'):
|
||||
val = val[1:-1]
|
||||
elif val.startswith("'") and val.endswith("'"):
|
||||
val = val[1:-1]
|
||||
# Handle YAML multi-line indicators (>- or |-)
|
||||
if val in (">-", "|-", ">", "|", ""):
|
||||
if i + 1 < len(lines):
|
||||
next_line = lines[i + 1].strip()
|
||||
if next_line and not next_line.startswith("---"):
|
||||
val = next_line
|
||||
else:
|
||||
val = ""
|
||||
else:
|
||||
val = ""
|
||||
if val:
|
||||
result[field] = val[:100]
|
||||
|
||||
# Fall back to first meaningful line after frontmatter
|
||||
for line in lines[frontmatter_end:]:
|
||||
stripped = line.strip()
|
||||
# Skip empty lines, headers, comments, and frontmatter delimiters
|
||||
if stripped and not stripped.startswith("#") and not stripped.startswith("<!--") and stripped != "---":
|
||||
return stripped[:100]
|
||||
# Fall back to first meaningful line for description
|
||||
if not result["description"]:
|
||||
for line in lines[frontmatter_end:]:
|
||||
stripped = line.strip()
|
||||
if stripped and not stripped.startswith("#") and not stripped.startswith("<!--") and stripped != "---":
|
||||
result["description"] = stripped[:100]
|
||||
break
|
||||
|
||||
return ""
|
||||
return result
|
||||
|
||||
def _enumerate_codex_skills(self) -> list[dict]:
|
||||
"""Enumerate Codex skills from cache and user directory.
|
||||
@@ -161,19 +171,19 @@ class SkillsMixin:
|
||||
if not skill_dir.is_dir() or skill_dir.name.startswith("."):
|
||||
continue
|
||||
|
||||
description = ""
|
||||
# Check SKILL.md for description
|
||||
meta = {"name": "", "description": ""}
|
||||
# Check SKILL.md for metadata
|
||||
skill_md = skill_dir / "SKILL.md"
|
||||
if skill_md.exists():
|
||||
try:
|
||||
content = skill_md.read_text()
|
||||
description = self._extract_description(content)
|
||||
meta = self._parse_frontmatter(content)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
skills.append({
|
||||
"name": skill_dir.name,
|
||||
"description": description or f"User skill: {skill_dir.name}",
|
||||
"name": meta["name"] or skill_dir.name,
|
||||
"description": meta["description"] or f"User skill: {skill_dir.name}",
|
||||
})
|
||||
|
||||
return skills
|
||||
|
||||
@@ -5,13 +5,52 @@ import subprocess
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from amc_server.context import (
|
||||
PROJECTS_DIR, SESSIONS_DIR, ZELLIJ_BIN, ZELLIJ_SESSION,
|
||||
from amc_server.auth import validate_auth_token
|
||||
from amc_server.config import SESSIONS_DIR
|
||||
from amc_server.spawn_config import (
|
||||
PENDING_SPAWNS_DIR, PENDING_SPAWN_TTL,
|
||||
PROJECTS_DIR,
|
||||
_spawn_lock, _spawn_timestamps, SPAWN_COOLDOWN_SEC,
|
||||
validate_auth_token,
|
||||
)
|
||||
from amc_server.zellij import ZELLIJ_BIN, ZELLIJ_SESSION
|
||||
from amc_server.logging_utils import LOGGER
|
||||
|
||||
|
||||
def _write_pending_spawn(spawn_id, project_path, agent_type):
|
||||
"""Write a pending spawn record for later correlation by discovery.
|
||||
|
||||
This enables Codex session correlation since env vars don't propagate
|
||||
through Zellij's pane spawn mechanism.
|
||||
"""
|
||||
PENDING_SPAWNS_DIR.mkdir(parents=True, exist_ok=True)
|
||||
pending_file = PENDING_SPAWNS_DIR / f'{spawn_id}.json'
|
||||
data = {
|
||||
'spawn_id': spawn_id,
|
||||
'project_path': str(project_path),
|
||||
'agent_type': agent_type,
|
||||
'timestamp': time.time(),
|
||||
}
|
||||
try:
|
||||
pending_file.write_text(json.dumps(data))
|
||||
except OSError:
|
||||
LOGGER.warning('Failed to write pending spawn file for %s', spawn_id)
|
||||
|
||||
|
||||
def _cleanup_stale_pending_spawns():
|
||||
"""Remove pending spawn files older than PENDING_SPAWN_TTL."""
|
||||
if not PENDING_SPAWNS_DIR.exists():
|
||||
return
|
||||
now = time.time()
|
||||
try:
|
||||
for f in PENDING_SPAWNS_DIR.glob('*.json'):
|
||||
try:
|
||||
if now - f.stat().st_mtime > PENDING_SPAWN_TTL:
|
||||
f.unlink()
|
||||
except OSError:
|
||||
continue
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
# Agent commands (AC-8, AC-9: full autonomous permissions)
|
||||
AGENT_COMMANDS = {
|
||||
'claude': ['claude', '--dangerously-skip-permissions'],
|
||||
@@ -215,6 +254,16 @@ class SpawnMixin:
|
||||
|
||||
def _spawn_agent_in_project_tab(self, project, project_path, agent_type, spawn_id):
|
||||
"""Spawn an agent in a project-named Zellij tab."""
|
||||
# Clean up stale pending spawns opportunistically
|
||||
_cleanup_stale_pending_spawns()
|
||||
|
||||
# For Codex, write pending spawn record before launching.
|
||||
# Zellij doesn't propagate env vars to pane commands, so discovery
|
||||
# will match the session to this record by CWD + timestamp.
|
||||
# (Claude doesn't need this - amc-hook writes spawn_id directly)
|
||||
if agent_type == 'codex':
|
||||
_write_pending_spawn(spawn_id, project_path, agent_type)
|
||||
|
||||
# Check session exists
|
||||
if not self._check_zellij_session_exists():
|
||||
return {
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import os
|
||||
from http.server import ThreadingHTTPServer
|
||||
|
||||
from amc_server.context import DATA_DIR, PORT, generate_auth_token, start_projects_watcher
|
||||
from amc_server.auth import generate_auth_token
|
||||
from amc_server.config import DATA_DIR, PORT
|
||||
from amc_server.spawn_config import start_projects_watcher
|
||||
from amc_server.handler import AMCHandler
|
||||
from amc_server.logging_utils import LOGGER, configure_logging, install_signal_handlers
|
||||
from amc_server.mixins.spawn import load_projects_cache
|
||||
|
||||
40
amc_server/spawn_config.py
Normal file
40
amc_server/spawn_config.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""Spawn feature config: paths, locks, rate limiting, projects watcher."""
|
||||
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
from amc_server.config import DATA_DIR
|
||||
|
||||
# Pending spawn registry
|
||||
PENDING_SPAWNS_DIR = DATA_DIR / "pending_spawns"
|
||||
|
||||
# Pending spawn TTL: how long to keep unmatched spawn records (seconds)
|
||||
PENDING_SPAWN_TTL = 60
|
||||
|
||||
# Projects directory for spawning agents
|
||||
PROJECTS_DIR = Path.home() / 'projects'
|
||||
|
||||
# Lock for serializing spawn operations (prevents Zellij race conditions)
|
||||
_spawn_lock = threading.Lock()
|
||||
|
||||
# Rate limiting: track last spawn time per project (prevents spam)
|
||||
_spawn_timestamps: dict[str, float] = {}
|
||||
SPAWN_COOLDOWN_SEC = 10.0
|
||||
|
||||
|
||||
def start_projects_watcher():
|
||||
"""Start background thread to refresh projects cache every 5 minutes."""
|
||||
import logging
|
||||
from amc_server.mixins.spawn import load_projects_cache
|
||||
|
||||
def _watch_loop():
|
||||
import time
|
||||
while True:
|
||||
try:
|
||||
time.sleep(300)
|
||||
load_projects_cache()
|
||||
except Exception:
|
||||
logging.exception('Projects cache refresh failed')
|
||||
|
||||
thread = threading.Thread(target=_watch_loop, daemon=True)
|
||||
thread.start()
|
||||
34
amc_server/zellij.py
Normal file
34
amc_server/zellij.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""Zellij integration: binary resolution, plugin path, session name, cache."""
|
||||
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
# Plugin path for zellij-send-keys
|
||||
ZELLIJ_PLUGIN = Path.home() / ".config" / "zellij" / "plugins" / "zellij-send-keys.wasm"
|
||||
|
||||
|
||||
def _resolve_zellij_bin():
|
||||
"""Resolve zellij binary even when PATH is minimal (eg launchctl)."""
|
||||
from_path = shutil.which("zellij")
|
||||
if from_path:
|
||||
return from_path
|
||||
|
||||
common_paths = (
|
||||
"/opt/homebrew/bin/zellij", # Apple Silicon Homebrew
|
||||
"/usr/local/bin/zellij", # Intel Homebrew
|
||||
"/usr/bin/zellij",
|
||||
)
|
||||
for candidate in common_paths:
|
||||
p = Path(candidate)
|
||||
if p.exists() and p.is_file():
|
||||
return str(p)
|
||||
return "zellij" # Fallback for explicit error reporting by subprocess
|
||||
|
||||
|
||||
ZELLIJ_BIN = _resolve_zellij_bin()
|
||||
|
||||
# Default Zellij session for spawning
|
||||
ZELLIJ_SESSION = 'infra'
|
||||
|
||||
# Cache for Zellij session list (avoid calling zellij on every request)
|
||||
_zellij_cache = {"sessions": None, "expires": 0}
|
||||
Reference in New Issue
Block a user