feat(core): add ollama lifecycle management for cron sync

Add a src/core/ollama_mgmt.rs module that handles Ollama detection,
startup, and health checking. This lets cron-based sync automatically
start Ollama when it is installed but not running, so embeddings remain
available during unattended sync runs.

Integration points:
- sync handler (--lock mode): calls ensure_ollama() before the embedding phase
- cron status: displays Ollama health (installed/running/not-installed)
- robot JSON: includes OllamaStatusBrief in cron status response

The module handles local vs remote Ollama URLs (including bracketed IPv6),
reachability detection via TCP connect, and graceful startup with a bounded
wait (about 10 seconds) for the server to become reachable.
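
For orientation, a minimal sketch of the intended call pattern (the call site
is illustrative only; ensure_ollama() and the OllamaEnsureResult fields are
defined in src/core/ollama_mgmt.rs below):

// Illustrative call site; ensure_ollama() and OllamaEnsureResult come from
// the new src/core/ollama_mgmt.rs module added in this commit.
let result = lore::core::ollama_mgmt::ensure_ollama("http://localhost:11434");
if !result.installed {
    // Binary not found on PATH: surface the platform-specific install hint.
    eprintln!("{}", result.install_hint.as_deref().unwrap_or(""));
} else if result.was_running || result.started {
    // Already reachable, or `ollama serve` was spawned and came up in time.
    tracing::info!("Ollama ready for embeddings");
} else {
    // Spawn failed, or the server never became reachable within ~10 seconds.
    eprintln!("{}", result.error.as_deref().unwrap_or("unknown error"));
}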

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
teernisse committed 2026-03-12 17:06:57 -04:00
commit 16bd33e8c0 (parent 75469af514)
4 changed files with 569 additions and 1 deletion


@@ -1664,6 +1664,24 @@ async fn handle_sync_cmd(
None
};
// In cron mode (--lock), ensure Ollama is running for embeddings
if args.lock {
let result = lore::core::ollama_mgmt::ensure_ollama(&config.embedding.base_url);
if !result.installed {
tracing::warn!(
"Ollama is not installed — embeddings will be skipped. {}",
result.install_hint.as_deref().unwrap_or("")
);
} else if result.started {
tracing::info!("Started ollama serve (was not running)");
} else if !result.running {
tracing::warn!(
"Failed to start Ollama: {}",
result.error.as_deref().unwrap_or("unknown error")
);
}
}
// Surgical mode: run_sync_surgical manages its own recorder, signal, and recording.
// Skip the normal recorder setup and let the dispatch handle everything.
if options.is_surgical() {


@@ -9,6 +9,7 @@ use crate::core::cron::{
};
use crate::core::db::create_connection;
use crate::core::error::Result;
use crate::core::ollama_mgmt::{OllamaStatusBrief, ollama_status_brief};
use crate::core::paths::get_db_path;
use crate::core::time::ms_to_iso;
@@ -143,12 +144,20 @@ pub fn run_cron_status(config: &Config) -> Result<CronStatusInfo> {
// Query last sync run from DB
let last_sync = get_last_sync_time(config).unwrap_or_default();
// Quick ollama health check
let ollama = ollama_status_brief(&config.embedding.base_url);
Ok(CronStatusInfo {
status,
last_sync,
ollama,
})
}
pub struct CronStatusInfo {
pub status: CronStatusResult,
pub last_sync: Option<LastSyncInfo>,
pub ollama: OllamaStatusBrief,
}
pub struct LastSyncInfo {
@@ -236,6 +245,32 @@ pub fn print_cron_status(info: &CronStatusInfo) {
last.status
);
}
// Ollama status
if info.ollama.installed {
if info.ollama.running {
println!(
" {} running (auto-started by cron if needed)",
Theme::dim().render("ollama:")
);
} else {
println!(
" {} {}",
Theme::warning().render("ollama:"),
Theme::warning()
.render("installed but not running (will attempt start on next sync)")
);
}
} else {
println!(
" {} {}",
Theme::error().render("ollama:"),
Theme::error().render("not installed — embeddings unavailable")
);
if let Some(ref hint) = info.ollama.install_hint {
println!(" {hint}");
}
}
println!();
}
@@ -264,6 +299,7 @@ struct CronStatusData {
last_sync_at: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
last_sync_status: Option<String>,
ollama: OllamaStatusBrief,
}
pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
@@ -283,6 +319,7 @@ pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
cron_entry: info.status.cron_entry.clone(),
last_sync_at: info.last_sync.as_ref().map(|s| s.started_at_iso.clone()),
last_sync_status: info.last_sync.as_ref().map(|s| s.status.clone()),
ollama: info.ollama.clone(),
},
meta: RobotMeta::new(elapsed_ms),
};
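
For reference, a sketch of how the new ollama field serializes in the robot
cron-status payload (values are illustrative; the struct and its
skip_serializing_if behavior are defined in src/core/ollama_mgmt.rs below):

// Illustrative values only; OllamaStatusBrief is defined in the new module
// below, and the `lore` crate path mirrors the call in the sync handler above.
use lore::core::ollama_mgmt::OllamaStatusBrief;

let brief = OllamaStatusBrief {
    installed: true,
    running: false,
    binary_path: Some("/usr/local/bin/ollama".to_string()),
    install_hint: None,
};
// install_hint is None, so it is skipped; the "ollama" object comes out as:
// {"installed":true,"running":false,"binary_path":"/usr/local/bin/ollama"}
println!("{}", serde_json::to_string(&brief).unwrap());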


@@ -9,6 +9,7 @@ pub mod file_history;
pub mod lock;
pub mod logging;
pub mod metrics;
pub mod ollama_mgmt;
pub mod path_resolver;
pub mod paths;
pub mod project;

src/core/ollama_mgmt.rs (new file, 512 additions)

@@ -0,0 +1,512 @@
use std::net::{TcpStream, ToSocketAddrs};
use std::path::PathBuf;
use std::process::Command;
use std::time::Duration;
use serde::Serialize;
// ── URL parsing helpers ──
/// Extract the hostname from a URL like `http://gpu-server:11434`.
/// Handles bracketed IPv6 addresses like `http://[::1]:11434`.
fn extract_host(base_url: &str) -> &str {
let without_scheme = base_url
.strip_prefix("http://")
.or_else(|| base_url.strip_prefix("https://"))
.unwrap_or(base_url);
// Handle bracketed IPv6: [::1]:port
if without_scheme.starts_with('[') {
return without_scheme
.find(']')
.map_or(without_scheme, |end| &without_scheme[..=end]);
}
// Take host part (before port colon or path slash)
let host = without_scheme.split(':').next().unwrap_or(without_scheme);
host.split('/').next().unwrap_or(host)
}
/// Extract port from a URL like `http://localhost:11434`.
/// Handles trailing paths and slashes (e.g. `http://host:8080/api`).
fn extract_port(base_url: &str) -> u16 {
base_url
.rsplit(':')
.next()
.and_then(|s| {
// Strip any path/fragment after the port digits
let port_str = s.split('/').next().unwrap_or(s);
port_str.parse().ok()
})
.unwrap_or(11434)
}
/// Is this URL pointing at the local machine?
fn is_local_url(base_url: &str) -> bool {
let host = extract_host(base_url);
matches!(host, "localhost" | "127.0.0.1" | "::1" | "[::1]")
}
// ── Detection (sync, fast) ──
/// Check if the `ollama` binary is on PATH. Returns the path if found.
pub fn find_ollama_binary() -> Option<PathBuf> {
Command::new("which")
.arg("ollama")
.output()
.ok()
.filter(|o| o.status.success())
.map(|o| PathBuf::from(String::from_utf8_lossy(&o.stdout).trim().to_string()))
}
/// Quick sync check: can we TCP-connect to Ollama's HTTP port?
/// Resolves the hostname from the URL (supports both local and remote hosts).
pub fn is_ollama_reachable(base_url: &str) -> bool {
let port = extract_port(base_url);
let host = extract_host(base_url);
let addr_str = format!("{host}:{port}");
let Ok(mut addrs) = addr_str.to_socket_addrs() else {
return false;
};
let Some(addr) = addrs.next() else {
return false;
};
TcpStream::connect_timeout(&addr, Duration::from_secs(2)).is_ok()
}
/// Platform-appropriate installation instructions.
pub fn install_instructions() -> &'static str {
if cfg!(target_os = "macos") {
"Install Ollama: brew install ollama (or https://ollama.ai/download)"
} else if cfg!(target_os = "linux") {
"Install Ollama: curl -fsSL https://ollama.ai/install.sh | sh"
} else {
"Install Ollama: https://ollama.ai/download"
}
}
// ── Ensure (sync, may block up to ~10s while waiting for startup) ──
/// Result of attempting to ensure Ollama is running.
#[derive(Debug, Serialize)]
pub struct OllamaEnsureResult {
/// Whether the `ollama` binary was found on PATH.
pub installed: bool,
/// Whether Ollama was already running before we tried anything.
pub was_running: bool,
/// Whether we successfully spawned `ollama serve`.
pub started: bool,
/// Whether Ollama is reachable now (after any start attempt).
pub running: bool,
/// Error message if something went wrong.
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
/// Installation instructions (set when ollama is not installed).
#[serde(skip_serializing_if = "Option::is_none")]
pub install_hint: Option<String>,
}
/// Ensure Ollama is running. If not installed, returns error with install
/// instructions. If installed but not running, attempts to start it.
///
/// Only attempts to start `ollama serve` when the configured URL points at
/// localhost. For remote URLs, only checks reachability.
///
/// This blocks for up to ~10 seconds waiting for Ollama to become reachable
/// after a start attempt. Intended for cron/lock mode where a brief delay
/// is acceptable.
pub fn ensure_ollama(base_url: &str) -> OllamaEnsureResult {
let is_local = is_local_url(base_url);
// Step 1: Is the binary installed? (only relevant for local)
if is_local {
let installed = find_ollama_binary().is_some();
if !installed {
return OllamaEnsureResult {
installed: false,
was_running: false,
started: false,
running: false,
error: Some("Ollama is not installed".to_string()),
install_hint: Some(install_instructions().to_string()),
};
}
}
// Step 2: Already running?
if is_ollama_reachable(base_url) {
return OllamaEnsureResult {
installed: true,
was_running: true,
started: false,
running: true,
error: None,
install_hint: None,
};
}
// Step 3: For remote URLs, we can't start ollama — just report unreachable
if !is_local {
return OllamaEnsureResult {
installed: true, // unknown, but irrelevant for remote
was_running: false,
started: false,
running: false,
error: Some(format!(
"Ollama at {base_url} is not reachable (remote — cannot auto-start)"
)),
install_hint: None,
};
}
// Step 4: Try to start it (local only)
let spawn_result = Command::new("ollama")
.arg("serve")
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.spawn();
if let Err(e) = spawn_result {
return OllamaEnsureResult {
installed: true,
was_running: false,
started: false,
running: false,
error: Some(format!("Failed to spawn 'ollama serve': {e}")),
install_hint: None,
};
}
// Step 5: Wait for it to become reachable (up to ~10 seconds)
for _ in 0..20 {
std::thread::sleep(Duration::from_millis(500));
if is_ollama_reachable(base_url) {
return OllamaEnsureResult {
installed: true,
was_running: false,
started: true,
running: true,
error: None,
install_hint: None,
};
}
}
OllamaEnsureResult {
installed: true,
was_running: false,
started: false,
running: false,
error: Some(
"Spawned 'ollama serve' but it did not become reachable within 10 seconds".to_string(),
),
install_hint: None,
}
}
// ── Brief status (for cron status display) ──
/// Lightweight status snapshot for display in `cron status`.
#[derive(Debug, Clone, Serialize)]
pub struct OllamaStatusBrief {
pub installed: bool,
pub running: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub binary_path: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub install_hint: Option<String>,
}
/// Quick, non-blocking Ollama status check for display purposes.
pub fn ollama_status_brief(base_url: &str) -> OllamaStatusBrief {
let is_local = is_local_url(base_url);
// For remote URLs, only check reachability (binary check is irrelevant)
if !is_local {
let running = is_ollama_reachable(base_url);
return OllamaStatusBrief {
installed: true, // unknown for remote, but not actionable
running,
binary_path: None,
install_hint: None,
};
}
let binary_path = find_ollama_binary();
let installed = binary_path.is_some();
if !installed {
return OllamaStatusBrief {
installed: false,
running: false,
binary_path: None,
install_hint: Some(install_instructions().to_string()),
};
}
let running = is_ollama_reachable(base_url);
OllamaStatusBrief {
installed: true,
running,
binary_path: binary_path.map(|p| p.display().to_string()),
install_hint: None,
}
}
#[cfg(test)]
mod tests {
use super::*;
// ── URL parsing ──
#[test]
fn extract_port_default_url() {
assert_eq!(extract_port("http://localhost:11434"), 11434);
}
#[test]
fn extract_port_custom() {
assert_eq!(extract_port("http://192.168.1.5:9999"), 9999);
}
#[test]
fn extract_port_trailing_slash() {
assert_eq!(extract_port("http://localhost:11434/"), 11434);
}
#[test]
fn extract_port_with_path() {
assert_eq!(extract_port("http://localhost:8080/api/generate"), 8080);
}
#[test]
fn extract_port_no_port() {
assert_eq!(extract_port("http://localhost"), 11434);
}
#[test]
fn extract_port_https() {
assert_eq!(extract_port("https://ollama.internal:8080"), 8080);
}
#[test]
fn extract_host_localhost() {
assert_eq!(extract_host("http://localhost:11434"), "localhost");
}
#[test]
fn extract_host_ip() {
assert_eq!(extract_host("http://192.168.1.5:9999"), "192.168.1.5");
}
#[test]
fn extract_host_remote() {
assert_eq!(extract_host("http://gpu-server:11434"), "gpu-server");
}
#[test]
fn extract_host_no_port() {
assert_eq!(extract_host("http://localhost"), "localhost");
}
#[test]
fn extract_host_https() {
assert_eq!(
extract_host("https://ollama.internal:8080"),
"ollama.internal"
);
}
#[test]
fn extract_host_no_scheme() {
assert_eq!(extract_host("localhost:11434"), "localhost");
}
// ── is_local_url ──
#[test]
fn is_local_url_localhost() {
assert!(is_local_url("http://localhost:11434"));
}
#[test]
fn is_local_url_loopback() {
assert!(is_local_url("http://127.0.0.1:11434"));
}
#[test]
fn is_local_url_ipv6_loopback() {
assert!(is_local_url("http://[::1]:11434"));
}
#[test]
fn is_local_url_remote() {
assert!(!is_local_url("http://gpu-server:11434"));
assert!(!is_local_url("http://192.168.1.5:11434"));
}
#[test]
fn is_local_url_fqdn_not_local() {
assert!(!is_local_url("http://ollama.example.com:11434"));
}
// ── install_instructions ──
#[test]
fn install_instructions_not_empty() {
assert!(!install_instructions().is_empty());
assert!(install_instructions().contains("ollama"));
}
#[test]
fn install_instructions_contains_url() {
assert!(install_instructions().contains("ollama.ai"));
}
// ── is_ollama_reachable ──
#[test]
fn reachable_returns_false_for_closed_port() {
// Port 1 is almost never open and requires root to bind
assert!(!is_ollama_reachable("http://127.0.0.1:1"));
}
#[test]
fn reachable_returns_false_for_unresolvable_host() {
assert!(!is_ollama_reachable(
"http://this-host-does-not-exist-xyzzy:11434"
));
}
// ── OllamaEnsureResult serialization ──
#[test]
fn ensure_result_serializes_installed_running() {
let result = OllamaEnsureResult {
installed: true,
was_running: true,
started: false,
running: true,
error: None,
install_hint: None,
};
let json: serde_json::Value = serde_json::to_value(&result).unwrap();
assert_eq!(json["installed"], true);
assert_eq!(json["was_running"], true);
assert_eq!(json["started"], false);
assert_eq!(json["running"], true);
// skip_serializing_if: None fields should be absent
assert!(json.get("error").is_none());
assert!(json.get("install_hint").is_none());
}
#[test]
fn ensure_result_serializes_not_installed() {
let result = OllamaEnsureResult {
installed: false,
was_running: false,
started: false,
running: false,
error: Some("Ollama is not installed".to_string()),
install_hint: Some("Install Ollama: brew install ollama".to_string()),
};
let json: serde_json::Value = serde_json::to_value(&result).unwrap();
assert_eq!(json["installed"], false);
assert_eq!(json["running"], false);
assert_eq!(json["error"], "Ollama is not installed");
assert!(
json["install_hint"]
.as_str()
.unwrap()
.contains("brew install")
);
}
// ── OllamaStatusBrief serialization ──
#[test]
fn status_brief_serializes_with_optional_fields() {
let brief = OllamaStatusBrief {
installed: true,
running: true,
binary_path: Some("/usr/local/bin/ollama".to_string()),
install_hint: None,
};
let json: serde_json::Value = serde_json::to_value(&brief).unwrap();
assert_eq!(json["installed"], true);
assert_eq!(json["running"], true);
assert_eq!(json["binary_path"], "/usr/local/bin/ollama");
assert!(json.get("install_hint").is_none());
}
#[test]
fn status_brief_serializes_not_installed() {
let brief = OllamaStatusBrief {
installed: false,
running: false,
binary_path: None,
install_hint: Some("Install Ollama".to_string()),
};
let json: serde_json::Value = serde_json::to_value(&brief).unwrap();
assert_eq!(json["installed"], false);
assert_eq!(json["running"], false);
assert!(json.get("binary_path").is_none());
assert_eq!(json["install_hint"], "Install Ollama");
}
#[test]
fn status_brief_clone() {
let original = OllamaStatusBrief {
installed: true,
running: false,
binary_path: Some("/opt/bin/ollama".to_string()),
install_hint: None,
};
let cloned = original.clone();
assert_eq!(original.installed, cloned.installed);
assert_eq!(original.running, cloned.running);
assert_eq!(original.binary_path, cloned.binary_path);
assert_eq!(original.install_hint, cloned.install_hint);
}
// ── ensure_ollama with remote URL ──
#[test]
fn ensure_remote_unreachable_does_not_set_install_hint() {
// A remote URL that nothing listens on — should NOT suggest local install
let result = ensure_ollama("http://192.0.2.1:1"); // TEST-NET, will fail fast
assert!(!result.started);
assert!(!result.running);
assert!(
result.install_hint.is_none(),
"remote URLs should not suggest local install"
);
assert!(
result.error.as_deref().unwrap_or("").contains("remote"),
"error should mention 'remote': {:?}",
result.error,
);
}
// ── ensure_ollama with local URL (binary check) ──
#[test]
fn ensure_local_closed_port_not_already_running() {
// Local URL pointing at a port nothing listens on
let result = ensure_ollama("http://127.0.0.1:1");
// Should NOT report was_running since port 1 is closed
assert!(!result.was_running);
assert!(!result.running);
// If ollama binary is not installed, should get install hint
if !result.installed {
assert!(result.install_hint.is_some());
assert!(
result
.error
.as_deref()
.unwrap_or("")
.contains("not installed")
);
}
}
}