Files
gitlore/src/core/ollama_mgmt.rs
teernisse 3fed5a3048 fix(ollama): resolve 3 bugs preventing cron-triggered Ollama auto-start
1. PATH blindness in cron: find_ollama_binary() used `which ollama` which
   fails in cron's minimal PATH (/usr/bin:/bin). Added well-known install
   locations (/opt/homebrew/bin, /usr/local/bin, /usr/bin, /snap/bin) as
   fallback. ensure_ollama() now spawns using the discovered absolute path
   instead of bare "ollama".

2. IPv6-first DNS resolution: is_ollama_reachable() only tried the first
   address from to_socket_addrs(), which on macOS is ::1 (IPv6). Ollama
   only listens on 127.0.0.1 (IPv4), so the check always failed.
   Now iterates all resolved addresses — "Connection refused" on ::1 is
   instant so there's no performance cost.

3. Excessive blocking on cold start: ensure_ollama() blocked for 30s
   waiting for readiness, then reported failure even though ollama serve
   was successfully spawned and still booting. Reduced wait to 5s (catches
   hot restarts), and reports started=true on timeout since the ~90s
   ingestion phase gives Ollama plenty of time to cold-start before the
   embed stage needs it.
2026-03-13 11:01:12 -04:00

518 lines
15 KiB
Rust

use std::net::{TcpStream, ToSocketAddrs};
use std::path::PathBuf;
use std::process::Command;
use std::time::Duration;
use serde::Serialize;
// ── URL parsing helpers ──
/// Pull the hostname out of a URL such as `http://gpu-server:11434`.
///
/// Bracketed IPv6 literals (`http://[::1]:11434`) are returned with their
/// brackets intact, e.g. `[::1]`.
fn extract_host(base_url: &str) -> &str {
    // Drop the scheme prefix, if any.
    let rest = base_url
        .strip_prefix("http://")
        .or_else(|| base_url.strip_prefix("https://"))
        .unwrap_or(base_url);
    // Bracketed IPv6 literal: everything up to and including the ']'.
    if rest.starts_with('[') {
        return match rest.find(']') {
            Some(close) => &rest[..=close],
            None => rest, // malformed (no closing bracket) — return as-is
        };
    }
    // Otherwise the host ends at the port colon or the first path slash.
    let before_port = rest.split_once(':').map_or(rest, |(host, _)| host);
    before_port.split_once('/').map_or(before_port, |(host, _)| host)
}
/// Pull the port out of a URL such as `http://localhost:11434`.
///
/// Tolerates trailing paths and slashes (`http://host:8080/api`); falls back
/// to Ollama's default port `11434` when no port can be parsed.
fn extract_port(base_url: &str) -> u16 {
    // Everything after the last ':' — for `http://host` (no port) this is
    // the non-numeric remainder, which fails to parse and yields the default.
    let Some((_, tail)) = base_url.rsplit_once(':') else {
        return 11434; // no colon at all — bare hostname
    };
    // Trim any path that follows the digits.
    let digits = tail.split_once('/').map_or(tail, |(d, _)| d);
    digits.parse().unwrap_or(11434)
}
/// Does this URL point at the local machine (loopback / `localhost`)?
fn is_local_url(base_url: &str) -> bool {
    // Both bracketed and bare IPv6 loopback forms are accepted, since
    // extract_host preserves brackets for `[::1]`-style URLs.
    const LOCAL_HOSTS: [&str; 4] = ["localhost", "127.0.0.1", "::1", "[::1]"];
    LOCAL_HOSTS.contains(&extract_host(base_url))
}
// ── Detection (sync, fast) ──
/// Locate the `ollama` binary: PATH lookup via `which` first, then a list of
/// well-known install locations as a fallback for cron/launchd contexts
/// where PATH is minimal (e.g. `/usr/bin:/bin`).
pub fn find_ollama_binary() -> Option<PathBuf> {
    // 1) Honor PATH when it is complete (interactive shells).
    if let Ok(out) = Command::new("which").arg("ollama").output() {
        if out.status.success() {
            let discovered = String::from_utf8_lossy(&out.stdout).trim().to_string();
            return Some(PathBuf::from(discovered));
        }
    }
    // 2) Probe the usual install prefixes (Homebrew, manual, distro, snap).
    [
        "/opt/homebrew/bin/ollama",
        "/usr/local/bin/ollama",
        "/usr/bin/ollama",
        "/snap/bin/ollama",
    ]
    .into_iter()
    .map(PathBuf::from)
    .find(|candidate| candidate.is_file())
}
/// TCP-connect probe against Ollama's port. Every resolved address is tried
/// (IPv4 and IPv6): on macOS `localhost` resolves to `::1` first, but Ollama
/// binds only `127.0.0.1`, so stopping at the first address would always
/// report "down". A refused connect on `::1` is instant, so the extra
/// attempts cost nothing.
pub fn is_ollama_reachable(base_url: &str) -> bool {
    let endpoint = format!("{}:{}", extract_host(base_url), extract_port(base_url));
    let Ok(resolved) = endpoint.to_socket_addrs() else {
        return false; // DNS failure — definitely not reachable
    };
    for addr in resolved {
        if TcpStream::connect_timeout(&addr, Duration::from_secs(2)).is_ok() {
            return true;
        }
    }
    false
}
/// Platform-appropriate one-line installation instructions for Ollama.
pub fn install_instructions() -> &'static str {
    // cfg! targets are mutually exclusive, so branch order is immaterial.
    if cfg!(target_os = "linux") {
        "Install Ollama: curl -fsSL https://ollama.ai/install.sh | sh"
    } else if cfg!(target_os = "macos") {
        "Install Ollama: brew install ollama (or https://ollama.ai/download)"
    } else {
        "Install Ollama: https://ollama.ai/download"
    }
}
// ── Ensure (spawns ollama if needed) ──
/// Outcome of [`ensure_ollama`]: what was found, what action was taken, and
/// the resulting state. Serialized (via `Serialize`) for CLI/JSON output;
/// `None` optionals are omitted from the serialized form.
#[derive(Debug, Serialize)]
pub struct OllamaEnsureResult {
    // true when the binary was found; also reported true for remote URLs,
    // where local installation is not checked (see ensure_ollama).
    pub installed: bool,
    // true when Ollama was already reachable before any action was taken
    pub was_running: bool,
    // true when this call spawned `ollama serve`
    pub started: bool,
    // true when Ollama is reachable now; can be false right after a spawn
    // that is still cold-starting (started=true, running=false)
    pub running: bool,
    // human-readable failure description, when something went wrong
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    // platform install instructions; only set when the binary is missing
    #[serde(skip_serializing_if = "Option::is_none")]
    pub install_hint: Option<String>,
}
/// Ensure Ollama is available. For local URLs, spawns `ollama serve` when it
/// is not already running; for remote URLs, only checks reachability.
///
/// The post-spawn wait is deliberately short (5 s) — enough for hot
/// restarts. A cold start keeps booting during the ~90 s ingestion phase,
/// well before the embed stage needs Ollama, so a timeout here is reported
/// as `started: true` rather than a failure.
pub fn ensure_ollama(base_url: &str) -> OllamaEnsureResult {
    // Shorthand for the common case: binary is installed, no install hint.
    // The single installed=false outcome is built inline below.
    fn outcome(
        was_running: bool,
        started: bool,
        running: bool,
        error: Option<String>,
    ) -> OllamaEnsureResult {
        OllamaEnsureResult {
            installed: true,
            was_running,
            started,
            running,
            error,
            install_hint: None,
        }
    }

    let local = is_local_url(base_url);

    // For local URLs, resolve the binary's absolute path up front — cron's
    // minimal PATH won't find Homebrew installs at spawn time.
    let binary = if local {
        match find_ollama_binary() {
            Some(path) => Some(path),
            None => {
                return OllamaEnsureResult {
                    installed: false,
                    was_running: false,
                    started: false,
                    running: false,
                    error: Some("Ollama is not installed".to_string()),
                    install_hint: Some(install_instructions().to_string()),
                };
            }
        }
    } else {
        None
    };

    // Already up? Nothing to do.
    if is_ollama_reachable(base_url) {
        return outcome(true, false, true, None);
    }

    // Remote and down: we cannot start it from here.
    if !local {
        return outcome(
            false,
            false,
            false,
            Some(format!(
                "Ollama at {base_url} is not reachable (remote — cannot auto-start)"
            )),
        );
    }

    // Spawn via the discovered absolute path.
    let ollama_bin = binary.expect("binary_path is Some for local URLs after binary check");
    let spawned = Command::new(&ollama_bin)
        .arg("serve")
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .spawn();
    if let Err(e) = spawned {
        return outcome(
            false,
            false,
            false,
            Some(format!("Failed to spawn 'ollama serve': {e}")),
        );
    }

    // Brief poll (10 × 500 ms = 5 s) to catch hot restarts.
    for _ in 0..10 {
        std::thread::sleep(Duration::from_millis(500));
        if is_ollama_reachable(base_url) {
            return outcome(false, true, true, None);
        }
    }

    // Timed out — the serve process was spawned and is presumably still
    // cold-starting; report started=true, running=false.
    outcome(false, true, false, None)
}
// ── Brief status (for cron status display) ──
/// Lightweight status snapshot for display in `cron status`.
/// `None` optionals are omitted from the serialized form.
#[derive(Debug, Clone, Serialize)]
pub struct OllamaStatusBrief {
    // true when the binary was found; always true for remote URLs, where
    // local installation is not checked (see ollama_status_brief)
    pub installed: bool,
    // true when a TCP connect to the configured URL succeeds
    pub running: bool,
    // absolute path of the discovered binary, for display
    #[serde(skip_serializing_if = "Option::is_none")]
    pub binary_path: Option<String>,
    // platform install instructions; only set when the binary is missing
    #[serde(skip_serializing_if = "Option::is_none")]
    pub install_hint: Option<String>,
}
/// Quick Ollama status check for display purposes (no spawning, no waiting).
pub fn ollama_status_brief(base_url: &str) -> OllamaStatusBrief {
    // Remote URL: whether a local binary exists is irrelevant — report
    // reachability only.
    if !is_local_url(base_url) {
        return OllamaStatusBrief {
            installed: true, // unknown for remote, but not actionable
            running: is_ollama_reachable(base_url),
            binary_path: None,
            install_hint: None,
        };
    }
    // Local URL: report binary presence, and reachability when installed.
    match find_ollama_binary() {
        None => OllamaStatusBrief {
            installed: false,
            running: false,
            binary_path: None,
            install_hint: Some(install_instructions().to_string()),
        },
        Some(path) => OllamaStatusBrief {
            installed: true,
            running: is_ollama_reachable(base_url),
            binary_path: Some(path.display().to_string()),
            install_hint: None,
        },
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // ── URL parsing ──
    #[test]
    fn extract_port_default_url() {
        assert_eq!(extract_port("http://localhost:11434"), 11434);
    }
    #[test]
    fn extract_port_custom() {
        assert_eq!(extract_port("http://192.168.1.5:9999"), 9999);
    }
    #[test]
    fn extract_port_trailing_slash() {
        assert_eq!(extract_port("http://localhost:11434/"), 11434);
    }
    #[test]
    fn extract_port_with_path() {
        assert_eq!(extract_port("http://localhost:8080/api/generate"), 8080);
    }
    #[test]
    fn extract_port_no_port() {
        assert_eq!(extract_port("http://localhost"), 11434);
    }
    #[test]
    fn extract_port_https() {
        assert_eq!(extract_port("https://ollama.internal:8080"), 8080);
    }
    #[test]
    fn extract_host_localhost() {
        assert_eq!(extract_host("http://localhost:11434"), "localhost");
    }
    #[test]
    fn extract_host_ip() {
        assert_eq!(extract_host("http://192.168.1.5:9999"), "192.168.1.5");
    }
    #[test]
    fn extract_host_remote() {
        assert_eq!(extract_host("http://gpu-server:11434"), "gpu-server");
    }
    #[test]
    fn extract_host_no_port() {
        assert_eq!(extract_host("http://localhost"), "localhost");
    }
    #[test]
    fn extract_host_https() {
        assert_eq!(
            extract_host("https://ollama.internal:8080"),
            "ollama.internal"
        );
    }
    #[test]
    fn extract_host_no_scheme() {
        assert_eq!(extract_host("localhost:11434"), "localhost");
    }

    // ── is_local_url ──
    #[test]
    fn is_local_url_localhost() {
        assert!(is_local_url("http://localhost:11434"));
    }
    #[test]
    fn is_local_url_loopback() {
        assert!(is_local_url("http://127.0.0.1:11434"));
    }
    #[test]
    fn is_local_url_ipv6_loopback() {
        assert!(is_local_url("http://[::1]:11434"));
    }
    #[test]
    fn is_local_url_remote() {
        assert!(!is_local_url("http://gpu-server:11434"));
        assert!(!is_local_url("http://192.168.1.5:11434"));
    }
    #[test]
    fn is_local_url_fqdn_not_local() {
        assert!(!is_local_url("http://ollama.example.com:11434"));
    }

    // ── install_instructions ──
    #[test]
    fn install_instructions_not_empty() {
        assert!(!install_instructions().is_empty());
        assert!(install_instructions().contains("ollama"));
    }
    #[test]
    fn install_instructions_contains_url() {
        assert!(install_instructions().contains("ollama.ai"));
    }

    // ── is_ollama_reachable ──
    #[test]
    fn reachable_returns_false_for_closed_port() {
        // Port 1 is almost never open and requires root to bind
        assert!(!is_ollama_reachable("http://127.0.0.1:1"));
    }
    #[test]
    fn reachable_returns_false_for_unresolvable_host() {
        assert!(!is_ollama_reachable(
            "http://this-host-does-not-exist-xyzzy:11434"
        ));
    }

    // ── OllamaEnsureResult serialization ──
    #[test]
    fn ensure_result_serializes_installed_running() {
        let result = OllamaEnsureResult {
            installed: true,
            was_running: true,
            started: false,
            running: true,
            error: None,
            install_hint: None,
        };
        let json: serde_json::Value = serde_json::to_value(&result).unwrap();
        assert_eq!(json["installed"], true);
        assert_eq!(json["was_running"], true);
        assert_eq!(json["started"], false);
        assert_eq!(json["running"], true);
        // skip_serializing_if: None fields should be absent
        assert!(json.get("error").is_none());
        assert!(json.get("install_hint").is_none());
    }
    #[test]
    fn ensure_result_serializes_not_installed() {
        let result = OllamaEnsureResult {
            installed: false,
            was_running: false,
            started: false,
            running: false,
            error: Some("Ollama is not installed".to_string()),
            install_hint: Some("Install Ollama: brew install ollama".to_string()),
        };
        let json: serde_json::Value = serde_json::to_value(&result).unwrap();
        assert_eq!(json["installed"], false);
        assert_eq!(json["running"], false);
        assert_eq!(json["error"], "Ollama is not installed");
        assert!(
            json["install_hint"]
                .as_str()
                .unwrap()
                .contains("brew install")
        );
    }

    // ── OllamaStatusBrief serialization ──
    #[test]
    fn status_brief_serializes_with_optional_fields() {
        let brief = OllamaStatusBrief {
            installed: true,
            running: true,
            binary_path: Some("/usr/local/bin/ollama".to_string()),
            install_hint: None,
        };
        let json: serde_json::Value = serde_json::to_value(&brief).unwrap();
        assert_eq!(json["installed"], true);
        assert_eq!(json["running"], true);
        assert_eq!(json["binary_path"], "/usr/local/bin/ollama");
        assert!(json.get("install_hint").is_none());
    }
    #[test]
    fn status_brief_serializes_not_installed() {
        let brief = OllamaStatusBrief {
            installed: false,
            running: false,
            binary_path: None,
            install_hint: Some("Install Ollama".to_string()),
        };
        let json: serde_json::Value = serde_json::to_value(&brief).unwrap();
        assert_eq!(json["installed"], false);
        assert_eq!(json["running"], false);
        assert!(json.get("binary_path").is_none());
        assert_eq!(json["install_hint"], "Install Ollama");
    }
    #[test]
    fn status_brief_clone() {
        let original = OllamaStatusBrief {
            installed: true,
            running: false,
            binary_path: Some("/opt/bin/ollama".to_string()),
            install_hint: None,
        };
        let cloned = original.clone();
        assert_eq!(original.installed, cloned.installed);
        assert_eq!(original.running, cloned.running);
        assert_eq!(original.binary_path, cloned.binary_path);
        assert_eq!(original.install_hint, cloned.install_hint);
    }

    // ── ensure_ollama with remote URL ──
    #[test]
    fn ensure_remote_unreachable_does_not_set_install_hint() {
        // A remote URL that cannot resolve — should NOT suggest local install.
        // RFC 2606 reserves `.invalid`, so DNS resolution fails instantly and
        // deterministically. (The previous TEST-NET IP 192.0.2.1 could hang
        // for the full 2 s connect timeout, or even accept the connection in
        // NAT-intercepting sandboxes, making this test slow/flaky.)
        let result = ensure_ollama("http://unreachable.invalid:11434");
        assert!(!result.was_running);
        assert!(!result.started);
        assert!(!result.running);
        assert!(
            result.install_hint.is_none(),
            "remote URLs should not suggest local install"
        );
        assert!(
            result.error.as_deref().unwrap_or("").contains("remote"),
            "error should mention 'remote': {:?}",
            result.error,
        );
    }

    // ── ensure_ollama with local URL (binary check) ──
    #[test]
    fn ensure_local_closed_port_not_already_running() {
        // Local URL pointing at a port nothing listens on
        let result = ensure_ollama("http://127.0.0.1:1");
        // Should NOT report was_running since port 1 is closed
        assert!(!result.was_running);
        assert!(!result.running);
        // If ollama binary is not installed, should get install hint
        if !result.installed {
            assert!(result.install_hint.is_some());
            assert!(
                result
                    .error
                    .as_deref()
                    .unwrap_or("")
                    .contains("not installed")
            );
        }
    }
}