fix: Ollama detection, install guide UI, and AI Providers auto-fill
Some checks failed
Auto Tag / auto-tag (push) Successful in 3s
Test / rust-fmt-check (push) Failing after 1m2s
Release / build-macos-arm64 (push) Successful in 2m50s
Test / rust-clippy (push) Successful in 7m24s
Test / frontend-typecheck (push) Has been cancelled
Test / frontend-tests (push) Has been cancelled
Test / rust-tests (push) Has been cancelled
Release / build-linux-amd64 (push) Has been cancelled
Release / build-windows-amd64 (push) Has been cancelled
Release / build-linux-arm64 (push) Has been cancelled

This commit is contained in:
Shaun Arman 2026-03-31 07:25:33 -05:00
parent 366c564834
commit 56e52ee09c
3 changed files with 56 additions and 5 deletions

View File

@ -10,7 +10,12 @@ pub async fn check_ollama() -> anyhow::Result<OllamaStatus> {
let which_result = std::process::Command::new(which_cmd).arg("ollama").output();
let installed = which_result.map(|o| o.status.success()).unwrap_or(false);
// Check common install paths explicitly — Tauri's process PATH may omit /usr/local/bin
let in_common_path = ["/usr/local/bin/ollama", "/opt/homebrew/bin/ollama", "/usr/bin/ollama"]
.iter()
.any(|p| std::path::Path::new(p).exists());
let installed = which_result.map(|o| o.status.success()).unwrap_or(false) || in_common_path;
let version = if installed {
std::process::Command::new("ollama")
@ -32,6 +37,9 @@ pub async fn check_ollama() -> anyhow::Result<OllamaStatus> {
.map(|r| r.status().is_success())
.unwrap_or(false);
// If the API is responding, Ollama is definitely installed even if binary wasn't found in PATH
let installed = installed || running;
Ok(OllamaStatus {
installed,
version,

View File

@ -195,9 +195,18 @@ export default function AIProviders() {
<Label>Type</Label>
<Select
value={form.provider_type}
onValueChange={(v) =>
setForm({ ...form, provider_type: v as ProviderConfig["provider_type"] })
}
onValueChange={(v) => {
const type = v as ProviderConfig["provider_type"];
const defaults: Partial<ProviderConfig> =
type === "ollama"
? { api_url: "http://localhost:11434", api_key: "", model: "llama3.2:3b" }
: type === "openai"
? { api_url: "https://api.openai.com/v1" }
: type === "anthropic"
? { api_url: "https://api.anthropic.com" }
: {};
setForm({ ...form, provider_type: type, ...defaults });
}}
>
<SelectTrigger>
<SelectValue />

View File

@ -23,15 +23,18 @@ import {
pullOllamaModelCmd,
deleteOllamaModelCmd,
listOllamaModelsCmd,
getOllamaInstallGuideCmd,
type OllamaStatus,
type HardwareInfo,
type ModelRecommendation,
type OllamaModel,
type InstallGuide,
} from "@/lib/tauriCommands";
import { listen } from "@tauri-apps/api/event";
export default function Ollama() {
const [status, setStatus] = useState<OllamaStatus | null>(null);
const [installGuide, setInstallGuide] = useState<InstallGuide | null>(null);
const [models, setModels] = useState<OllamaModel[]>([]);
const [hardware, setHardware] = useState<HardwareInfo | null>(null);
const [recommendations, setRecommendations] = useState<ModelRecommendation[]>([]);
@ -45,13 +48,18 @@ export default function Ollama() {
const loadData = async () => {
setIsLoading(true);
try {
const [ollamaStatus, hw, recs, modelList] = await Promise.all([
const platform = navigator.platform.toLowerCase().includes("mac") ? "macos"
: navigator.platform.toLowerCase().includes("win") ? "windows" : "linux";
const [ollamaStatus, hw, recs, modelList, guide] = await Promise.all([
checkOllamaInstalledCmd(),
detectHardwareCmd(),
recommendModelsCmd(),
listOllamaModelsCmd().catch(() => [] as OllamaModel[]),
getOllamaInstallGuideCmd(platform),
]);
setStatus(ollamaStatus);
setInstallGuide(guide);
setHardware(hw);
setRecommendations(recs);
setModels(modelList);
@ -157,6 +165,32 @@ export default function Ollama() {
</CardContent>
</Card>
{/* Install Instructions — shown when Ollama is not detected */}
{status && !status.installed && installGuide && (
<Card className="border-yellow-500/50">
<CardHeader>
<CardTitle className="text-lg flex items-center gap-2">
<Download className="w-5 h-5 text-yellow-500" />
Ollama Not Detected — Installation Required
</CardTitle>
</CardHeader>
<CardContent className="space-y-4">
<ol className="space-y-2 list-decimal list-inside">
{installGuide.steps.map((step, i) => (
<li key={i} className="text-sm text-muted-foreground">{step}</li>
))}
</ol>
<Button
variant="outline"
onClick={() => window.open(installGuide.url, "_blank")}
>
<Download className="w-4 h-4 mr-2" />
Download Ollama for {installGuide.platform}
</Button>
</CardContent>
</Card>
)}
{/* Model List */}
<Card>
<CardHeader>