diff --git a/.gitea/workflows/auto-tag.yml b/.gitea/workflows/auto-tag.yml
index 1737401f..e306a8a8 100644
--- a/.gitea/workflows/auto-tag.yml
+++ b/.gitea/workflows/auto-tag.yml
@@ -149,24 +149,6 @@ jobs:
             pkg-config curl perl jq
           curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
           apt-get install -y nodejs
-      - name: Download Ollama
-        run: |
-          OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
-            | grep '"tag_name"' | cut -d'"' -f4)
-          mkdir -p src-tauri/resources/ollama /tmp/ollama-extract
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-linux-amd64.tgz" \
-            -o /tmp/ollama.tgz
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-            -o /tmp/ollama-sha256sums.txt
-          EXPECTED=$(awk '$2 == "ollama-linux-amd64.tgz" {print $1}' /tmp/ollama-sha256sums.txt)
-          if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
-          ACTUAL=$(sha256sum /tmp/ollama.tgz | awk '{print $1}')
-          if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
-          tar -xzf /tmp/ollama.tgz -C /tmp/ollama-extract/
-          cp "$(find /tmp/ollama-extract -name 'ollama' -type f | head -1)" src-tauri/resources/ollama/ollama
-          chmod +x src-tauri/resources/ollama/ollama
-          rm -rf /tmp/ollama.tgz /tmp/ollama-extract /tmp/ollama-sha256sums.txt
-          echo "Bundled Ollama ${OLLAMA_VER} (checksum verified)"
       - name: Build
         run: |
           npm ci --legacy-peer-deps
@@ -247,25 +229,9 @@ jobs:
           git checkout FETCH_HEAD
       - name: Install dependencies
        run: |
-          apt-get update -qq && apt-get install -y -qq mingw-w64 curl nsis perl make jq unzip
+          apt-get update -qq && apt-get install -y -qq mingw-w64 curl nsis perl make jq
           curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
           apt-get install -y nodejs
-      - name: Download Ollama
-        run: |
-          OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
-            | grep '"tag_name"' | cut -d'"' -f4)
-          mkdir -p src-tauri/resources/ollama
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-windows-amd64.zip" \
-            -o /tmp/ollama-win.zip
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-            -o /tmp/ollama-sha256sums.txt
-          EXPECTED=$(awk '$2 == "ollama-windows-amd64.zip" {print $1}' /tmp/ollama-sha256sums.txt)
-          if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
-          ACTUAL=$(sha256sum /tmp/ollama-win.zip | awk '{print $1}')
-          if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
-          unzip -jo /tmp/ollama-win.zip 'ollama.exe' -d src-tauri/resources/ollama/
-          rm /tmp/ollama-win.zip /tmp/ollama-sha256sums.txt
-          echo "Bundled Ollama ${OLLAMA_VER} for Windows (checksum verified)"
       - name: Build
         env:
           CC_x86_64_pc_windows_gnu: x86_64-w64-mingw32-gcc
@@ -347,22 +313,6 @@ jobs:
           git remote add origin http://172.0.0.29:3000/sarman/tftsr-devops_investigation.git
           git fetch --depth=1 origin "$GITHUB_SHA"
           git checkout FETCH_HEAD
-      - name: Download Ollama
-        run: |
-          OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
-            | python3 -c "import sys,json; print(json.load(sys.stdin)['tag_name'])")
-          mkdir -p src-tauri/resources/ollama
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-darwin" \
-            -o src-tauri/resources/ollama/ollama
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-            -o /tmp/ollama-sha256sums.txt
-          EXPECTED=$(awk '$2 == "ollama-darwin" {print $1}' /tmp/ollama-sha256sums.txt)
-          if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
-          ACTUAL=$(shasum -a 256 src-tauri/resources/ollama/ollama | awk '{print $1}')
-          if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
-          chmod +x src-tauri/resources/ollama/ollama
-          rm /tmp/ollama-sha256sums.txt
-          echo "Bundled Ollama ${OLLAMA_VER} for macOS (checksum verified)"
       - name: Build
         env:
           MACOSX_DEPLOYMENT_TARGET: "11.0"
@@ -489,24 +439,6 @@ jobs:
           # source "$HOME/.cargo/env" in the Build step handles PATH — no GITHUB_PATH needed
           curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
             --default-toolchain 1.88.0 --profile minimal --no-modify-path
-      - name: Download Ollama
-        run: |
-          OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
-            | grep '"tag_name"' | cut -d'"' -f4)
-          mkdir -p src-tauri/resources/ollama /tmp/ollama-extract
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-linux-arm64.tgz" \
-            -o /tmp/ollama.tgz
-          curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-            -o /tmp/ollama-sha256sums.txt
-          EXPECTED=$(awk '$2 == "ollama-linux-arm64.tgz" {print $1}' /tmp/ollama-sha256sums.txt)
-          if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
-          ACTUAL=$(sha256sum /tmp/ollama.tgz | awk '{print $1}')
-          if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
-          tar -xzf /tmp/ollama.tgz -C /tmp/ollama-extract/
-          cp "$(find /tmp/ollama-extract -name 'ollama' -type f | head -1)" src-tauri/resources/ollama/ollama
-          chmod +x src-tauri/resources/ollama/ollama
-          rm -rf /tmp/ollama.tgz /tmp/ollama-extract /tmp/ollama-sha256sums.txt
-          echo "Bundled Ollama ${OLLAMA_VER} (checksum verified)"
       - name: Build
         env:
           CC_aarch64_unknown_linux_gnu: aarch64-linux-gnu-gcc
diff --git a/docs/wiki/IPC-Commands.md b/docs/wiki/IPC-Commands.md
index 2c1c8b84..d0b66498 100644
--- a/docs/wiki/IPC-Commands.md
+++ b/docs/wiki/IPC-Commands.md
@@ -218,16 +218,6 @@ getAuditLogCmd(filter: AuditLogFilter) → AuditEntry[]
 ```
 Returns audit log entries. Filter by action, entity_type, date range.
 
-### `install_ollama_from_bundle`
-```typescript
-installOllamaFromBundleCmd() → string
-```
-Copies the Ollama binary bundled inside the app resources to the system install path:
-- **Linux/macOS**: `/usr/local/bin/ollama` (requires write permission — user may need to run app with elevated privileges or `sudo`)
-- **Windows**: `%LOCALAPPDATA%\Programs\Ollama\ollama.exe`
-
-Returns a success message with the install path. Errors if the bundled binary is not present in the app resources (i.e., the app was built without an Ollama bundle step in CI).
-
 ---
 
 ## Integration Commands
diff --git a/src-tauri/src/commands/system.rs b/src-tauri/src/commands/system.rs
index 404955e3..a74846df 100644
--- a/src-tauri/src/commands/system.rs
+++ b/src-tauri/src/commands/system.rs
@@ -141,74 +141,3 @@ pub async fn get_audit_log(
 
     Ok(rows)
 }
-
-// Security note: the bundled binary's integrity is guaranteed by the CI release pipeline
-// which verifies SHA256 checksums against Ollama's published sha256sums.txt before bundling.
-// Runtime re-verification is not performed here; the app bundle itself is the trust boundary.
-// On Unix, writing to /usr/local/bin requires elevated privileges. If the operation fails with
-// PermissionDenied the caller receives an actionable error message.
-#[tauri::command]
-pub async fn install_ollama_from_bundle(app: tauri::AppHandle) -> Result<String, String> {
-    use std::fs;
-    use std::path::PathBuf;
-    use tauri::Manager;
-
-    let resource_dir = app
-        .path()
-        .resource_dir()
-        .map_err(|e: tauri::Error| e.to_string())?;
-
-    let resource_path = resource_dir.join("ollama").join(if cfg!(windows) {
-        "ollama.exe"
-    } else {
-        "ollama"
-    });
-
-    if !resource_path.exists() {
-        return Err("Bundled Ollama not found in resources".to_string());
-    }
-
-    // Defense-in-depth: verify resolved path stays within the resource directory.
-    let canonical_resource = resource_path.canonicalize().map_err(|e| e.to_string())?;
-    let canonical_dir = resource_dir.canonicalize().map_err(|e| e.to_string())?;
-    if !canonical_resource.starts_with(&canonical_dir) {
-        return Err("Resource path validation failed".to_string());
-    }
-
-    #[cfg(unix)]
-    let install_path = PathBuf::from("/usr/local/bin/ollama");
-    #[cfg(windows)]
-    let install_path = {
-        let local_app_data = std::env::var("LOCALAPPDATA").map_err(|e| e.to_string())?;
-        PathBuf::from(local_app_data)
-            .join("Programs")
-            .join("Ollama")
-            .join("ollama.exe")
-    };
-
-    if let Some(parent) = install_path.parent() {
-        fs::create_dir_all(parent).map_err(|e| e.to_string())?;
-    }
-
-    fs::copy(&resource_path, &install_path).map_err(|e| {
-        if e.kind() == std::io::ErrorKind::PermissionDenied {
-            format!(
-                "Permission denied writing to {}. On Linux, re-run the app with elevated \
-                privileges or install manually: sudo cp \"{}\" /usr/local/bin/ollama",
-                install_path.display(),
-                resource_path.display()
-            )
-        } else {
-            e.to_string()
-        }
-    })?;
-
-    #[cfg(unix)]
-    {
-        use std::os::unix::fs::PermissionsExt;
-        fs::set_permissions(&install_path, fs::Permissions::from_mode(0o755))
-            .map_err(|e| e.to_string())?;
-    }
-
-    Ok(format!("Ollama installed to {}", install_path.display()))
-}
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 64c91d81..6b147e2a 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -109,7 +109,6 @@ pub fn run() {
             commands::system::get_settings,
             commands::system::update_settings,
             commands::system::get_audit_log,
-            commands::system::install_ollama_from_bundle,
         ])
         .run(tauri::generate_context!())
         .expect("Error running Troubleshooting and RCA Assistant application");
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index a5b80085..d38ffc27 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -34,7 +34,7 @@
       "icons/icon.icns",
       "icons/icon.ico"
     ],
-    "resources": ["resources/ollama/*"],
+    "resources": [],
     "externalBin": [],
     "copyright": "Troubleshooting and RCA Assistant Contributors",
     "category": "Utility",
diff --git a/src/lib/tauriCommands.ts b/src/lib/tauriCommands.ts
index bf1250f8..b1711134 100644
--- a/src/lib/tauriCommands.ts
+++ b/src/lib/tauriCommands.ts
@@ -436,6 +436,3 @@ export const getIntegrationConfigCmd = (service: string) =>
 
 export const getAllIntegrationConfigsCmd = () =>
   invoke("get_all_integration_configs");
-
-export const installOllamaFromBundleCmd = () =>
-  invoke("install_ollama_from_bundle");
diff --git a/src/pages/Settings/Ollama.tsx b/src/pages/Settings/Ollama.tsx
index cd557ad2..419047c6 100644
--- a/src/pages/Settings/Ollama.tsx
+++ b/src/pages/Settings/Ollama.tsx
@@ -24,7 +24,6 @@ import {
   deleteOllamaModelCmd,
   listOllamaModelsCmd,
   getOllamaInstallGuideCmd,
-  installOllamaFromBundleCmd,
   type OllamaStatus,
   type HardwareInfo,
   type ModelRecommendation,
@@ -44,7 +43,6 @@ export default function Ollama() {
   const [customModel, setCustomModel] = useState("");
   const [isPulling, setIsPulling] = useState(false);
   const [pullProgress, setPullProgress] = useState(0);
-  const [isInstallingBundle, setIsInstallingBundle] = useState(false);
   const [error, setError] = useState<string | null>(null);
 
   const loadData = async () => {
@@ -107,19 +105,6 @@
     }
   };
 
-  const handleInstallFromBundle = async () => {
-    setIsInstallingBundle(true);
-    setError(null);
-    try {
-      await installOllamaFromBundleCmd();
-      await loadData();
-    } catch (err) {
-      setError(String(err));
-    } finally {
-      setIsInstallingBundle(false);
-    }
-  };
-
   const handleDelete = async (modelName: string) => {
     try {
       await deleteOllamaModelCmd(modelName);
@@ -184,33 +169,16 @@
       {status && !status.installed && installGuide && (
-
-
+
           Ollama Not Detected — Installation Required
-
+
             {installGuide.steps.map((step, i) => (
               {step}
             ))}
-
-
-
         )}