Merge pull request 'feat(ui): UI fixes, theme toggle, PII persistence, Ollama install instructions' (#20) from feat/ui-fixes-ollama-bundle-theme into master

Reviewed-on: #20
This commit is contained in:
sarman 2026-04-06 01:54:36 +00:00
commit bf6e589b3c
7 changed files with 4 additions and 189 deletions

View File

@ -149,24 +149,6 @@ jobs:
pkg-config curl perl jq pkg-config curl perl jq
curl -fsSL https://deb.nodesource.com/setup_22.x | bash - curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
apt-get install -y nodejs apt-get install -y nodejs
- name: Download Ollama
run: |
OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
| grep '"tag_name"' | cut -d'"' -f4)
mkdir -p src-tauri/resources/ollama /tmp/ollama-extract
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-linux-amd64.tgz" \
-o /tmp/ollama.tgz
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-o /tmp/ollama-sha256sums.txt
EXPECTED=$(awk '$2 == "ollama-linux-amd64.tgz" {print $1}' /tmp/ollama-sha256sums.txt)
if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
ACTUAL=$(sha256sum /tmp/ollama.tgz | awk '{print $1}')
if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
tar -xzf /tmp/ollama.tgz -C /tmp/ollama-extract/
cp "$(find /tmp/ollama-extract -name 'ollama' -type f | head -1)" src-tauri/resources/ollama/ollama
chmod +x src-tauri/resources/ollama/ollama
rm -rf /tmp/ollama.tgz /tmp/ollama-extract /tmp/ollama-sha256sums.txt
echo "Bundled Ollama ${OLLAMA_VER} (checksum verified)"
- name: Build - name: Build
run: | run: |
npm ci --legacy-peer-deps npm ci --legacy-peer-deps
@ -247,25 +229,9 @@ jobs:
git checkout FETCH_HEAD git checkout FETCH_HEAD
- name: Install dependencies - name: Install dependencies
run: | run: |
apt-get update -qq && apt-get install -y -qq mingw-w64 curl nsis perl make jq unzip apt-get update -qq && apt-get install -y -qq mingw-w64 curl nsis perl make jq
curl -fsSL https://deb.nodesource.com/setup_22.x | bash - curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
apt-get install -y nodejs apt-get install -y nodejs
- name: Download Ollama
run: |
OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
| grep '"tag_name"' | cut -d'"' -f4)
mkdir -p src-tauri/resources/ollama
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-windows-amd64.zip" \
-o /tmp/ollama-win.zip
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-o /tmp/ollama-sha256sums.txt
EXPECTED=$(awk '$2 == "ollama-windows-amd64.zip" {print $1}' /tmp/ollama-sha256sums.txt)
if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
ACTUAL=$(sha256sum /tmp/ollama-win.zip | awk '{print $1}')
if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
unzip -jo /tmp/ollama-win.zip 'ollama.exe' -d src-tauri/resources/ollama/
rm /tmp/ollama-win.zip /tmp/ollama-sha256sums.txt
echo "Bundled Ollama ${OLLAMA_VER} for Windows (checksum verified)"
- name: Build - name: Build
env: env:
CC_x86_64_pc_windows_gnu: x86_64-w64-mingw32-gcc CC_x86_64_pc_windows_gnu: x86_64-w64-mingw32-gcc
@ -347,22 +313,6 @@ jobs:
git remote add origin http://172.0.0.29:3000/sarman/tftsr-devops_investigation.git git remote add origin http://172.0.0.29:3000/sarman/tftsr-devops_investigation.git
git fetch --depth=1 origin "$GITHUB_SHA" git fetch --depth=1 origin "$GITHUB_SHA"
git checkout FETCH_HEAD git checkout FETCH_HEAD
- name: Download Ollama
run: |
OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
| python3 -c "import sys,json; print(json.load(sys.stdin)['tag_name'])")
mkdir -p src-tauri/resources/ollama
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-darwin" \
-o src-tauri/resources/ollama/ollama
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-o /tmp/ollama-sha256sums.txt
EXPECTED=$(awk '$2 == "ollama-darwin" {print $1}' /tmp/ollama-sha256sums.txt)
if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
ACTUAL=$(shasum -a 256 src-tauri/resources/ollama/ollama | awk '{print $1}')
if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
chmod +x src-tauri/resources/ollama/ollama
rm /tmp/ollama-sha256sums.txt
echo "Bundled Ollama ${OLLAMA_VER} for macOS (checksum verified)"
- name: Build - name: Build
env: env:
MACOSX_DEPLOYMENT_TARGET: "11.0" MACOSX_DEPLOYMENT_TARGET: "11.0"
@ -489,24 +439,6 @@ jobs:
# source "$HOME/.cargo/env" in the Build step handles PATH — no GITHUB_PATH needed # source "$HOME/.cargo/env" in the Build step handles PATH — no GITHUB_PATH needed
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
--default-toolchain 1.88.0 --profile minimal --no-modify-path --default-toolchain 1.88.0 --profile minimal --no-modify-path
- name: Download Ollama
run: |
OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
| grep '"tag_name"' | cut -d'"' -f4)
mkdir -p src-tauri/resources/ollama /tmp/ollama-extract
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-linux-arm64.tgz" \
-o /tmp/ollama.tgz
curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
-o /tmp/ollama-sha256sums.txt
EXPECTED=$(awk '$2 == "ollama-linux-arm64.tgz" {print $1}' /tmp/ollama-sha256sums.txt)
if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
ACTUAL=$(sha256sum /tmp/ollama.tgz | awk '{print $1}')
if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
tar -xzf /tmp/ollama.tgz -C /tmp/ollama-extract/
cp "$(find /tmp/ollama-extract -name 'ollama' -type f | head -1)" src-tauri/resources/ollama/ollama
chmod +x src-tauri/resources/ollama/ollama
rm -rf /tmp/ollama.tgz /tmp/ollama-extract /tmp/ollama-sha256sums.txt
echo "Bundled Ollama ${OLLAMA_VER} (checksum verified)"
- name: Build - name: Build
env: env:
CC_aarch64_unknown_linux_gnu: aarch64-linux-gnu-gcc CC_aarch64_unknown_linux_gnu: aarch64-linux-gnu-gcc

View File

@ -218,16 +218,6 @@ getAuditLogCmd(filter: AuditLogFilter) → AuditEntry[]
``` ```
Returns audit log entries. Filter by action, entity_type, date range. Returns audit log entries. Filter by action, entity_type, date range.
### `install_ollama_from_bundle`
```typescript
installOllamaFromBundleCmd() → string
```
Copies the Ollama binary bundled inside the app resources to the system install path:
- **Linux/macOS**: `/usr/local/bin/ollama` (requires write permission — user may need to run app with elevated privileges or `sudo`)
- **Windows**: `%LOCALAPPDATA%\Programs\Ollama\ollama.exe`
Returns a success message with the install path. Errors if the bundled binary is not present in the app resources (i.e., the app was built without an Ollama bundle step in CI).
--- ---
## Integration Commands ## Integration Commands

View File

@ -141,74 +141,3 @@ pub async fn get_audit_log(
Ok(rows) Ok(rows)
} }
// Security note: the bundled binary's integrity is guaranteed by the CI release pipeline
// which verifies SHA256 checksums against Ollama's published sha256sums.txt before bundling.
// Runtime re-verification is not performed here; the app bundle itself is the trust boundary.
// On Unix, writing to /usr/local/bin requires elevated privileges. If the operation fails with
// PermissionDenied the caller receives an actionable error message.
#[tauri::command]
/// Copies the Ollama binary bundled in the app's resources to a system-wide
/// install location:
/// - Unix: `/usr/local/bin/ollama` (may require elevated privileges to write)
/// - Windows: `%LOCALAPPDATA%\Programs\Ollama\ollama.exe`
///
/// Returns a success message containing the install path, or an error string
/// when the bundled binary is absent, path validation fails, or the copy /
/// permission-set step fails.
pub async fn install_ollama_from_bundle(app: tauri::AppHandle) -> Result<String, String> {
use std::fs;
use std::path::PathBuf;
use tauri::Manager;
// Resolve the app's bundled-resources directory via Tauri's path resolver.
let resource_dir = app
.path()
.resource_dir()
.map_err(|e: tauri::Error| e.to_string())?;
// The CI pipeline places the binary at resources/ollama/ollama[.exe].
let resource_path = resource_dir.join("ollama").join(if cfg!(windows) {
"ollama.exe"
} else {
"ollama"
});
// Fail fast with a clear message when the app was built without the
// Ollama bundling step (resource simply isn't there).
if !resource_path.exists() {
return Err("Bundled Ollama not found in resources".to_string());
}
// Defense-in-depth: verify resolved path stays within the resource directory.
// canonicalize() resolves symlinks/`..`, so a path escaping resource_dir
// would fail the starts_with check below.
let canonical_resource = resource_path.canonicalize().map_err(|e| e.to_string())?;
let canonical_dir = resource_dir.canonicalize().map_err(|e| e.to_string())?;
if !canonical_resource.starts_with(&canonical_dir) {
return Err("Resource path validation failed".to_string());
}
// Platform-specific destination; exactly one of these is compiled in.
#[cfg(unix)]
let install_path = PathBuf::from("/usr/local/bin/ollama");
#[cfg(windows)]
let install_path = {
let local_app_data = std::env::var("LOCALAPPDATA").map_err(|e| e.to_string())?;
PathBuf::from(local_app_data)
.join("Programs")
.join("Ollama")
.join("ollama.exe")
};
// Ensure the destination directory exists (no-op if already present).
if let Some(parent) = install_path.parent() {
fs::create_dir_all(parent).map_err(|e| e.to_string())?;
}
// PermissionDenied is the expected failure mode on Unix (writing to
// /usr/local/bin as a regular user), so it gets an actionable message
// with a manual `sudo cp` fallback; other errors pass through verbatim.
fs::copy(&resource_path, &install_path).map_err(|e| {
if e.kind() == std::io::ErrorKind::PermissionDenied {
format!(
"Permission denied writing to {}. On Linux, re-run the app with elevated \
privileges or install manually: sudo cp \"{}\" /usr/local/bin/ollama",
install_path.display(),
resource_path.display()
)
} else {
e.to_string()
}
})?;
// fs::copy does not guarantee the execute bit carries over; set rwxr-xr-x
// explicitly so the installed binary is runnable. Unix-only concept.
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
fs::set_permissions(&install_path, fs::Permissions::from_mode(0o755))
.map_err(|e| e.to_string())?;
}
Ok(format!("Ollama installed to {}", install_path.display()))
}

View File

@ -109,7 +109,6 @@ pub fn run() {
commands::system::get_settings, commands::system::get_settings,
commands::system::update_settings, commands::system::update_settings,
commands::system::get_audit_log, commands::system::get_audit_log,
commands::system::install_ollama_from_bundle,
]) ])
.run(tauri::generate_context!()) .run(tauri::generate_context!())
.expect("Error running Troubleshooting and RCA Assistant application"); .expect("Error running Troubleshooting and RCA Assistant application");

View File

@ -34,7 +34,7 @@
"icons/icon.icns", "icons/icon.icns",
"icons/icon.ico" "icons/icon.ico"
], ],
"resources": ["resources/ollama/*"], "resources": [],
"externalBin": [], "externalBin": [],
"copyright": "Troubleshooting and RCA Assistant Contributors", "copyright": "Troubleshooting and RCA Assistant Contributors",
"category": "Utility", "category": "Utility",

View File

@ -436,6 +436,3 @@ export const getIntegrationConfigCmd = (service: string) =>
export const getAllIntegrationConfigsCmd = () => export const getAllIntegrationConfigsCmd = () =>
invoke<IntegrationConfig[]>("get_all_integration_configs"); invoke<IntegrationConfig[]>("get_all_integration_configs");
// Invokes the backend `install_ollama_from_bundle` Tauri command, which copies
// the Ollama binary bundled in the app resources to the system install path.
// Resolves with a human-readable success message; rejects with an error string
// (e.g. when no bundled binary is present or the copy is denied).
export const installOllamaFromBundleCmd = () =>
invoke<string>("install_ollama_from_bundle");

View File

@ -24,7 +24,6 @@ import {
deleteOllamaModelCmd, deleteOllamaModelCmd,
listOllamaModelsCmd, listOllamaModelsCmd,
getOllamaInstallGuideCmd, getOllamaInstallGuideCmd,
installOllamaFromBundleCmd,
type OllamaStatus, type OllamaStatus,
type HardwareInfo, type HardwareInfo,
type ModelRecommendation, type ModelRecommendation,
@ -44,7 +43,6 @@ export default function Ollama() {
const [customModel, setCustomModel] = useState(""); const [customModel, setCustomModel] = useState("");
const [isPulling, setIsPulling] = useState(false); const [isPulling, setIsPulling] = useState(false);
const [pullProgress, setPullProgress] = useState(0); const [pullProgress, setPullProgress] = useState(0);
const [isInstallingBundle, setIsInstallingBundle] = useState(false);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const loadData = async () => { const loadData = async () => {
@ -107,19 +105,6 @@ export default function Ollama() {
} }
}; };
// Click handler for the offline "Install Ollama" button: runs the bundled-binary
// install command, then refreshes Ollama status/model data on success.
const handleInstallFromBundle = async () => {
// Busy flag — disables the button and swaps its label to "Installing...".
setIsInstallingBundle(true);
setError(null);
try {
await installOllamaFromBundleCmd();
// Re-query backend state so the UI reflects the newly installed binary.
await loadData();
} catch (err) {
// Tauri command rejections arrive as plain strings; coerce defensively.
setError(String(err));
} finally {
setIsInstallingBundle(false);
}
};
const handleDelete = async (modelName: string) => { const handleDelete = async (modelName: string) => {
try { try {
await deleteOllamaModelCmd(modelName); await deleteOllamaModelCmd(modelName);
@ -184,33 +169,16 @@ export default function Ollama() {
{status && !status.installed && installGuide && ( {status && !status.installed && installGuide && (
<Card className="border-yellow-500/50"> <Card className="border-yellow-500/50">
<CardHeader> <CardHeader>
<CardTitle className="text-lg flex items-center gap-2"> <CardTitle className="text-lg">
<Download className="w-5 h-5 text-yellow-500" />
Ollama Not Detected Installation Required Ollama Not Detected Installation Required
</CardTitle> </CardTitle>
</CardHeader> </CardHeader>
<CardContent className="space-y-4"> <CardContent>
<ol className="space-y-2 list-decimal list-inside"> <ol className="space-y-2 list-decimal list-inside">
{installGuide.steps.map((step, i) => ( {installGuide.steps.map((step, i) => (
<li key={i} className="text-sm text-muted-foreground">{step}</li> <li key={i} className="text-sm text-muted-foreground">{step}</li>
))} ))}
</ol> </ol>
<div className="flex flex-wrap gap-2">
<Button
variant="outline"
onClick={() => window.open(installGuide.url, "_blank")}
>
<Download className="w-4 h-4 mr-2" />
Download Ollama for {installGuide.platform}
</Button>
<Button
onClick={handleInstallFromBundle}
disabled={isInstallingBundle}
>
<Download className="w-4 h-4 mr-2" />
{isInstallingBundle ? "Installing..." : "Install Ollama (Offline)"}
</Button>
</div>
</CardContent> </CardContent>
</Card> </Card>
)} )}