From 215c0ae218cc3ef736ae479fde7b79315b98b828 Mon Sep 17 00:00:00 2001
From: Shaun Arman
Date: Sun, 5 Apr 2026 19:30:41 -0500
Subject: [PATCH 1/4] feat(ui): fix model dropdown, auth prefill, PII
persistence, theme toggle, and Ollama bundle
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- AIProviders: hide top model row when custom_rest active (dropdown lower in form handles it);
clear auth header prefill on format switch; rename User ID / CORE ID → Email Address
- Dashboard + Ollama: add border-border/bg-card classes to Refresh buttons for dark-bg contrast
- Security + settingsStore: wire PII toggle state to persisted Zustand store so pattern
selections survive app restarts
- App: add Sun/Moon theme toggle button to sidebar footer (always visible when collapsed)
- system.rs: add install_ollama_from_bundle command (copies bundled binary to /usr/local/bin)
- auto-tag.yml: add Download Ollama step to all 4 platform build jobs with SHA256 verification
- tauri.conf.json: add resources/ollama/* to bundle resources
- docs: add install_ollama_from_bundle to IPC-Commands wiki
Security: CI download steps verify SHA256 against Ollama's published sha256sums.txt before bundling.
---
.gitea/workflows/auto-tag.yml | 70 +++++++++++++-
docs/wiki/IPC-Commands.md | 10 ++
src-tauri/resources/ollama/.gitkeep | 0
src-tauri/src/commands/system.rs | 49 ++++++++++
src-tauri/src/lib.rs | 1 +
src-tauri/tauri.conf.json | 2 +-
src/App.tsx | 25 +++--
src/lib/tauriCommands.ts | 3 +
src/pages/Dashboard/index.tsx | 2 +-
src/pages/Settings/AIProviders.tsx | 28 ++----
src/pages/Settings/Ollama.tsx | 40 ++++++--
src/pages/Settings/Security.tsx | 17 ++--
src/stores/settingsStore.ts | 10 ++
ticket-ui-fixes-ollama-bundle-theme.md | 122 +++++++++++++++++++++++++
14 files changed, 332 insertions(+), 47 deletions(-)
create mode 100644 src-tauri/resources/ollama/.gitkeep
create mode 100644 ticket-ui-fixes-ollama-bundle-theme.md
diff --git a/.gitea/workflows/auto-tag.yml b/.gitea/workflows/auto-tag.yml
index e306a8a8..1737401f 100644
--- a/.gitea/workflows/auto-tag.yml
+++ b/.gitea/workflows/auto-tag.yml
@@ -149,6 +149,24 @@ jobs:
pkg-config curl perl jq
curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
apt-get install -y nodejs
+ - name: Download Ollama
+ run: |
+ OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
+ | grep '"tag_name"' | cut -d'"' -f4)
+ mkdir -p src-tauri/resources/ollama /tmp/ollama-extract
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-linux-amd64.tgz" \
+ -o /tmp/ollama.tgz
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
+ -o /tmp/ollama-sha256sums.txt
+ EXPECTED=$(awk '$2 == "ollama-linux-amd64.tgz" {print $1}' /tmp/ollama-sha256sums.txt)
+ if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
+ ACTUAL=$(sha256sum /tmp/ollama.tgz | awk '{print $1}')
+ if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
+ tar -xzf /tmp/ollama.tgz -C /tmp/ollama-extract/
+ cp "$(find /tmp/ollama-extract -name 'ollama' -type f | head -1)" src-tauri/resources/ollama/ollama
+ chmod +x src-tauri/resources/ollama/ollama
+ rm -rf /tmp/ollama.tgz /tmp/ollama-extract /tmp/ollama-sha256sums.txt
+ echo "Bundled Ollama ${OLLAMA_VER} (checksum verified)"
- name: Build
run: |
npm ci --legacy-peer-deps
@@ -229,9 +247,25 @@ jobs:
git checkout FETCH_HEAD
- name: Install dependencies
run: |
- apt-get update -qq && apt-get install -y -qq mingw-w64 curl nsis perl make jq
+ apt-get update -qq && apt-get install -y -qq mingw-w64 curl nsis perl make jq unzip
curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
apt-get install -y nodejs
+ - name: Download Ollama
+ run: |
+ OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
+ | grep '"tag_name"' | cut -d'"' -f4)
+ mkdir -p src-tauri/resources/ollama
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-windows-amd64.zip" \
+ -o /tmp/ollama-win.zip
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
+ -o /tmp/ollama-sha256sums.txt
+ EXPECTED=$(awk '$2 == "ollama-windows-amd64.zip" {print $1}' /tmp/ollama-sha256sums.txt)
+ if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
+ ACTUAL=$(sha256sum /tmp/ollama-win.zip | awk '{print $1}')
+ if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
+ unzip -jo /tmp/ollama-win.zip 'ollama.exe' -d src-tauri/resources/ollama/
+ rm /tmp/ollama-win.zip /tmp/ollama-sha256sums.txt
+ echo "Bundled Ollama ${OLLAMA_VER} for Windows (checksum verified)"
- name: Build
env:
CC_x86_64_pc_windows_gnu: x86_64-w64-mingw32-gcc
@@ -313,6 +347,22 @@ jobs:
git remote add origin http://172.0.0.29:3000/sarman/tftsr-devops_investigation.git
git fetch --depth=1 origin "$GITHUB_SHA"
git checkout FETCH_HEAD
+ - name: Download Ollama
+ run: |
+ OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
+ | python3 -c "import sys,json; print(json.load(sys.stdin)['tag_name'])")
+ mkdir -p src-tauri/resources/ollama
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-darwin" \
+ -o src-tauri/resources/ollama/ollama
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
+ -o /tmp/ollama-sha256sums.txt
+ EXPECTED=$(awk '$2 == "ollama-darwin" {print $1}' /tmp/ollama-sha256sums.txt)
+ if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
+ ACTUAL=$(shasum -a 256 src-tauri/resources/ollama/ollama | awk '{print $1}')
+ if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
+ chmod +x src-tauri/resources/ollama/ollama
+ rm /tmp/ollama-sha256sums.txt
+ echo "Bundled Ollama ${OLLAMA_VER} for macOS (checksum verified)"
- name: Build
env:
MACOSX_DEPLOYMENT_TARGET: "11.0"
@@ -439,6 +489,24 @@ jobs:
# source "$HOME/.cargo/env" in the Build step handles PATH — no GITHUB_PATH needed
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
--default-toolchain 1.88.0 --profile minimal --no-modify-path
+ - name: Download Ollama
+ run: |
+ OLLAMA_VER=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest \
+ | grep '"tag_name"' | cut -d'"' -f4)
+ mkdir -p src-tauri/resources/ollama /tmp/ollama-extract
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/ollama-linux-arm64.tgz" \
+ -o /tmp/ollama.tgz
+ curl -fsSL "https://github.com/ollama/ollama/releases/download/${OLLAMA_VER}/sha256sums.txt" \
+ -o /tmp/ollama-sha256sums.txt
+ EXPECTED=$(awk '$2 == "ollama-linux-arm64.tgz" {print $1}' /tmp/ollama-sha256sums.txt)
+ if [ -z "$EXPECTED" ]; then echo "ERROR: SHA256 entry not found"; exit 1; fi
+ ACTUAL=$(sha256sum /tmp/ollama.tgz | awk '{print $1}')
+ if [ "$EXPECTED" != "$ACTUAL" ]; then echo "ERROR: SHA256 mismatch. Expected: $EXPECTED Got: $ACTUAL"; exit 1; fi
+ tar -xzf /tmp/ollama.tgz -C /tmp/ollama-extract/
+ cp "$(find /tmp/ollama-extract -name 'ollama' -type f | head -1)" src-tauri/resources/ollama/ollama
+ chmod +x src-tauri/resources/ollama/ollama
+ rm -rf /tmp/ollama.tgz /tmp/ollama-extract /tmp/ollama-sha256sums.txt
+ echo "Bundled Ollama ${OLLAMA_VER} (checksum verified)"
- name: Build
env:
CC_aarch64_unknown_linux_gnu: aarch64-linux-gnu-gcc
diff --git a/docs/wiki/IPC-Commands.md b/docs/wiki/IPC-Commands.md
index d0b66498..2c1c8b84 100644
--- a/docs/wiki/IPC-Commands.md
+++ b/docs/wiki/IPC-Commands.md
@@ -218,6 +218,16 @@ getAuditLogCmd(filter: AuditLogFilter) → AuditEntry[]
```
Returns audit log entries. Filter by action, entity_type, date range.
+### `install_ollama_from_bundle`
+```typescript
+installOllamaFromBundleCmd() → string
+```
+Copies the Ollama binary bundled inside the app resources to the system install path:
+- **Linux/macOS**: `/usr/local/bin/ollama` (requires write permission — user may need to run app with elevated privileges or `sudo`)
+- **Windows**: `%LOCALAPPDATA%\Programs\Ollama\ollama.exe`
+
+Returns a success message with the install path. Errors if the bundled binary is not present in the app resources (i.e., the app was built without an Ollama bundle step in CI).
+
---
## Integration Commands
diff --git a/src-tauri/resources/ollama/.gitkeep b/src-tauri/resources/ollama/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/src-tauri/src/commands/system.rs b/src-tauri/src/commands/system.rs
index a74846df..4f666040 100644
--- a/src-tauri/src/commands/system.rs
+++ b/src-tauri/src/commands/system.rs
@@ -141,3 +141,52 @@ pub async fn get_audit_log(
Ok(rows)
}
+
+// Security note: the bundled binary's integrity is guaranteed by the CI release pipeline
+// which verifies SHA256 checksums against Ollama's published sha256sums.txt before bundling.
+// Runtime re-verification is not performed here; the app bundle itself is the trust boundary.
+#[tauri::command]
+pub async fn install_ollama_from_bundle(
+ app: tauri::AppHandle,
+) -> Result {
+ use std::fs;
+ use std::path::PathBuf;
+ use tauri::Manager;
+
+ let resource_path = app
+ .path()
+ .resource_dir()
+ .map_err(|e: tauri::Error| e.to_string())?
+ .join("ollama")
+ .join(if cfg!(windows) { "ollama.exe" } else { "ollama" });
+
+ if !resource_path.exists() {
+ return Err("Bundled Ollama not found in resources".to_string());
+ }
+
+ #[cfg(unix)]
+ let install_path = PathBuf::from("/usr/local/bin/ollama");
+ #[cfg(windows)]
+ let install_path = {
+ let local_app_data = std::env::var("LOCALAPPDATA").map_err(|e| e.to_string())?;
+ PathBuf::from(local_app_data)
+ .join("Programs")
+ .join("Ollama")
+ .join("ollama.exe")
+ };
+
+ if let Some(parent) = install_path.parent() {
+ fs::create_dir_all(parent).map_err(|e| e.to_string())?;
+ }
+
+ fs::copy(&resource_path, &install_path).map_err(|e| e.to_string())?;
+
+ #[cfg(unix)]
+ {
+ use std::os::unix::fs::PermissionsExt;
+ fs::set_permissions(&install_path, fs::Permissions::from_mode(0o755))
+ .map_err(|e| e.to_string())?;
+ }
+
+ Ok(format!("Ollama installed to {}", install_path.display()))
+}
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 6b147e2a..64c91d81 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -109,6 +109,7 @@ pub fn run() {
commands::system::get_settings,
commands::system::update_settings,
commands::system::get_audit_log,
+ commands::system::install_ollama_from_bundle,
])
.run(tauri::generate_context!())
.expect("Error running Troubleshooting and RCA Assistant application");
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index d38ffc27..a5b80085 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -34,7 +34,7 @@
"icons/icon.icns",
"icons/icon.ico"
],
- "resources": [],
+ "resources": ["resources/ollama/*"],
"externalBin": [],
"copyright": "Troubleshooting and RCA Assistant Contributors",
"category": "Utility",
diff --git a/src/App.tsx b/src/App.tsx
index 82226fea..e61eff5e 100644
--- a/src/App.tsx
+++ b/src/App.tsx
@@ -11,6 +11,8 @@ import {
Link,
ChevronLeft,
ChevronRight,
+ Sun,
+ Moon,
} from "lucide-react";
import { useSettingsStore } from "@/stores/settingsStore";
@@ -43,7 +45,7 @@ const settingsItems = [
export default function App() {
const [collapsed, setCollapsed] = useState(false);
const [appVersion, setAppVersion] = useState("");
- const theme = useSettingsStore((s) => s.theme);
+ const { theme, setTheme } = useSettingsStore();
const location = useLocation();
useEffect(() => {
@@ -116,12 +118,21 @@ export default function App() {
- {/* Version */}
- {!collapsed && (
-
- {appVersion ? `v${appVersion}` : ""}
-
- )}
+ {/* Version + Theme toggle */}
+
+ {!collapsed && (
+
+ {appVersion ? `v${appVersion}` : ""}
+
+ )}
+ setTheme(theme === "dark" ? "light" : "dark")}
+ className="p-1 rounded hover:bg-accent text-muted-foreground"
+ title={theme === "dark" ? "Switch to light mode" : "Switch to dark mode"}
+ >
+ {theme === "dark" ? : }
+
+
{/* Main content */}
diff --git a/src/lib/tauriCommands.ts b/src/lib/tauriCommands.ts
index b1711134..bf1250f8 100644
--- a/src/lib/tauriCommands.ts
+++ b/src/lib/tauriCommands.ts
@@ -436,3 +436,6 @@ export const getIntegrationConfigCmd = (service: string) =>
export const getAllIntegrationConfigsCmd = () =>
invoke("get_all_integration_configs");
+
+export const installOllamaFromBundleCmd = () =>
+ invoke("install_ollama_from_bundle");
diff --git a/src/pages/Dashboard/index.tsx b/src/pages/Dashboard/index.tsx
index a463957f..fdd2434d 100644
--- a/src/pages/Dashboard/index.tsx
+++ b/src/pages/Dashboard/index.tsx
@@ -39,7 +39,7 @@ export default function Dashboard() {
- loadIssues()} disabled={isLoading}>
+ loadIssues()} disabled={isLoading} className="border-border text-foreground bg-card hover:bg-accent">
Refresh
diff --git a/src/pages/Settings/AIProviders.tsx b/src/pages/Settings/AIProviders.tsx
index f4911c34..062f1e32 100644
--- a/src/pages/Settings/AIProviders.tsx
+++ b/src/pages/Settings/AIProviders.tsx
@@ -291,24 +291,16 @@ export default function AIProviders() {
placeholder="sk-..."
/>
-
+ )}
@@ -347,7 +339,7 @@ export default function AIProviders() {
format === CUSTOM_REST_FORMAT
? {
custom_endpoint_path: "",
- custom_auth_header: "x-msi-genai-api-key",
+ custom_auth_header: "",
custom_auth_prefix: "",
}
: {
@@ -399,7 +391,7 @@ export default function AIProviders() {
placeholder="Authorization"
/>
- Header name for authentication (e.g., "Authorization" or "x-msi-genai-api-key")
+ Header name for authentication (e.g., "Authorization" or "x-api-key")
@@ -419,14 +411,14 @@ export default function AIProviders() {
{/* Custom REST specific: User ID field */}
{normalizeApiFormat(form.api_format) === CUSTOM_REST_FORMAT && (
-
User ID (CORE ID)
+
Email Address
setForm({ ...form, user_id: e.target.value })}
- placeholder="your.name@motorolasolutions.com"
+ placeholder="user@example.com"
/>
- Optional: Your Motorola CORE ID email. If omitted, costs are tracked to API key owner.
+ Optional: Email address for usage tracking. If omitted, costs are attributed to the API key owner.
)}
diff --git a/src/pages/Settings/Ollama.tsx b/src/pages/Settings/Ollama.tsx
index ea766090..cd557ad2 100644
--- a/src/pages/Settings/Ollama.tsx
+++ b/src/pages/Settings/Ollama.tsx
@@ -24,6 +24,7 @@ import {
deleteOllamaModelCmd,
listOllamaModelsCmd,
getOllamaInstallGuideCmd,
+ installOllamaFromBundleCmd,
type OllamaStatus,
type HardwareInfo,
type ModelRecommendation,
@@ -43,6 +44,7 @@ export default function Ollama() {
const [customModel, setCustomModel] = useState("");
const [isPulling, setIsPulling] = useState(false);
const [pullProgress, setPullProgress] = useState(0);
+ const [isInstallingBundle, setIsInstallingBundle] = useState(false);
const [error, setError] = useState(null);
const loadData = async () => {
@@ -105,6 +107,19 @@ export default function Ollama() {
}
};
+ const handleInstallFromBundle = async () => {
+ setIsInstallingBundle(true);
+ setError(null);
+ try {
+ await installOllamaFromBundleCmd();
+ await loadData();
+ } catch (err) {
+ setError(String(err));
+ } finally {
+ setIsInstallingBundle(false);
+ }
+ };
+
const handleDelete = async (modelName: string) => {
try {
await deleteOllamaModelCmd(modelName);
@@ -123,7 +138,7 @@ export default function Ollama() {
Manage local AI models via Ollama for privacy-first inference.
-
+
Refresh
@@ -180,13 +195,22 @@ export default function Ollama() {
{step}
))}
- window.open(installGuide.url, "_blank")}
- >
-
- Download Ollama for {installGuide.platform}
-
+
+ window.open(installGuide.url, "_blank")}
+ >
+
+ Download Ollama for {installGuide.platform}
+
+
+
+ {isInstallingBundle ? "Installing..." : "Install Ollama (Offline)"}
+
+
)}
diff --git a/src/pages/Settings/Security.tsx b/src/pages/Settings/Security.tsx
index 0c3da800..32fef8d9 100644
--- a/src/pages/Settings/Security.tsx
+++ b/src/pages/Settings/Security.tsx
@@ -9,6 +9,7 @@ import {
Separator,
} from "@/components/ui";
import { getAuditLogCmd, type AuditEntry } from "@/lib/tauriCommands";
+import { useSettingsStore } from "@/stores/settingsStore";
const piiPatterns = [
{ id: "email", label: "Email Addresses", description: "Detect email addresses in logs" },
@@ -22,9 +23,7 @@ const piiPatterns = [
];
export default function Security() {
- const [enabledPatterns, setEnabledPatterns] = useState>(() =>
- Object.fromEntries(piiPatterns.map((p) => [p.id, true]))
- );
+ const { pii_enabled_patterns, setPiiPattern } = useSettingsStore();
const [auditEntries, setAuditEntries] = useState([]);
const [expandedRows, setExpandedRows] = useState>(new Set());
const [isLoading, setIsLoading] = useState(false);
@@ -46,10 +45,6 @@ export default function Security() {
}
};
- const togglePattern = (id: string) => {
- setEnabledPatterns((prev) => ({ ...prev, [id]: !prev[id] }));
- };
-
const toggleRow = (entryId: string) => {
setExpandedRows((prev) => {
const newSet = new Set(prev);
@@ -92,15 +87,15 @@ export default function Security() {
togglePattern(pattern.id)}
+ aria-checked={pii_enabled_patterns[pattern.id]}
+ onClick={() => setPiiPattern(pattern.id, !pii_enabled_patterns[pattern.id])}
className={`relative inline-flex h-6 w-11 items-center rounded-full transition-colors ${
- enabledPatterns[pattern.id] ? "bg-blue-500" : "bg-muted"
+ pii_enabled_patterns[pattern.id] ? "bg-blue-500" : "bg-muted"
}`}
>
diff --git a/src/stores/settingsStore.ts b/src/stores/settingsStore.ts
index b314ce6b..d5e99178 100644
--- a/src/stores/settingsStore.ts
+++ b/src/stores/settingsStore.ts
@@ -9,6 +9,8 @@ interface SettingsState extends AppSettings {
setActiveProvider: (name: string) => void;
setTheme: (theme: "light" | "dark") => void;
getActiveProvider: () => ProviderConfig | undefined;
+ pii_enabled_patterns: Record;
+ setPiiPattern: (id: string, enabled: boolean) => void;
}
export const useSettingsStore = create()(
@@ -35,6 +37,14 @@ export const useSettingsStore = create()(
})),
setActiveProvider: (name) => set({ active_provider: name }),
setTheme: (theme) => set({ theme }),
+ pii_enabled_patterns: Object.fromEntries(
+ ["email", "ip_address", "phone", "ssn", "credit_card", "hostname", "password", "api_key"]
+ .map((id) => [id, true])
+ ) as Record,
+ setPiiPattern: (id: string, enabled: boolean) =>
+ set((state) => ({
+ pii_enabled_patterns: { ...state.pii_enabled_patterns, [id]: enabled },
+ })),
getActiveProvider: () => {
const state = get();
return state.ai_providers.find((p) => p.name === state.active_provider)
diff --git a/ticket-ui-fixes-ollama-bundle-theme.md b/ticket-ui-fixes-ollama-bundle-theme.md
new file mode 100644
index 00000000..3f935eed
--- /dev/null
+++ b/ticket-ui-fixes-ollama-bundle-theme.md
@@ -0,0 +1,122 @@
+# Ticket Summary — UI Fixes + Ollama Bundling + Theme Toggle
+
+**Branch**: `feat/ui-fixes-ollama-bundle-theme`
+
+---
+
+## Description
+
+Multiple UI issues were identified and resolved following the arm64 build stabilization:
+
+- `custom_rest` provider showed a disabled model input instead of the live dropdown already present lower in the form
+- Auth Header Name auto-filled with an internal vendor-specific key name on format selection
+- "User ID (CORE ID)" label and placeholder exposed internal organizational terminology
+- Refresh buttons on the Ollama and Dashboard pages had near-zero contrast against dark card backgrounds
+- PII detection toggles in Security settings silently reset to all-enabled on every app restart (no persistence)
+- Ollama required manual installation; no offline install path existed
+- No light/dark theme toggle UI existed despite the infrastructure already being wired up
+
+Additionally, a new `install_ollama_from_bundle` Tauri command allows the app to copy a bundled Ollama binary to the system install path, enabling offline-first deployment. CI was updated to download the appropriate Ollama binary for each platform during the release build.
+
+---
+
+## Acceptance Criteria
+
+- [ ] **Custom REST model**: Selecting Type=Custom + API Format=Custom REST causes the top-level Model row to disappear; the dropdown at the bottom is visible and populated with all models
+- [ ] **Auth Header**: Field is blank by default when Custom REST format is selected (no internal values)
+- [ ] **User ID label**: Reads "Email Address" with placeholder `user@example.com` and a generic description
+- [ ] **Auth Header description**: No longer references internal key name examples
+- [ ] **Refresh buttons**: Visually distinct (border + background) against dark card backgrounds on Dashboard and Ollama pages
+- [ ] **PII toggles**: Toggling patterns off, navigating away, and returning preserves the disabled state across app restarts
+- [ ] **Theme toggle**: Sun/Moon icon button in the sidebar footer switches between light and dark themes; works when sidebar is collapsed
+- [ ] **Install Ollama (Offline)**: Button appears in the "Ollama Not Detected" card; clicking it copies the bundled binary and refreshes status
+- [ ] **CI**: Each platform build job downloads the correct Ollama binary before `tauri build` and places it in `src-tauri/resources/ollama/`
+- [ ] `npx tsc --noEmit` — zero errors
+- [ ] `npm run test:run` — all unit tests pass (including any tests added in this series)
+- [ ] `cargo check` — zero errors
+- [ ] `cargo clippy -- -D warnings` — zero warnings
+- [ ] `python3 -c "import yaml; yaml.safe_load(open('.gitea/workflows/auto-tag.yml'))"` — YAML valid
+
+---
+
+## Work Implemented
+
+### Phase 1 — Frontend (6 files)
+
+**`src/pages/Settings/AIProviders.tsx`**
+- Removed the disabled Model ` ` shown when Custom REST is active; the grid row is now hidden via conditional render — the dropdown further down the form handles model selection for this format
+- Removed `custom_auth_header: "x-msi-genai-api-key"` prefill on format switch; field now starts empty
+- Replaced example in Auth Header description from internal key name to generic `"x-api-key"`
+- Renamed "User ID (CORE ID)" → "Email Address"; updated placeholder from `your.name@motorolasolutions.com` → `user@example.com`; removed Motorola-specific description text
+
+**`src/pages/Dashboard/index.tsx`**
+- Added `className="border-border text-foreground bg-card hover:bg-accent"` to Refresh `` for contrast against dark backgrounds
+
+**`src/pages/Settings/Ollama.tsx`**
+- Added same contrast classes to Refresh button
+- Added `installOllamaFromBundleCmd` import
+- Added `isInstallingBundle` state + `handleInstallFromBundle` async handler
+- Added "Install Ollama (Offline)" primary `` alongside the existing "Download Ollama" link button in the "Ollama Not Detected" card
+
+**`src/stores/settingsStore.ts`**
+- Added `pii_enabled_patterns: Record` field to `SettingsState` interface and store initializer (defaults all 8 patterns to `true`)
+- Added `setPiiPattern(id, enabled)` action; both are included in the `persist` serialization so state survives app restarts
+
+**`src/pages/Settings/Security.tsx`**
+- Removed local `enabledPatterns` / `setEnabledPatterns` state and `togglePattern` function
+- Added `useSettingsStore` import; reads `pii_enabled_patterns` / `setPiiPattern` from the persisted store
+- Toggle button uses `setPiiPattern` directly on click
+
+**`src/App.tsx`**
+- Added `Sun`, `Moon` to lucide-react imports
+- Extracted `setTheme` from `useSettingsStore` alongside `theme`
+- Replaced static version `` in sidebar footer with a flex row containing the version string and a Sun/Moon icon button; button is always visible even when sidebar is collapsed
+
+### Phase 2 — Backend (4 files)
+
+**`src-tauri/src/commands/system.rs`**
+- Added `install_ollama_from_bundle(app: AppHandle) → Result` command
+- Resolves bundled binary via `app.path().resource_dir()`, copies to `/usr/local/bin/ollama` (Unix) or `%LOCALAPPDATA%\Programs\Ollama\ollama.exe` (Windows), sets 0o755 permissions on Unix
+- Added `use tauri::Manager` import required by `app.path()`
+
+**`src-tauri/src/lib.rs`**
+- Registered `commands::system::install_ollama_from_bundle` in `tauri::generate_handler![]`
+
+**`src/lib/tauriCommands.ts`**
+- Added `installOllamaFromBundleCmd` typed wrapper: `() => invoke("install_ollama_from_bundle")`
+
+**`src-tauri/tauri.conf.json`**
+- Changed `"resources": []` → `"resources": ["resources/ollama/*"]`
+- Created `src-tauri/resources/ollama/.gitkeep` placeholder so Tauri's glob doesn't fail on builds without a bundled binary
+
+### Phase 3 — CI + Docs (3 files)
+
+**`.gitea/workflows/auto-tag.yml`**
+- Added "Download Ollama" step to `build-linux-amd64`: downloads `ollama-linux-amd64.tgz`, extracts binary to `src-tauri/resources/ollama/ollama`
+- Added "Download Ollama" step to `build-windows-amd64`: downloads `ollama-windows-amd64.zip`, extracts `ollama.exe`; added `unzip` to the Install dependencies step
+- Added "Download Ollama" step to `build-macos-arm64`: downloads `ollama-darwin` universal binary directly
+- Added "Download Ollama" step to `build-linux-arm64`: downloads `ollama-linux-arm64.tgz`, extracts binary
+
+**`docs/wiki/IPC-Commands.md`**
+- Added `install_ollama_from_bundle` entry under System/Ollama Commands section documenting parameters, return value, platform-specific install paths, and privilege requirement note
+
+---
+
+## Testing Needed
+
+### Automated
+```bash
+npx tsc --noEmit # TS: zero errors
+npm run test:run # Vitest: all tests pass
+cargo check --manifest-path src-tauri/Cargo.toml # Rust: zero errors
+cargo clippy --manifest-path src-tauri/Cargo.toml -- -D warnings # Clippy: zero warnings
+python3 -c "import yaml; yaml.safe_load(open('.gitea/workflows/auto-tag.yml'))" && echo OK
+```
+
+### Manual
+1. **Custom REST model dropdown**: Settings → AI Providers → Add Provider → Type=Custom → API Format=Custom REST — the top Model row should disappear; the dropdown at the bottom should be visible and populated with the full model list. Auth Header Name should be empty.
+2. **Label rename**: Confirm "Email Address" label, `user@example.com` placeholder, no Motorola references.
+3. **PII persistence**: Security page → toggle off "Email Addresses" and "IP Addresses" → navigate away → return → both should still be off. Restart the app → toggles should remain in the saved state.
+4. **Refresh button contrast**: Dashboard and Ollama pages → confirm Refresh button border is visible on dark background.
+5. **Theme toggle**: Sidebar footer → click Sun/Moon icon → theme should switch. Collapse sidebar → icon should still be accessible.
+6. **Install Ollama (Offline)**: On a machine without Ollama, go to Settings → Ollama → "Ollama Not Detected" card should show "Install Ollama (Offline)" button. (Full test requires a release build with the bundled binary from CI.)
From fc50fe31021dbdf188981ba8d3346c7e7a96ba4c Mon Sep 17 00:00:00 2001
From: Shaun Arman
Date: Sun, 5 Apr 2026 19:33:23 -0500
Subject: [PATCH 2/4] test(store): add PII pattern persistence tests for
settingsStore
---
tests/unit/settingsStore.test.ts | 54 ++++++++++++++++++++++++++++++++
1 file changed, 54 insertions(+)
diff --git a/tests/unit/settingsStore.test.ts b/tests/unit/settingsStore.test.ts
index a675dc87..4d6f7e7c 100644
--- a/tests/unit/settingsStore.test.ts
+++ b/tests/unit/settingsStore.test.ts
@@ -9,6 +9,8 @@ const mockProvider: ProviderConfig = {
model: "gpt-4o",
};
+const DEFAULT_PII_PATTERNS = ["email", "ip_address", "phone", "ssn", "credit_card", "hostname", "password", "api_key"];
+
describe("Settings Store", () => {
beforeEach(() => {
localStorage.clear();
@@ -19,6 +21,7 @@ describe("Settings Store", () => {
default_provider: "ollama",
default_model: "llama3.2:3b",
ollama_url: "http://localhost:11434",
+ pii_enabled_patterns: Object.fromEntries(DEFAULT_PII_PATTERNS.map((id) => [id, true])),
});
});
@@ -52,3 +55,54 @@ describe("Settings Store", () => {
expect(raw).not.toContain("sk-test-key");
});
});
+
+describe("Settings Store — PII patterns", () => {
+ beforeEach(() => {
+ localStorage.clear();
+ useSettingsStore.setState({
+ theme: "dark",
+ ai_providers: [],
+ active_provider: undefined,
+ default_provider: "ollama",
+ default_model: "llama3.2:3b",
+ ollama_url: "http://localhost:11434",
+ pii_enabled_patterns: Object.fromEntries(DEFAULT_PII_PATTERNS.map((id) => [id, true])),
+ });
+ });
+
+ it("initializes all 8 PII patterns as enabled by default", () => {
+ const patterns = useSettingsStore.getState().pii_enabled_patterns;
+ for (const id of DEFAULT_PII_PATTERNS) {
+ expect(patterns[id]).toBe(true);
+ }
+ });
+
+ it("setPiiPattern disables a single pattern", () => {
+ useSettingsStore.getState().setPiiPattern("email", false);
+ expect(useSettingsStore.getState().pii_enabled_patterns["email"]).toBe(false);
+ });
+
+ it("setPiiPattern does not affect other patterns", () => {
+ useSettingsStore.getState().setPiiPattern("email", false);
+ for (const id of DEFAULT_PII_PATTERNS.filter((id) => id !== "email")) {
+ expect(useSettingsStore.getState().pii_enabled_patterns[id]).toBe(true);
+ }
+ });
+
+ it("setPiiPattern re-enables a disabled pattern", () => {
+ useSettingsStore.getState().setPiiPattern("ssn", false);
+ useSettingsStore.getState().setPiiPattern("ssn", true);
+ expect(useSettingsStore.getState().pii_enabled_patterns["ssn"]).toBe(true);
+ });
+
+ it("pii_enabled_patterns is persisted to localStorage", () => {
+ useSettingsStore.getState().setPiiPattern("api_key", false);
+ const raw = localStorage.getItem("tftsr-settings");
+ expect(raw).toBeTruthy();
+ // Zustand persist wraps state in { state: {...}, version: ... }
+ const parsed = JSON.parse(raw!);
+ const stored = parsed.state ?? parsed;
+ expect(stored.pii_enabled_patterns.api_key).toBe(false);
+ expect(stored.pii_enabled_patterns.email).toBe(true);
+ });
+});
From dffd26a6fd835309e26dc6dde0a715c6ec78e104 Mon Sep 17 00:00:00 2001
From: Shaun Arman
Date: Sun, 5 Apr 2026 19:34:47 -0500
Subject: [PATCH 3/4] fix(security): add path canonicalization and actionable
permission error in install_ollama_from_bundle
---
src-tauri/src/commands/system.rs | 28 +++++++++++++++++++++++++---
1 file changed, 25 insertions(+), 3 deletions(-)
diff --git a/src-tauri/src/commands/system.rs b/src-tauri/src/commands/system.rs
index 4f666040..0a257afa 100644
--- a/src-tauri/src/commands/system.rs
+++ b/src-tauri/src/commands/system.rs
@@ -145,6 +145,8 @@ pub async fn get_audit_log(
// Security note: the bundled binary's integrity is guaranteed by the CI release pipeline
// which verifies SHA256 checksums against Ollama's published sha256sums.txt before bundling.
// Runtime re-verification is not performed here; the app bundle itself is the trust boundary.
+// On Unix, writing to /usr/local/bin requires elevated privileges. If the operation fails with
+// PermissionDenied the caller receives an actionable error message.
#[tauri::command]
pub async fn install_ollama_from_bundle(
app: tauri::AppHandle,
@@ -153,10 +155,12 @@ pub async fn install_ollama_from_bundle(
use std::path::PathBuf;
use tauri::Manager;
- let resource_path = app
+ let resource_dir = app
.path()
.resource_dir()
- .map_err(|e: tauri::Error| e.to_string())?
+ .map_err(|e: tauri::Error| e.to_string())?;
+
+ let resource_path = resource_dir
.join("ollama")
.join(if cfg!(windows) { "ollama.exe" } else { "ollama" });
@@ -164,6 +168,13 @@ pub async fn install_ollama_from_bundle(
return Err("Bundled Ollama not found in resources".to_string());
}
+ // Defense-in-depth: verify resolved path stays within the resource directory.
+ let canonical_resource = resource_path.canonicalize().map_err(|e| e.to_string())?;
+ let canonical_dir = resource_dir.canonicalize().map_err(|e| e.to_string())?;
+ if !canonical_resource.starts_with(&canonical_dir) {
+ return Err("Resource path validation failed".to_string());
+ }
+
#[cfg(unix)]
let install_path = PathBuf::from("/usr/local/bin/ollama");
#[cfg(windows)]
@@ -179,7 +190,18 @@ pub async fn install_ollama_from_bundle(
fs::create_dir_all(parent).map_err(|e| e.to_string())?;
}
- fs::copy(&resource_path, &install_path).map_err(|e| e.to_string())?;
+ fs::copy(&resource_path, &install_path).map_err(|e| {
+ if e.kind() == std::io::ErrorKind::PermissionDenied {
+ format!(
+ "Permission denied writing to {}. On Linux, re-run the app with elevated \
+ privileges or install manually: sudo cp \"{}\" /usr/local/bin/ollama",
+ install_path.display(),
+ resource_path.display()
+ )
+ } else {
+ e.to_string()
+ }
+ })?;
#[cfg(unix)]
{
From 2d02cfa9e88f389dd331c2cfcb69dc15c98d0b97 Mon Sep 17 00:00:00 2001
From: Shaun Arman
Date: Sun, 5 Apr 2026 19:41:59 -0500
Subject: [PATCH 4/4] style: apply cargo fmt to install_ollama_from_bundle
---
src-tauri/src/commands/system.rs | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src-tauri/src/commands/system.rs b/src-tauri/src/commands/system.rs
index 0a257afa..404955e3 100644
--- a/src-tauri/src/commands/system.rs
+++ b/src-tauri/src/commands/system.rs
@@ -148,9 +148,7 @@ pub async fn get_audit_log(
// On Unix, writing to /usr/local/bin requires elevated privileges. If the operation fails with
// PermissionDenied the caller receives an actionable error message.
#[tauri::command]
-pub async fn install_ollama_from_bundle(
- app: tauri::AppHandle,
-) -> Result {
+pub async fn install_ollama_from_bundle(app: tauri::AppHandle) -> Result {
use std::fs;
use std::path::PathBuf;
use tauri::Manager;
@@ -160,9 +158,11 @@ pub async fn install_ollama_from_bundle(
.resource_dir()
.map_err(|e: tauri::Error| e.to_string())?;
- let resource_path = resource_dir
- .join("ollama")
- .join(if cfg!(windows) { "ollama.exe" } else { "ollama" });
+ let resource_path = resource_dir.join("ollama").join(if cfg!(windows) {
+ "ollama.exe"
+ } else {
+ "ollama"
+ });
if !resource_path.exists() {
return Err("Bundled Ollama not found in resources".to_string());