Merge pull request 'security/audit' (#8) from security/audit into master

Reviewed-on: #8
This commit is contained in:
sarman 2026-04-05 15:56:26 +00:00
commit 1b26bf5214
33 changed files with 562 additions and 359 deletions

View File

@ -59,6 +59,69 @@ jobs:
echo "Tag $NEXT pushed successfully"
wiki-sync:
runs-on: linux-amd64
container:
image: alpine:latest
steps:
- name: Install dependencies
run: apk add --no-cache git
- name: Checkout main repository
run: |
git init
git remote add origin http://172.0.0.29:3000/sarman/tftsr-devops_investigation.git
git fetch --depth=1 origin $GITHUB_SHA
git checkout FETCH_HEAD
- name: Configure git
run: |
git config --global user.email "actions@gitea.local"
git config --global user.name "Gitea Actions"
git config --global credential.helper ''
- name: Clone and sync wiki
env:
WIKI_TOKEN: ${{ secrets.Wiki }}
run: |
cd /tmp
if [ -n "$WIKI_TOKEN" ]; then
WIKI_URL="http://${WIKI_TOKEN}@172.0.0.29:3000/sarman/tftsr-devops_investigation.wiki.git"
else
WIKI_URL="http://172.0.0.29:3000/sarman/tftsr-devops_investigation.wiki.git"
fi
if ! git clone "$WIKI_URL" wiki 2>/dev/null; then
echo "Wiki doesn't exist yet, creating initial structure..."
mkdir -p wiki
cd wiki
git init
git checkout -b master
echo "# Wiki" > Home.md
git add Home.md
git commit -m "Initial wiki commit"
git remote add origin "$WIKI_URL"
fi
cd /tmp/wiki
if [ -d "$GITHUB_WORKSPACE/docs/wiki" ]; then
cp -v "$GITHUB_WORKSPACE"/docs/wiki/*.md . 2>/dev/null || echo "No wiki files to copy"
fi
git add -A
if ! git diff --staged --quiet; then
git commit -m "docs: sync from docs/wiki/ at commit ${GITHUB_SHA:0:8}"
echo "Pushing to wiki..."
if git push origin master; then
echo "✓ Wiki successfully synced"
else
echo "⚠ Wiki push failed - check token permissions"
exit 1
fi
else
echo "No wiki changes to commit"
fi
build-linux-amd64:
needs: autotag
runs-on: linux-amd64
@ -84,8 +147,7 @@ jobs:
run: |
npm ci --legacy-peer-deps
rustup target add x86_64-unknown-linux-gnu
cargo install tauri-cli --version "^2" --locked
CI=true cargo tauri build --target x86_64-unknown-linux-gnu
CI=true npx tauri build --target x86_64-unknown-linux-gnu
- name: Upload artifacts
env:
RELEASE_TOKEN: ${{ secrets.RELEASE_TOKEN }}
@ -175,8 +237,7 @@ jobs:
run: |
npm ci --legacy-peer-deps
rustup target add x86_64-pc-windows-gnu
cargo install tauri-cli --version "^2" --locked
CI=true cargo tauri build --target x86_64-pc-windows-gnu
CI=true npx tauri build --target x86_64-pc-windows-gnu
- name: Upload artifacts
env:
RELEASE_TOKEN: ${{ secrets.RELEASE_TOKEN }}
@ -252,8 +313,7 @@ jobs:
run: |
npm ci --legacy-peer-deps
rustup target add aarch64-apple-darwin
cargo install tauri-cli --version "^2" --locked
CI=true cargo tauri build --target aarch64-apple-darwin --bundles app
CI=true npx tauri build --target aarch64-apple-darwin --bundles app
APP=$(find src-tauri/target/aarch64-apple-darwin/release/bundle/macos -maxdepth 1 -type d -name "*.app" | head -n 1)
if [ -z "$APP" ]; then
echo "ERROR: Could not find macOS app bundle"
@ -324,7 +384,7 @@ jobs:
build-linux-arm64:
needs: autotag
runs-on: linux-arm64
runs-on: linux-amd64
container:
image: rust:1.88-slim
steps:
@ -337,22 +397,30 @@ jobs:
git checkout FETCH_HEAD
- name: Install dependencies
run: |
apt-get update -qq && apt-get install -y -qq \
libwebkit2gtk-4.1-dev libssl-dev libgtk-3-dev \
libayatana-appindicator3-dev librsvg2-dev patchelf \
dpkg --add-architecture arm64
apt-get update -qq
apt-get install -y -qq \
gcc-aarch64-linux-gnu g++-aarch64-linux-gnu \
libwebkit2gtk-4.1-dev:arm64 libssl-dev:arm64 libgtk-3-dev:arm64 \
libayatana-appindicator3-dev:arm64 librsvg2-dev:arm64 patchelf \
pkg-config curl perl jq
curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
apt-get install -y nodejs
- name: Build
env:
CC_aarch64_unknown_linux_gnu: aarch64-linux-gnu-gcc
CXX_aarch64_unknown_linux_gnu: aarch64-linux-gnu-g++
AR_aarch64_unknown_linux_gnu: aarch64-linux-gnu-ar
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
PKG_CONFIG_SYSROOT_DIR: /usr/aarch64-linux-gnu
PKG_CONFIG_PATH: /usr/lib/aarch64-linux-gnu/pkgconfig
PKG_CONFIG_ALLOW_CROSS: "1"
OPENSSL_NO_VENDOR: "0"
OPENSSL_STATIC: "1"
run: |
ARCH=$(uname -m)
if [ "$ARCH" != "aarch64" ] && [ "$ARCH" != "arm64" ]; then
echo "ERROR: linux-arm64 job is not running on an ARM64 host (uname -m=$ARCH)."
exit 1
fi
npm ci --legacy-peer-deps
cargo install tauri-cli --version "^2" --locked
CI=true cargo tauri build
rustup target add aarch64-unknown-linux-gnu
CI=true npx tauri build --target aarch64-unknown-linux-gnu
- name: Upload artifacts
env:
RELEASE_TOKEN: ${{ secrets.RELEASE_TOKEN }}
@ -379,7 +447,7 @@ jobs:
exit 1
fi
echo "Release ID: $RELEASE_ID"
ARTIFACTS=$(find src-tauri/target/release/bundle -type f \
ARTIFACTS=$(find src-tauri/target/aarch64-unknown-linux-gnu/release/bundle -type f \
\( -name "*.deb" -o -name "*.rpm" -o -name "*.AppImage" \))
if [ -z "$ARTIFACTS" ]; then
echo "ERROR: No Linux arm64 artifacts were found to upload."

View File

@ -139,73 +139,3 @@ jobs:
git checkout FETCH_HEAD
- run: npm ci --legacy-peer-deps
- run: npm run test:run
wiki-sync:
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/master'
container:
image: alpine:latest
steps:
- name: Install dependencies
run: apk add --no-cache git
- name: Checkout main repository
run: |
git init
git remote add origin http://172.0.0.29:3000/sarman/tftsr-devops_investigation.git
git fetch --depth=1 origin $GITHUB_SHA
git checkout FETCH_HEAD
- name: Configure git
run: |
git config --global user.email "actions@gitea.local"
git config --global user.name "Gitea Actions"
# Disable credential helper to avoid prompts
git config --global credential.helper ''
- name: Clone and sync wiki
env:
WIKI_TOKEN: ${{ secrets.Wiki }}
run: |
# Clone wiki repository with authentication
cd /tmp
if [ -n "$WIKI_TOKEN" ]; then
WIKI_URL="http://${WIKI_TOKEN}@172.0.0.29:3000/sarman/tftsr-devops_investigation.wiki.git"
else
WIKI_URL="http://172.0.0.29:3000/sarman/tftsr-devops_investigation.wiki.git"
fi
if ! git clone "$WIKI_URL" wiki 2>/dev/null; then
echo "Wiki doesn't exist yet, creating initial structure..."
mkdir -p wiki
cd wiki
git init
git checkout -b master
echo "# Wiki" > Home.md
git add Home.md
git commit -m "Initial wiki commit"
git remote add origin "$WIKI_URL"
fi
# Copy wiki files
cd /tmp/wiki
if [ -d "$GITHUB_WORKSPACE/docs/wiki" ]; then
cp -v "$GITHUB_WORKSPACE"/docs/wiki/*.md . 2>/dev/null || echo "No wiki files to copy"
fi
# Commit if there are changes
git add -A
if ! git diff --staged --quiet; then
git commit -m "docs: sync from docs/wiki/ at commit ${GITHUB_SHA:0:8}"
# Push using token authentication
echo "Pushing to wiki..."
if git push origin master; then
echo "✓ Wiki successfully synced"
else
echo "⚠ Wiki push failed - check token permissions"
exit 1
fi
else
echo "No wiki changes to commit"
fi

View File

@ -46,7 +46,7 @@ Built with **Tauri 2** (Rust + WebView), **React 18**, **TypeScript**, and **SQL
| UI | Tailwind CSS (custom shadcn-style components) |
| Database | rusqlite + `bundled-sqlcipher` (AES-256) |
| Secret storage | `tauri-plugin-stronghold` |
| State management | Zustand (persisted settings store) |
| State management | Zustand (persisted settings store with API key redaction) |
| AI providers | reqwest (async HTTP) |
| PII detection | regex + aho-corasick multi-pattern engine |
@ -270,10 +270,10 @@ The project uses **Gitea Actions** (act_runner v0.3.1) connected to the Gitea in
| Concern | Implementation |
|---|---|
| API keys / tokens | `tauri-plugin-stronghold` encrypted vault |
| API keys / tokens | AES-256-GCM encrypted at rest (backend), not persisted in browser storage |
| Database at rest | SQLCipher AES-256; key derived via PBKDF2 |
| PII before AI send | Rust-side detection + mandatory user approval in UI |
| Audit trail | Every `ai_send` / `publish` event logged with SHA-256 hash |
| Audit trail | Hash-chained audit entries (`prev_hash` + `entry_hash`) for tamper evidence |
| Network | `reqwest` with TLS; HTTP blocked by Tauri capability config |
| Capabilities | Least-privilege: scoped fs access, no arbitrary shell by default |
| CSP | Strict CSP in `tauri.conf.json`; no inline scripts |
@ -300,7 +300,8 @@ Override with the `TFTSR_DATA_DIR` environment variable.
| Variable | Default | Purpose |
|---|---|---|
| `TFTSR_DATA_DIR` | Platform data dir | Override database location |
| `TFTSR_DB_KEY` | `dev-key-change-in-prod` | Database encryption key (release builds) |
| `TFTSR_DB_KEY` | _(none)_ | Database encryption key (required in release builds) |
| `TFTSR_ENCRYPTION_KEY` | _(none)_ | Credential encryption key (required in release builds) |
| `RUST_LOG` | `info` | Tracing log level (`debug`, `info`, `warn`, `error`) |
---

View File

@ -55,13 +55,21 @@ Covers: OpenAI, Azure OpenAI, LM Studio, vLLM, **LiteLLM (AWS Bedrock)**, and an
|-------|-------|
| `config.name` | `"gemini"` |
| URL | `https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent` |
| Auth | API key as `?key=` query parameter |
| Auth | `x-goog-api-key: <api_key>` header |
| Max tokens | 4096 |
**Models:** `gemini-2.0-flash`, `gemini-2.0-pro`, `gemini-1.5-pro`, `gemini-1.5-flash`
---
## Transport Security Notes
- Provider clients use TLS certificate verification via `reqwest`
- Provider calls are configured with explicit request timeouts to avoid indefinite hangs
- Credentials are sent in headers (not URL query strings)
---
### 4. Mistral AI
| Field | Value |

View File

@ -35,7 +35,8 @@ npm install --legacy-peer-deps
| Variable | Default | Purpose |
|----------|---------|---------|
| `TFTSR_DATA_DIR` | Platform data dir | Override DB location |
| `TFTSR_DB_KEY` | `dev-key-change-in-prod` | DB encryption key (required in production) |
| `TFTSR_DB_KEY` | _(none)_ | DB encryption key (required in release builds) |
| `TFTSR_ENCRYPTION_KEY` | _(none)_ | Credential encryption key (required in release builds) |
| `RUST_LOG` | `info` | Tracing verbosity: `debug`, `info`, `warn`, `error` |
Application data is stored at:
@ -120,7 +121,7 @@ cargo tauri build
# Outputs: .deb, .rpm, .AppImage (Linux)
```
Release builds enable **SQLCipher AES-256** encryption. Set `TFTSR_DB_KEY` before building.
Release builds enforce secure key configuration. Set both `TFTSR_DB_KEY` and `TFTSR_ENCRYPTION_KEY` before building.
---

View File

@ -10,7 +10,7 @@ Before any text is sent to an AI provider, TFTSR scans it for personally identif
1. Upload log file
2. detect_pii(log_file_id)
→ Scans content with 13 regex patterns
→ Scans content with PII regex patterns (including hostname + expanded card brands)
→ Resolves overlapping matches (longest wins)
→ Returns Vec<PiiSpan> with byte offsets + replacements
@ -24,7 +24,7 @@ Before any text is sent to an AI provider, TFTSR scans it for personally identif
5. Redacted text safe to send to AI
```
## Detection Patterns (13 Types)
## Detection Patterns
| Type | Replacement | Pattern notes |
|------|-------------|---------------|
@ -33,13 +33,13 @@ Before any text is sent to an AI provider, TFTSR scans it for personally identif
| `ApiKey` | `[ApiKey]` | `api_key=`, `apikey=`, `access_token=` + 16+ char value |
| `Password` | `[Password]` | `password=`, `passwd=`, `pwd=` + non-whitespace value |
| `Ssn` | `[SSN]` | `\b\d{3}-\d{2}-\d{4}\b` |
| `CreditCard` | `[CreditCard]` | Visa/MC/Amex Luhn-format numbers |
| `CreditCard` | `[CreditCard]` | Visa/MC/Amex/Discover/JCB/Diners patterns |
| `Email` | `[Email]` | RFC-compliant email addresses |
| `MacAddress` | `[MAC]` | `XX:XX:XX:XX:XX:XX` and `XX-XX-XX-XX-XX-XX` |
| `Ipv6` | `[IPv6]` | Full and compressed IPv6 addresses |
| `Ipv4` | `[IPv4]` | Standard dotted-quad notation |
| `PhoneNumber` | `[Phone]` | US and international phone formats |
| `Hostname` | _(patterns.rs)_ | Configurable hostname patterns |
| `Hostname` | `[Hostname]` | FQDN/hostname detection for internal names |
| `UrlCredentials` | _(covered by UrlWithCredentials)_ | |
## Overlap Resolution
@ -71,7 +71,7 @@ pub struct PiiSpan {
pub pii_type: PiiType,
pub start: usize, // byte offset in original text
pub end: usize,
pub original_value: String,
pub original: String,
pub replacement: String, // e.g., "[IPv4]"
}
```
@ -111,3 +111,4 @@ write_audit_event(
- Only the redacted text is sent to AI providers
- The SHA-256 hash in the audit log allows integrity verification
- If redaction is skipped (no PII detected), the audit log still records the send
- Stored `pii_spans.original_value` metadata is cleared after redaction is finalized

View File

@ -18,20 +18,25 @@ Production builds use SQLCipher:
- **Cipher:** AES-256-CBC
- **KDF:** PBKDF2-HMAC-SHA512, 256,000 iterations
- **HMAC:** HMAC-SHA512
- **Page size:** 4096 bytes
- **Page size:** 16384 bytes
- **Key source:** `TFTSR_DB_KEY` environment variable
Debug builds use plain SQLite (no encryption) for developer convenience.
> ⚠️ **Never** use the default key (`dev-key-change-in-prod`) in a production environment.
Release builds now fail startup if `TFTSR_DB_KEY` is missing or empty.
---
## API Key Storage (Stronghold)
## Credential Encryption
AI provider API keys are stored in `tauri-plugin-stronghold` — an encrypted vault backed by the [IOTA Stronghold](https://github.com/iotaledger/stronghold.rs) library.
Integration tokens are encrypted with AES-256-GCM before persistence:
- **Key source:** `TFTSR_ENCRYPTION_KEY` (required in release builds)
- **Key derivation:** SHA-256 hash of key material to a fixed 32-byte AES key
- **Nonce:** Cryptographically secure random nonce per encryption
The vault is initialized with a password-derived key using Argon2. API keys are never written to disk in plaintext or to the SQLite database.
Release builds fail secure operations if `TFTSR_ENCRYPTION_KEY` is unset or empty.
The Stronghold plugin remains enabled and now uses a per-installation salt derived from the app data directory path hash instead of a fixed static salt.
---
@ -46,6 +51,7 @@ log file → detect_pii() → user approves spans → apply_redactions() → AI
- Original text **never leaves the machine**
- Only the redacted version is transmitted
- The SHA-256 hash of the redacted text is recorded in the audit log for integrity verification
- `pii_spans.original_value` is cleared after redaction to avoid retaining raw detected secrets in storage
- See [PII Detection](PII-Detection) for the full list of detected patterns
---
@ -66,6 +72,14 @@ write_audit_event(
The audit log is stored in the encrypted SQLite database. It cannot be deleted through the UI.
### Tamper Evidence
`audit_log` entries now include:
- `prev_hash` — hash of the previous audit entry
- `entry_hash` — SHA-256 hash of current entry payload + `prev_hash`
This creates a hash chain and makes post-hoc modification detectable.
**Audit entry fields:**
- `action` — what was done
- `entity_type` — type of record involved
@ -84,7 +98,7 @@ Defined in `src-tauri/capabilities/default.json`:
|--------|-------------------|
| `dialog` | `allow-open`, `allow-save` |
| `fs` | `read-text`, `write-text`, `read`, `write`, `mkdir` — scoped to app dir and temp |
| `shell` | `allow-execute` — for running system commands |
| `shell` | `allow-open` only |
| `http` | default — connect only to approved origins |
---
@ -109,7 +123,9 @@ HTTP is blocked by default. Only whitelisted HTTPS endpoints (and localhost for
## TLS
All outbound HTTP requests use `reqwest` with default TLS settings (TLS 1.2+ required). Certificate verification is enabled. No custom trust anchors are added.
All outbound HTTP requests use `reqwest` with certificate verification enabled and a request timeout configured for provider calls.
CI/CD currently uses internal `http://` endpoints for self-hosted Gitea release automation on a trusted LAN. Recommended hardening: migrate runners and API calls to HTTPS with internal certificates.
---
@ -120,3 +136,4 @@ All outbound HTTP requests use `reqwest` with default TLS settings (TLS 1.2+ req
- [ ] Does it store secrets? → Use Stronghold, not the SQLite DB
- [ ] Does it need filesystem access? → Scope the fs capability
- [ ] Does it need a new HTTP endpoint? → Add to CSP `connect-src`
- [ ] Does it add a new provider endpoint? → Avoid query-param secrets, use auth headers

7
src-tauri/Cargo.lock generated
View File

@ -5706,6 +5706,7 @@ dependencies = [
"tokio-test",
"tracing",
"tracing-subscriber",
"urlencoding",
"uuid",
"warp",
]
@ -6344,6 +6345,12 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "urlpattern"
version = "0.3.0"

View File

@ -42,6 +42,7 @@ aes-gcm = "0.10"
rand = "0.8"
lazy_static = "1.4"
warp = "0.3"
urlencoding = "2"
[dev-dependencies]
tokio-test = "0.4"

View File

@ -24,7 +24,6 @@
"fs:allow-temp-write-recursive",
"fs:scope-app-recursive",
"fs:scope-temp-recursive",
"shell:allow-execute",
"shell:allow-open",
"http:default"
]

View File

@ -1 +1 @@
{"default":{"identifier":"default","description":"Default capabilities for TFTSR — least-privilege","local":true,"windows":["main"],"permissions":["core:path:default","core:event:default","core:window:default","core:app:default","core:resources:default","core:menu:default","core:tray:default","dialog:allow-open","dialog:allow-save","fs:allow-read-text-file","fs:allow-write-text-file","fs:allow-read","fs:allow-write","fs:allow-mkdir","fs:allow-app-read-recursive","fs:allow-app-write-recursive","fs:allow-temp-read-recursive","fs:allow-temp-write-recursive","fs:scope-app-recursive","fs:scope-temp-recursive","shell:allow-execute","shell:allow-open","http:default"]}}
{"default":{"identifier":"default","description":"Default capabilities for TFTSR — least-privilege","local":true,"windows":["main"],"permissions":["core:path:default","core:event:default","core:window:default","core:app:default","core:resources:default","core:menu:default","core:tray:default","dialog:allow-open","dialog:allow-save","fs:allow-read-text-file","fs:allow-write-text-file","fs:allow-read","fs:allow-write","fs:allow-mkdir","fs:allow-app-read-recursive","fs:allow-app-write-recursive","fs:allow-temp-read-recursive","fs:allow-temp-write-recursive","fs:scope-app-recursive","fs:scope-temp-recursive","shell:allow-open","http:default"]}}

View File

@ -2324,24 +2324,6 @@
"Identifier": {
"description": "Permission identifier",
"oneOf": [
{
"description": "Allows reading the CLI matches\n#### This default permission set includes:\n\n- `allow-cli-matches`",
"type": "string",
"const": "cli:default",
"markdownDescription": "Allows reading the CLI matches\n#### This default permission set includes:\n\n- `allow-cli-matches`"
},
{
"description": "Enables the cli_matches command without any pre-configured scope.",
"type": "string",
"const": "cli:allow-cli-matches",
"markdownDescription": "Enables the cli_matches command without any pre-configured scope."
},
{
"description": "Denies the cli_matches command without any pre-configured scope.",
"type": "string",
"const": "cli:deny-cli-matches",
"markdownDescription": "Denies the cli_matches command without any pre-configured scope."
},
{
"description": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`",
"type": "string",
@ -6373,60 +6355,6 @@
"type": "string",
"const": "stronghold:deny-save-store-record",
"markdownDescription": "Denies the save_store_record command without any pre-configured scope."
},
{
"description": "This permission set configures which kind of\nupdater functions are exposed to the frontend.\n\n#### Granted Permissions\n\nThe full workflow from checking for updates to installing them\nis enabled.\n\n\n#### This default permission set includes:\n\n- `allow-check`\n- `allow-download`\n- `allow-install`\n- `allow-download-and-install`",
"type": "string",
"const": "updater:default",
"markdownDescription": "This permission set configures which kind of\nupdater functions are exposed to the frontend.\n\n#### Granted Permissions\n\nThe full workflow from checking for updates to installing them\nis enabled.\n\n\n#### This default permission set includes:\n\n- `allow-check`\n- `allow-download`\n- `allow-install`\n- `allow-download-and-install`"
},
{
"description": "Enables the check command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-check",
"markdownDescription": "Enables the check command without any pre-configured scope."
},
{
"description": "Enables the download command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-download",
"markdownDescription": "Enables the download command without any pre-configured scope."
},
{
"description": "Enables the download_and_install command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-download-and-install",
"markdownDescription": "Enables the download_and_install command without any pre-configured scope."
},
{
"description": "Enables the install command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-install",
"markdownDescription": "Enables the install command without any pre-configured scope."
},
{
"description": "Denies the check command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-check",
"markdownDescription": "Denies the check command without any pre-configured scope."
},
{
"description": "Denies the download command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-download",
"markdownDescription": "Denies the download command without any pre-configured scope."
},
{
"description": "Denies the download_and_install command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-download-and-install",
"markdownDescription": "Denies the download_and_install command without any pre-configured scope."
},
{
"description": "Denies the install command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-install",
"markdownDescription": "Denies the install command without any pre-configured scope."
}
]
},

View File

@ -1,4 +1,5 @@
use async_trait::async_trait;
use std::time::Duration;
use crate::ai::provider::Provider;
use crate::ai::{ChatResponse, Message, ProviderInfo, TokenUsage};
@ -29,7 +30,9 @@ impl Provider for AnthropicProvider {
messages: Vec<Message>,
config: &ProviderConfig,
) -> anyhow::Result<ChatResponse> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(60))
.build()?;
let url = format!(
"{}/v1/messages",
config

View File

@ -1,4 +1,5 @@
use async_trait::async_trait;
use std::time::Duration;
use crate::ai::provider::Provider;
use crate::ai::{ChatResponse, Message, ProviderInfo, TokenUsage};
@ -30,10 +31,12 @@ impl Provider for GeminiProvider {
messages: Vec<Message>,
config: &ProviderConfig,
) -> anyhow::Result<ChatResponse> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(60))
.build()?;
let url = format!(
"https://generativelanguage.googleapis.com/v1beta/models/{}:generateContent?key={}",
config.model, config.api_key
"https://generativelanguage.googleapis.com/v1beta/models/{}:generateContent",
config.model
);
// Map OpenAI-style messages to Gemini format
@ -79,6 +82,7 @@ impl Provider for GeminiProvider {
let resp = client
.post(&url)
.header("Content-Type", "application/json")
.header("x-goog-api-key", &config.api_key)
.json(&body)
.send()
.await?;

View File

@ -1,4 +1,5 @@
use async_trait::async_trait;
use std::time::Duration;
use crate::ai::provider::Provider;
use crate::ai::{ChatResponse, Message, ProviderInfo, TokenUsage};
@ -31,7 +32,9 @@ impl Provider for MistralProvider {
config: &ProviderConfig,
) -> anyhow::Result<ChatResponse> {
// Mistral uses OpenAI-compatible format
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(60))
.build()?;
let base_url = if config.api_url.is_empty() {
"https://api.mistral.ai/v1".to_string()
} else {

View File

@ -1,4 +1,5 @@
use async_trait::async_trait;
use std::time::Duration;
use crate::ai::provider::Provider;
use crate::ai::{ChatResponse, Message, ProviderInfo, TokenUsage};
@ -31,7 +32,9 @@ impl Provider for OllamaProvider {
messages: Vec<Message>,
config: &ProviderConfig,
) -> anyhow::Result<ChatResponse> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(60))
.build()?;
let base_url = if config.api_url.is_empty() {
"http://localhost:11434".to_string()
} else {

View File

@ -1,4 +1,5 @@
use async_trait::async_trait;
use std::time::Duration;
use crate::ai::provider::Provider;
use crate::ai::{ChatResponse, Message, ProviderInfo, TokenUsage};
@ -73,7 +74,9 @@ impl OpenAiProvider {
messages: Vec<Message>,
config: &ProviderConfig,
) -> anyhow::Result<ChatResponse> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(60))
.build()?;
// Use custom endpoint path if provided, otherwise default to /chat/completions
let endpoint_path = config
@ -145,7 +148,9 @@ impl OpenAiProvider {
messages: Vec<Message>,
config: &ProviderConfig,
) -> anyhow::Result<ChatResponse> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(60))
.build()?;
// Use custom endpoint path, default to empty (API URL already includes /api/v2/chat)
let endpoint_path = config.custom_endpoint_path.as_deref().unwrap_or("");

View File

@ -1,4 +1,20 @@
use crate::db::models::AuditEntry;
use sha2::{Digest, Sha256};
fn compute_entry_hash(entry: &AuditEntry, prev_hash: &str) -> String {
let payload = format!(
"{}|{}|{}|{}|{}|{}|{}|{}",
prev_hash,
entry.id,
entry.timestamp,
entry.action,
entry.entity_type,
entry.entity_id,
entry.user_id,
entry.details
);
format!("{:x}", Sha256::digest(payload.as_bytes()))
}
/// Write an audit event to the audit_log table.
pub fn write_audit_event(
@ -14,9 +30,16 @@ pub fn write_audit_event(
entity_id.to_string(),
details.to_string(),
);
let prev_hash: String = conn
.prepare(
"SELECT entry_hash FROM audit_log WHERE entry_hash <> '' ORDER BY timestamp DESC, id DESC LIMIT 1",
)?
.query_row([], |row| row.get(0))
.unwrap_or_default();
let entry_hash = compute_entry_hash(&entry, &prev_hash);
conn.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details, prev_hash, entry_hash) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
rusqlite::params![
entry.id,
entry.timestamp,
@ -25,6 +48,8 @@ pub fn write_audit_event(
entry.entity_id,
entry.user_id,
entry.details,
prev_hash,
entry_hash,
],
)?;
Ok(())
@ -44,7 +69,9 @@ mod tests {
entity_type TEXT NOT NULL DEFAULT '',
entity_id TEXT NOT NULL DEFAULT '',
user_id TEXT NOT NULL DEFAULT 'local',
details TEXT NOT NULL DEFAULT '{}'
details TEXT NOT NULL DEFAULT '{}',
prev_hash TEXT NOT NULL DEFAULT '',
entry_hash TEXT NOT NULL DEFAULT ''
);",
)
.unwrap();
@ -128,4 +155,26 @@ mod tests {
assert_eq!(ids.len(), 2);
assert_ne!(ids[0], ids[1]);
}
#[test]
fn test_write_audit_event_hash_chain_links_entries() {
let conn = setup_test_db();
write_audit_event(&conn, "first", "issue", "1", "{}").unwrap();
write_audit_event(&conn, "second", "issue", "2", "{}").unwrap();
let mut stmt = conn
.prepare("SELECT prev_hash, entry_hash FROM audit_log ORDER BY timestamp ASC, id ASC")
.unwrap();
let rows: Vec<(String, String)> = stmt
.query_map([], |row| Ok((row.get(0)?, row.get(1)?)))
.unwrap()
.collect::<Result<Vec<_>, _>>()
.unwrap();
assert_eq!(rows.len(), 2);
assert_eq!(rows[0].0, "");
assert!(!rows[0].1.is_empty());
assert_eq!(rows[1].0, rows[0].1);
assert!(!rows[1].1.is_empty());
}
}

View File

@ -1,4 +1,5 @@
use tauri::State;
use tracing::warn;
use crate::ai::provider::create_provider;
use crate::ai::{AnalysisResult, ChatResponse, Message, ProviderInfo};
@ -12,22 +13,27 @@ pub async fn analyze_logs(
provider_config: ProviderConfig,
state: State<'_, AppState>,
) -> Result<AnalysisResult, String> {
// Load log file contents
// Load log file contents — only redacted files may be sent to an AI provider
let mut log_contents = String::new();
{
let db = state.db.lock().map_err(|e| e.to_string())?;
for file_id in &log_file_ids {
let mut stmt = db
.prepare("SELECT file_name, file_path FROM log_files WHERE id = ?1")
.prepare("SELECT file_name, file_path, redacted FROM log_files WHERE id = ?1")
.map_err(|e| e.to_string())?;
if let Ok((name, path)) = stmt.query_row([file_id], |row| {
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
if let Ok((name, path, redacted)) = stmt.query_row([file_id], |row| {
Ok((
row.get::<_, String>(0)?,
row.get::<_, String>(1)?,
row.get::<_, i32>(2)? != 0,
))
}) {
let redacted_path = redacted_path_for(&name, &path, redacted)?;
log_contents.push_str(&format!("--- {name} ---\n"));
if let Ok(content) = std::fs::read_to_string(&path) {
if let Ok(content) = std::fs::read_to_string(&redacted_path) {
log_contents.push_str(&content);
} else {
log_contents.push_str("[Could not read file]\n");
log_contents.push_str("[Could not read redacted file]\n");
}
log_contents.push('\n');
}
@ -55,7 +61,10 @@ pub async fn analyze_logs(
let response = provider
.chat(messages, &provider_config)
.await
.map_err(|e| e.to_string())?;
.map_err(|e| {
warn!(error = %e, "ai analyze_logs provider request failed");
"AI analysis request failed".to_string()
})?;
let content = &response.content;
let summary = extract_section(content, "SUMMARY:").unwrap_or_else(|| {
@ -81,14 +90,14 @@ pub async fn analyze_logs(
serde_json::json!({ "log_file_ids": log_file_ids, "provider": provider_config.name })
.to_string(),
);
db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
entry.id, entry.timestamp, entry.action,
entry.entity_type, entry.entity_id, entry.user_id, entry.details
],
).map_err(|e| e.to_string())?;
crate::audit::log::write_audit_event(
&db,
&entry.action,
&entry.entity_type,
&entry.entity_id,
&entry.details,
)
.map_err(|_| "Failed to write security audit entry".to_string())?;
}
Ok(AnalysisResult {
@ -99,6 +108,17 @@ pub async fn analyze_logs(
})
}
/// Returns the path to the `.redacted` file, or an error if the file has not been redacted.
fn redacted_path_for(name: &str, path: &str, redacted: bool) -> Result<String, String> {
if !redacted {
return Err(format!(
"Log file '{name}' has not been scanned and redacted. \
Run PII detection and apply redactions before sending to AI."
));
}
Ok(format!("{path}.redacted"))
}
fn extract_section(text: &str, header: &str) -> Option<String> {
let start = text.find(header)?;
let after = &text[start + header.len()..];
@ -207,7 +227,10 @@ pub async fn chat_message(
let response = provider
.chat(messages, &provider_config)
.await
.map_err(|e| e.to_string())?;
.map_err(|e| {
warn!(error = %e, "ai chat provider request failed");
"AI provider request failed".to_string()
})?;
// Save both user message and response to DB
{
@ -258,14 +281,15 @@ pub async fn chat_message(
issue_id,
audit_details.to_string(),
);
let _ = db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
entry.id, entry.timestamp, entry.action,
entry.entity_type, entry.entity_id, entry.user_id, entry.details
],
);
if let Err(err) = crate::audit::log::write_audit_event(
&db,
&entry.action,
&entry.entity_type,
&entry.entity_id,
&entry.details,
) {
warn!(error = %err, "failed to write ai_chat audit entry");
}
}
Ok(response)
@ -285,7 +309,10 @@ pub async fn test_provider_connection(
provider
.chat(messages, &provider_config)
.await
.map_err(|e| e.to_string())
.map_err(|e| {
warn!(error = %e, "ai test_provider_connection failed");
"Provider connection test failed".to_string()
})
}
#[tauri::command]
@ -373,6 +400,19 @@ mod tests {
assert_eq!(list, vec!["Item one", "Item two"]);
}
#[test]
fn test_redacted_path_rejects_unredacted_file() {
let err = redacted_path_for("app.log", "/data/app.log", false).unwrap_err();
assert!(err.contains("app.log"));
assert!(err.contains("redacted"));
}
#[test]
fn test_redacted_path_returns_dotredacted_suffix() {
let path = redacted_path_for("app.log", "/data/app.log", true).unwrap();
assert_eq!(path, "/data/app.log.redacted");
}
#[test]
fn test_extract_list_missing_header() {
let text = "No findings here";

View File

@ -1,20 +1,43 @@
use sha2::{Digest, Sha256};
use std::path::{Path, PathBuf};
use tauri::State;
use tracing::warn;
use crate::db::models::{AuditEntry, LogFile, PiiSpanRecord};
use crate::pii::{self, PiiDetectionResult, PiiDetector, RedactedLogFile};
use crate::state::AppState;
// Largest log file the app will ingest (50 MB).
const MAX_LOG_FILE_BYTES: u64 = 50 * 1024 * 1024;

/// Resolve a user-selected path to its canonical form and verify it is a
/// regular file no larger than `MAX_LOG_FILE_BYTES`.
///
/// Returns the canonicalized path on success; all failure modes map to
/// user-facing error strings (the underlying OS error is intentionally
/// not exposed).
fn validate_log_file_path(file_path: &str) -> Result<PathBuf, String> {
    // Canonicalization resolves symlinks/`..` and fails for paths that
    // do not exist or are inaccessible.
    let canonical = match std::fs::canonicalize(Path::new(file_path)) {
        Ok(resolved) => resolved,
        Err(_) => return Err("Unable to access selected file".to_string()),
    };
    let metadata = match std::fs::metadata(&canonical) {
        Ok(meta) => meta,
        Err(_) => return Err("Unable to read file metadata".to_string()),
    };
    if !metadata.is_file() {
        return Err("Selected path is not a file".to_string());
    }
    if metadata.len() > MAX_LOG_FILE_BYTES {
        return Err(format!(
            "File exceeds maximum supported size ({} MB)",
            MAX_LOG_FILE_BYTES / 1024 / 1024
        ));
    }
    Ok(canonical)
}
#[tauri::command]
pub async fn upload_log_file(
issue_id: String,
file_path: String,
state: State<'_, AppState>,
) -> Result<LogFile, String> {
let path = std::path::Path::new(&file_path);
let content = std::fs::read(path).map_err(|e| e.to_string())?;
let canonical_path = validate_log_file_path(&file_path)?;
let content = std::fs::read(&canonical_path).map_err(|_| "Failed to read selected log file")?;
let content_hash = format!("{:x}", Sha256::digest(&content));
let file_name = path
let file_name = canonical_path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("unknown")
@ -28,7 +51,8 @@ pub async fn upload_log_file(
"text/plain"
};
let log_file = LogFile::new(issue_id.clone(), file_name, file_path.clone(), file_size);
let canonical_file_path = canonical_path.to_string_lossy().to_string();
let log_file = LogFile::new(issue_id.clone(), file_name, canonical_file_path, file_size);
let log_file = LogFile {
content_hash: content_hash.clone(),
mime_type: mime_type.to_string(),
@ -51,7 +75,7 @@ pub async fn upload_log_file(
log_file.redacted as i32,
],
)
.map_err(|e| e.to_string())?;
.map_err(|_| "Failed to store uploaded log metadata".to_string())?;
// Audit
let entry = AuditEntry::new(
@ -60,19 +84,15 @@ pub async fn upload_log_file(
log_file.id.clone(),
serde_json::json!({ "issue_id": issue_id, "file_name": log_file.file_name }).to_string(),
);
let _ = db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
entry.id,
entry.timestamp,
entry.action,
entry.entity_type,
entry.entity_id,
entry.user_id,
entry.details
],
);
if let Err(err) = crate::audit::log::write_audit_event(
&db,
&entry.action,
&entry.entity_type,
&entry.entity_id,
&entry.details,
) {
warn!(error = %err, "failed to write upload_log_file audit entry");
}
Ok(log_file)
}
@ -87,10 +107,11 @@ pub async fn detect_pii(
let db = state.db.lock().map_err(|e| e.to_string())?;
db.prepare("SELECT file_path FROM log_files WHERE id = ?1")
.and_then(|mut stmt| stmt.query_row([&log_file_id], |row| row.get(0)))
.map_err(|e| e.to_string())?
.map_err(|_| "Failed to load log file metadata".to_string())?
};
let content = std::fs::read_to_string(&file_path).map_err(|e| e.to_string())?;
let content =
std::fs::read_to_string(&file_path).map_err(|_| "Failed to read log file content")?;
let detector = PiiDetector::new();
let spans = detector.detect(&content);
@ -105,10 +126,10 @@ pub async fn detect_pii(
pii_type: span.pii_type.clone(),
start_offset: span.start as i64,
end_offset: span.end as i64,
original_value: span.original.clone(),
original_value: String::new(),
replacement: span.replacement.clone(),
};
let _ = db.execute(
if let Err(err) = db.execute(
"INSERT OR REPLACE INTO pii_spans (id, log_file_id, pii_type, start_offset, end_offset, original_value, replacement) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
@ -116,7 +137,9 @@ pub async fn detect_pii(
record.start_offset, record.end_offset,
record.original_value, record.replacement
],
);
) {
warn!(error = %err, span_id = %span.id, "failed to persist pii span");
}
}
}
@ -138,10 +161,11 @@ pub async fn apply_redactions(
let db = state.db.lock().map_err(|e| e.to_string())?;
db.prepare("SELECT file_path FROM log_files WHERE id = ?1")
.and_then(|mut stmt| stmt.query_row([&log_file_id], |row| row.get(0)))
.map_err(|e| e.to_string())?
.map_err(|_| "Failed to load log file metadata".to_string())?
};
let content = std::fs::read_to_string(&file_path).map_err(|e| e.to_string())?;
let content =
std::fs::read_to_string(&file_path).map_err(|_| "Failed to read log file content")?;
// Load PII spans from DB, filtering to only approved ones
let spans: Vec<pii::PiiSpan> = {
@ -188,7 +212,8 @@ pub async fn apply_redactions(
// Save redacted file alongside original
let redacted_path = format!("{file_path}.redacted");
std::fs::write(&redacted_path, &redacted_text).map_err(|e| e.to_string())?;
std::fs::write(&redacted_path, &redacted_text)
.map_err(|_| "Failed to write redacted output file".to_string())?;
// Mark the log file as redacted in DB
{
@ -197,7 +222,12 @@ pub async fn apply_redactions(
"UPDATE log_files SET redacted = 1 WHERE id = ?1",
[&log_file_id],
)
.map_err(|e| e.to_string())?;
.map_err(|_| "Failed to mark file as redacted".to_string())?;
db.execute(
"UPDATE pii_spans SET original_value = '' WHERE log_file_id = ?1",
[&log_file_id],
)
.map_err(|_| "Failed to finalize redaction metadata".to_string())?;
}
Ok(RedactedLogFile {
@ -206,3 +236,25 @@ pub async fn apply_redactions(
data_hash,
})
}
#[cfg(test)]
mod tests {
    use super::*;

    // A directory must be rejected: only regular files are valid log inputs.
    #[test]
    fn test_validate_log_file_path_rejects_non_file() {
        let tmp_dir = std::env::temp_dir();
        let verdict = validate_log_file_path(tmp_dir.to_string_lossy().as_ref());
        assert!(verdict.is_err());
    }

    // A small regular file passes validation; the randomized name avoids
    // collisions between concurrent test runs sharing the temp directory.
    #[test]
    fn test_validate_log_file_path_accepts_small_file() {
        let scratch = std::env::temp_dir()
            .join(format!("tftsr-analysis-test-{}.log", uuid::Uuid::now_v7()));
        std::fs::write(&scratch, "hello").unwrap();
        let verdict = validate_log_file_path(scratch.to_string_lossy().as_ref());
        assert!(verdict.is_ok());
        let _ = std::fs::remove_file(scratch);
    }
}

View File

@ -488,20 +488,14 @@ pub async fn add_timeline_event(
issue_id.clone(),
serde_json::json!({ "description": description }).to_string(),
);
db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
entry.id,
entry.timestamp,
entry.action,
entry.entity_type,
entry.entity_id,
entry.user_id,
entry.details
],
crate::audit::log::write_audit_event(
&db,
&entry.action,
&entry.entity_type,
&entry.entity_id,
&entry.details,
)
.map_err(|e| e.to_string())?;
.map_err(|_| "Failed to write security audit entry".to_string())?;
// Update issue timestamp
let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string();

View File

@ -1,4 +1,5 @@
use tauri::State;
use tracing::warn;
use crate::db::models::AuditEntry;
use crate::docs::{exporter, generate_postmortem_markdown, generate_rca_markdown};
@ -60,19 +61,15 @@ pub async fn generate_rca(
doc_id,
audit_details.to_string(),
);
let _ = db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
entry.id,
entry.timestamp,
entry.action,
entry.entity_type,
entry.entity_id,
entry.user_id,
entry.details
],
);
if let Err(err) = crate::audit::log::write_audit_event(
&db,
&entry.action,
&entry.entity_type,
&entry.entity_id,
&entry.details,
) {
warn!(error = %err, "failed to write generate_rca audit entry");
}
Ok(document)
}
@ -119,19 +116,15 @@ pub async fn generate_postmortem(
doc_id,
audit_details.to_string(),
);
let _ = db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
entry.id,
entry.timestamp,
entry.action,
entry.entity_type,
entry.entity_id,
entry.user_id,
entry.details
],
);
if let Err(err) = crate::audit::log::write_audit_event(
&db,
&entry.action,
&entry.entity_type,
&entry.entity_id,
&entry.details,
) {
warn!(error = %err, "failed to write generate_postmortem audit entry");
}
Ok(document)
}

View File

@ -288,18 +288,12 @@ async fn handle_oauth_callback_internal(
"expires_at": expires_at,
});
db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details)
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
uuid::Uuid::now_v7().to_string(),
chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string(),
crate::audit::log::write_audit_event(
&db,
"oauth_callback_success",
"credential",
service,
"local",
audit_details.to_string(),
],
&service,
&audit_details.to_string(),
)
.map_err(|e| format!("Failed to log audit event: {e}"))?;
@ -718,22 +712,16 @@ pub async fn save_manual_token(
.map_err(|e| format!("Failed to store token: {e}"))?;
// Log audit event
db.execute(
"INSERT INTO audit_log (id, timestamp, action, entity_type, entity_id, user_id, details)
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
rusqlite::params![
uuid::Uuid::now_v7().to_string(),
chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string(),
crate::audit::log::write_audit_event(
&db,
"manual_token_saved",
"credential",
request.service,
"local",
serde_json::json!({
&request.service,
&serde_json::json!({
"token_type": request.token_type,
"token_hash": token_hash,
})
.to_string(),
],
)
.map_err(|e| format!("Failed to log audit event: {e}"))?;

View File

@ -1,6 +1,22 @@
use rusqlite::Connection;
use std::path::Path;
/// Resolve the database encryption key.
///
/// Precedence: a non-blank `TFTSR_DB_KEY` environment variable always wins;
/// debug builds fall back to a fixed development key; release builds refuse
/// to start without an explicit key.
fn get_db_key() -> anyhow::Result<String> {
    match std::env::var("TFTSR_DB_KEY") {
        Ok(key) if !key.trim().is_empty() => Ok(key),
        // Missing or blank key: tolerated only in debug builds.
        _ if cfg!(debug_assertions) => Ok("dev-key-change-in-prod".to_string()),
        _ => Err(anyhow::anyhow!(
            "TFTSR_DB_KEY must be set in release builds"
        )),
    }
}
pub fn open_encrypted_db(path: &Path, key: &str) -> anyhow::Result<Connection> {
let conn = Connection::open(path)?;
// ALL cipher settings MUST be set before the first database access.
@ -30,8 +46,7 @@ pub fn init_db(data_dir: &Path) -> anyhow::Result<Connection> {
let db_path = data_dir.join("tftsr.db");
// In dev/test mode use unencrypted DB; in production use encryption
let key =
std::env::var("TFTSR_DB_KEY").unwrap_or_else(|_| "dev-key-change-in-prod".to_string());
let key = get_db_key()?;
let conn = if cfg!(debug_assertions) {
open_dev_db(&db_path)?
@ -42,3 +57,24 @@ pub fn init_db(data_dir: &Path) -> anyhow::Result<Connection> {
crate::db::migrations::run_migrations(&conn)?;
Ok(conn)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Mutex;

    // Both tests mutate the process-wide TFTSR_DB_KEY variable. Rust's
    // default test runner is parallel, so without serialization the
    // set/remove calls can interleave and make either test flaky.
    static ENV_LOCK: Mutex<()> = Mutex::new(());

    // A non-blank env value must be returned verbatim.
    #[test]
    fn test_get_db_key_uses_env_var_when_present() {
        let _guard = ENV_LOCK.lock().unwrap();
        std::env::set_var("TFTSR_DB_KEY", "test-db-key");
        let key = get_db_key();
        // Clean up before asserting so a failure never leaks env state.
        std::env::remove_var("TFTSR_DB_KEY");
        assert_eq!(key.unwrap(), "test-db-key");
    }

    // The hard-coded fallback only exists in debug builds; in release builds
    // a blank key is an error, so this test is compiled out there instead of
    // failing under `cargo test --release`.
    #[cfg(debug_assertions)]
    #[test]
    fn test_get_db_key_debug_fallback_for_empty_env() {
        let _guard = ENV_LOCK.lock().unwrap();
        std::env::set_var("TFTSR_DB_KEY", " ");
        let key = get_db_key();
        std::env::remove_var("TFTSR_DB_KEY");
        assert_eq!(key.unwrap(), "dev-key-change-in-prod");
    }
}

View File

@ -150,6 +150,11 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
UNIQUE(service)
);",
),
(
"012_audit_hash_chain",
"ALTER TABLE audit_log ADD COLUMN prev_hash TEXT NOT NULL DEFAULT '';
ALTER TABLE audit_log ADD COLUMN entry_hash TEXT NOT NULL DEFAULT '';",
),
];
for (name, sql) in migrations {

View File

@ -1,5 +1,6 @@
use rusqlite::OptionalExtension;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PkceChallenge {
@ -23,19 +24,11 @@ pub struct PatCredential {
/// Generate a PKCE code verifier and challenge for OAuth flows.
pub fn generate_pkce() -> PkceChallenge {
use sha2::{Digest, Sha256};
use rand::{thread_rng, RngCore};
// Generate a random 32-byte verifier
let verifier_bytes: Vec<u8> = (0..32)
.map(|_| {
let r: u8 = (std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.subsec_nanos()
% 256) as u8;
r
})
.collect();
let mut verifier_bytes = [0u8; 32];
thread_rng().fill_bytes(&mut verifier_bytes);
let code_verifier = base64_url_encode(&verifier_bytes);
let challenge_hash = Sha256::digest(code_verifier.as_bytes());
@ -162,7 +155,6 @@ pub fn get_pat(conn: &rusqlite::Connection, service: &str) -> Result<Option<Stri
}
/// Hex-encoded SHA-256 digest of a token, used so audit records can
/// reference a credential without persisting the raw secret.
fn hash_token(token: &str) -> String {
    use sha2::{Digest, Sha256};
    format!("{:x}", Sha256::digest(token.as_bytes()))
}
@ -173,10 +165,29 @@ fn base64_url_encode(data: &[u8]) -> String {
}
fn urlencoding_encode(s: &str) -> String {
s.replace(' ', "%20")
.replace('&', "%26")
.replace('=', "%3D")
.replace('+', "%2B")
urlencoding::encode(s).into_owned()
}
/// Resolve the raw key material used to derive the AES token-encryption key.
///
/// A non-blank `TFTSR_ENCRYPTION_KEY` environment variable takes priority;
/// debug builds fall back to a fixed development value, while release builds
/// treat a missing/blank variable as a hard error.
fn get_encryption_key_material() -> Result<String, String> {
    match std::env::var("TFTSR_ENCRYPTION_KEY") {
        Ok(material) if !material.trim().is_empty() => Ok(material),
        // Absent or blank: only debug builds may run with the dev default.
        _ if cfg!(debug_assertions) => Ok("dev-key-change-me-in-production-32b".to_string()),
        _ => Err("TFTSR_ENCRYPTION_KEY must be set in release builds".to_string()),
    }
}
/// Derive the 32-byte AES-256 key from the configured key material.
///
/// SHA-256 maps arbitrary-length material onto exactly 32 bytes, so any
/// env-provided string yields a full-strength key deterministically.
fn derive_aes_key() -> Result<[u8; 32], String> {
    let material = get_encryption_key_material()?;
    let hashed = Sha256::digest(material.as_bytes());
    let mut aes_key = [0u8; 32];
    aes_key.copy_from_slice(&hashed);
    Ok(aes_key)
}
/// Encrypt a token using AES-256-GCM.
@ -189,14 +200,7 @@ pub fn encrypt_token(token: &str) -> Result<String, String> {
};
use rand::{thread_rng, RngCore};
// Get encryption key from env or use default (WARNING: insecure for production)
let key_material = std::env::var("TFTSR_ENCRYPTION_KEY")
.unwrap_or_else(|_| "dev-key-change-me-in-production-32b".to_string());
let mut key_bytes = [0u8; 32];
let src = key_material.as_bytes();
let len = std::cmp::min(src.len(), 32);
key_bytes[..len].copy_from_slice(&src[..len]);
let key_bytes = derive_aes_key()?;
let cipher = Aes256Gcm::new(&key_bytes.into());
@ -242,14 +246,7 @@ pub fn decrypt_token(encrypted: &str) -> Result<String, String> {
let nonce = Nonce::from_slice(&data[..12]);
let ciphertext = &data[12..];
// Get encryption key
let key_material = std::env::var("TFTSR_ENCRYPTION_KEY")
.unwrap_or_else(|_| "dev-key-change-me-in-production-32b".to_string());
let mut key_bytes = [0u8; 32];
let src = key_material.as_bytes();
let len = std::cmp::min(src.len(), 32);
key_bytes[..len].copy_from_slice(&src[..len]);
let key_bytes = derive_aes_key()?;
let cipher = Aes256Gcm::new(&key_bytes.into());
@ -563,4 +560,20 @@ mod tests {
let retrieved = get_pat(&conn, "servicenow").unwrap();
assert_eq!(retrieved, Some("token-v2".to_string()));
}
#[test]
fn test_generate_pkce_is_not_deterministic() {
let a = generate_pkce();
let b = generate_pkce();
assert_ne!(a.code_verifier, b.code_verifier);
}
#[test]
fn test_derive_aes_key_is_stable_for_same_input() {
std::env::set_var("TFTSR_ENCRYPTION_KEY", "stable-test-key");
let k1 = derive_aes_key().unwrap();
let k2 = derive_aes_key().unwrap();
assert_eq!(k1, k2);
std::env::remove_var("TFTSR_ENCRYPTION_KEY");
}
}

View File

@ -18,6 +18,10 @@ pub struct WorkItem {
pub description: String,
}
/// Escape a value for safe interpolation inside a single-quoted WIQL string
/// literal: WIQL represents an embedded quote as two consecutive quotes.
fn escape_wiql_literal(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        if ch == '\'' {
            // Double the quote rather than dropping or backslash-escaping it.
            escaped.push('\'');
        }
        escaped.push(ch);
    }
    escaped
}
/// Test connection to Azure DevOps by querying project info
pub async fn test_connection(config: &AzureDevOpsConfig) -> Result<ConnectionResult, String> {
let client = reqwest::Client::new();
@ -61,8 +65,9 @@ pub async fn search_work_items(
);
// Build WIQL query
let escaped_query = escape_wiql_literal(query);
let wiql = format!(
"SELECT [System.Id], [System.Title], [System.WorkItemType], [System.State] FROM WorkItems WHERE [System.Title] CONTAINS '{query}' ORDER BY [System.CreatedDate] DESC"
"SELECT [System.Id], [System.Title], [System.WorkItemType], [System.State] FROM WorkItems WHERE [System.Title] CONTAINS '{escaped_query}' ORDER BY [System.CreatedDate] DESC"
);
let body = serde_json::json!({ "query": wiql });
@ -338,6 +343,12 @@ pub async fn update_work_item(
mod tests {
use super::*;
#[test]
fn test_escape_wiql_literal_escapes_single_quotes() {
let escaped = escape_wiql_literal("can't deploy");
assert_eq!(escaped, "can''t deploy");
}
#[tokio::test]
async fn test_connection_success() {
let mut server = mockito::Server::new_async().await;

View File

@ -22,6 +22,10 @@ pub struct Page {
pub url: String,
}
/// Escape a value for safe interpolation inside a double-quoted CQL string
/// literal: both backslashes and double quotes must be backslash-escaped,
/// and backslashes are handled per-character so inserted escapes are never
/// re-escaped.
fn escape_cql_literal(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
/// Test connection to Confluence by fetching current user info
pub async fn test_connection(config: &ConfluenceConfig) -> Result<ConnectionResult, String> {
let client = reqwest::Client::new();
@ -105,9 +109,11 @@ pub async fn search_pages(
config.base_url.trim_end_matches('/')
);
let mut cql = format!("text ~ \"{query}\"");
let escaped_query = escape_cql_literal(query);
let mut cql = format!("text ~ \"{escaped_query}\"");
if let Some(space) = space_key {
cql = format!("{cql} AND space = {space}");
let escaped_space = escape_cql_literal(space);
cql = format!("{cql} AND space = \"{escaped_space}\"");
}
let resp = client
@ -280,6 +286,12 @@ pub async fn update_page(
mod tests {
use super::*;
#[test]
fn test_escape_cql_literal_escapes_quotes_and_backslashes() {
let escaped = escape_cql_literal(r#"C:\logs\"prod""#);
assert_eq!(escaped, r#"C:\\logs\\\"prod\""#);
}
#[tokio::test]
async fn test_connection_success() {
let mut server = mockito::Server::new_async().await;

View File

@ -8,6 +8,7 @@ pub mod ollama;
pub mod pii;
pub mod state;
use sha2::{Digest, Sha256};
use state::AppState;
use std::sync::{Arc, Mutex};
@ -36,14 +37,17 @@ pub fn run() {
app_data_dir: data_dir.clone(),
integration_webviews: Arc::new(Mutex::new(std::collections::HashMap::new())),
};
let stronghold_salt = format!(
"tftsr-stronghold-salt-v1-{:x}",
Sha256::digest(data_dir.to_string_lossy().as_bytes())
);
tauri::Builder::default()
.plugin(
tauri_plugin_stronghold::Builder::new(|password| {
use sha2::{Digest, Sha256};
tauri_plugin_stronghold::Builder::new(move |password| {
let mut hasher = Sha256::new();
hasher.update(password);
hasher.update(b"tftsr-stronghold-salt-v1");
hasher.update(stronghold_salt.as_bytes());
hasher.finalize().to_vec()
})
.build(),

View File

@ -35,7 +35,9 @@ pub fn get_patterns() -> Vec<(PiiType, Regex)> {
// Credit card
(
PiiType::CreditCard,
Regex::new(r"\b(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|3[47][0-9]{13})\b")
Regex::new(
r"\b(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|3[47][0-9]{13}|6(?:011|5[0-9]{2})[0-9]{12}|3(?:0[0-5]|[68][0-9])[0-9]{11}|35(?:2[89]|[3-8][0-9])[0-9]{12})\b",
)
.unwrap(),
),
// Email
@ -70,5 +72,13 @@ pub fn get_patterns() -> Vec<(PiiType, Regex)> {
Regex::new(r"\b(?:\+?1[-.\s]?)?\(?[0-9]{3}\)?[-.\s]?[0-9]{3}[-.\s]?[0-9]{4}\b")
.unwrap(),
),
// Hostname / FQDN
(
PiiType::Hostname,
Regex::new(
r"\b(?:[A-Za-z0-9](?:[A-Za-z0-9\-]{0,61}[A-Za-z0-9])?\.)+[A-Za-z]{2,63}\b",
)
.unwrap(),
),
]
}

View File

@ -41,6 +41,15 @@ export const useSettingsStore = create<SettingsState>()(
?? state.ai_providers[0];
},
}),
{ name: "tftsr-settings" }
{
name: "tftsr-settings",
partialize: (state) => ({
...state,
ai_providers: state.ai_providers.map((provider) => ({
...provider,
api_key: "",
})),
}),
}
)
);

View File

@ -20,11 +20,11 @@ describe("auto-tag release cross-platform artifact handling", () => {
expect(workflow).toContain("ERROR: No Linux amd64 artifacts were found to upload.");
expect(workflow).toContain("ERROR: No Linux arm64 artifacts were found to upload.");
expect(workflow).toContain(
"ERROR: linux-arm64 job is not running on an ARM64 host (uname -m=$ARCH).",
);
expect(workflow).toContain("CI=true cargo tauri build");
expect(workflow).toContain("find src-tauri/target/release/bundle -type f");
expect(workflow).toContain("CI=true npx tauri build");
expect(workflow).toContain("find src-tauri/target/aarch64-unknown-linux-gnu/release/bundle -type f");
expect(workflow).toContain("CC_aarch64_unknown_linux_gnu: aarch64-linux-gnu-gcc");
expect(workflow).toContain("PKG_CONFIG_ALLOW_CROSS: \"1\"");
expect(workflow).toContain("aarch64-unknown-linux-gnu");
});
it("fails windows uploads when no artifacts are found", () => {

View File

@ -11,6 +11,7 @@ const mockProvider: ProviderConfig = {
describe("Settings Store", () => {
beforeEach(() => {
localStorage.clear();
useSettingsStore.setState({
theme: "dark",
ai_providers: [],
@ -43,4 +44,11 @@ describe("Settings Store", () => {
useSettingsStore.getState().setTheme("light");
expect(useSettingsStore.getState().theme).toBe("light");
});
it("does not persist API keys to localStorage", () => {
useSettingsStore.getState().addProvider(mockProvider);
const raw = localStorage.getItem("tftsr-settings");
expect(raw).toBeTruthy();
expect(raw).not.toContain("sk-test-key");
});
});