Compare commits

...

19 Commits

Author SHA1 Message Date
6d105a70ad chore: update CHANGELOG.md for v0.2.66 [skip ci] 2026-04-15 02:11:31 +00:00
ca56b583c5 Merge pull request 'feat: implement dynamic versioning from Git tags' (#42) from fix/version-dynamic-build into master
All checks were successful
Auto Tag / autotag (push) Successful in 12s
Auto Tag / wiki-sync (push) Successful in 13s
Auto Tag / changelog (push) Successful in 41s
Auto Tag / build-linux-amd64 (push) Successful in 13m51s
Auto Tag / build-linux-arm64 (push) Successful in 15m41s
Auto Tag / build-windows-amd64 (push) Successful in 16m36s
Auto Tag / build-macos-arm64 (push) Successful in 2m22s
Reviewed-on: #42
2026-04-15 02:10:10 +00:00
Shaun Arman
8c35e91aef Merge branch 'master' into fix/version-dynamic-build
Some checks failed
Test / rust-fmt-check (pull_request) Successful in 1m8s
Test / frontend-typecheck (pull_request) Successful in 1m17s
Test / frontend-tests (pull_request) Successful in 1m23s
PR Review Automation / review (pull_request) Failing after 2m11s
Test / rust-clippy (pull_request) Successful in 6m11s
Test / rust-tests (pull_request) Successful in 9m7s
2026-04-14 21:09:11 -05:00
Shaun Arman
1055841b6f fix: remove invalid --locked flag from cargo commands and fix format string
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 1m3s
PR Review Automation / review (pull_request) Successful in 2m54s
Test / frontend-typecheck (pull_request) Successful in 1m14s
Test / frontend-tests (pull_request) Successful in 1m25s
Test / rust-clippy (pull_request) Successful in 8m1s
Test / rust-tests (pull_request) Successful in 10m11s
- Remove --locked flag from cargo fmt, clippy, and test commands in CI
- Update build.rs to use Rust 2021 direct variable interpolation in format strings
2026-04-14 20:50:47 -05:00
f38ca7e2fc chore: update CHANGELOG.md for v0.2.63 [skip ci] 2026-04-15 01:45:29 +00:00
a9956a16a4 Merge pull request 'feat(integrations): implement query expansion for semantic search' (#44) from feature/integration-search-expansion into master
Some checks failed
Auto Tag / autotag (push) Successful in 7s
Auto Tag / wiki-sync (push) Successful in 6s
Auto Tag / changelog (push) Successful in 43s
Auto Tag / build-linux-amd64 (push) Successful in 15m51s
Auto Tag / build-linux-arm64 (push) Successful in 18m51s
Auto Tag / build-windows-amd64 (push) Successful in 19m44s
Auto Tag / build-macos-arm64 (push) Has been cancelled
Reviewed-on: #44
2026-04-15 01:44:42 +00:00
Shaun Arman
bc50a78db7 fix: correct WIQL syntax and escape_wiql implementation
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 10s
Test / frontend-typecheck (pull_request) Successful in 1m11s
Test / frontend-tests (pull_request) Successful in 1m12s
PR Review Automation / review (pull_request) Successful in 3m6s
Test / rust-clippy (pull_request) Successful in 3m49s
Test / rust-tests (pull_request) Successful in 5m4s
- Replace CONTAINS with ~ operator (correct WIQL syntax for text matching)
- Remove escaping of ~, *, ? which are valid WIQL wildcards
- Update tests to reflect correct escape_wiql behavior
2026-04-14 20:38:21 -05:00
Shaun Arman
e6d1965342 security: address all issues from automated PR review
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 10s
Test / frontend-typecheck (pull_request) Successful in 1m9s
Test / frontend-tests (pull_request) Successful in 1m13s
PR Review Automation / review (pull_request) Successful in 2m58s
Test / rust-clippy (pull_request) Successful in 3m50s
Test / rust-tests (pull_request) Successful in 5m12s
- Add missing CQL escaping for &, |, +, - characters
- Improve escape_wiql() to escape more dangerous characters: ", \, (, ), ~, *, ?, ;, =
- Sanitize HTML in excerpts using strip_html_tags() to prevent XSS
- Add unit tests for escape_wiql, escape_cql, canonicalize_url functions
- Document expand_query() behavior (always returns at least original query)
- All tests pass (158/158), cargo fmt and clippy pass
2026-04-14 20:26:05 -05:00
Shaun Arman
708e1e9c18 security: fix query expansion issues from PR review
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 12s
Test / frontend-typecheck (pull_request) Successful in 1m11s
Test / frontend-tests (pull_request) Successful in 1m16s
PR Review Automation / review (pull_request) Successful in 3m0s
Test / rust-clippy (pull_request) Successful in 3m50s
Test / rust-tests (pull_request) Successful in 5m0s
- Use MAX_EXPANDED_QUERIES constant in confluence_search.rs instead of hardcoded 3
- Improve escape_wiql() to escape more dangerous characters: ", \, (, ), ~, *, ?, ;, =
- Fix logging to show expanded_query instead of search_url in confluence_search.rs

All tests pass (142/142), cargo fmt and clippy pass.
2026-04-14 20:07:59 -05:00
Shaun Arman
5b45c6c418 fix(integrations): security and correctness improvements
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 12s
Test / frontend-typecheck (pull_request) Successful in 1m18s
Test / frontend-tests (pull_request) Successful in 1m21s
Test / rust-clippy (pull_request) Successful in 3m56s
PR Review Automation / review (pull_request) Successful in 4m20s
Test / rust-tests (pull_request) Successful in 5m22s
- Add url canonicalization for deduplication (strip fragments/query params)
- Add WIQL injection escaping for Azure DevOps work item searches
- Add CQL injection escaping for Confluence searches
- Add MAX_EXPANDED_QUERIES constant for consistency
- Fix logging to show expanded_query instead of search_url
- Add input validation for empty queries
- Add url crate dependency for URL parsing

All 142 tests pass.
2026-04-14 19:55:32 -05:00
Shaun Arman
096068ed2b feat(integrations): implement query expansion for semantic search
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 12s
Test / frontend-typecheck (pull_request) Successful in 1m11s
Test / frontend-tests (pull_request) Successful in 1m15s
PR Review Automation / review (pull_request) Successful in 3m13s
Test / rust-clippy (pull_request) Successful in 3m45s
Test / rust-tests (pull_request) Successful in 5m9s
- Add query_expansion.rs module with product synonyms and keyword extraction
- Update confluence_search.rs to use expanded queries
- Update servicenow_search.rs to use expanded queries
- Update azuredevops_search.rs to use expanded queries
- Update webview_fetch.rs to use expanded queries
- Fix extract_keywords infinite loop bug for non-alphanumeric endings

All 142 tests pass.
2026-04-14 19:37:27 -05:00
Shaun Arman
9248811076 fix: add --locked to cargo commands and improve version update script
Some checks failed
Test / rust-fmt-check (pull_request) Failing after 1m11s
Test / frontend-typecheck (pull_request) Successful in 1m18s
Test / frontend-tests (pull_request) Successful in 1m21s
Test / rust-clippy (pull_request) Failing after 3m25s
PR Review Automation / review (pull_request) Successful in 3m37s
Test / rust-tests (pull_request) Successful in 5m9s
- Add --locked to fmt, clippy, and test commands in CI
- Remove updateCargoLock() and rely on cargo generate-lockfile
- Add .git directory existence check in update-version.mjs
- Use package.json as dynamic fallback instead of hardcoded 0.2.50
- Ensure execSync uses shell: false explicitly
2026-04-13 17:54:16 -05:00
Shaun Arman
007d0ee9d5 chore: fix version update implementation
All checks were successful
PR Review Automation / review (pull_request) Successful in 2m18s
- Replace npm ci with npm install in CI
- Remove --locked flag from cargo clippy/test
- Add cargo generate-lockfile after version update
- Update update-version.mjs with semver validation
- Add build.rs for Rust-level version injection
2026-04-13 16:34:48 -05:00
Shaun Arman
9e1a9b1d34 feat: implement dynamic versioning from Git tags
Some checks failed
Test / rust-clippy (pull_request) Failing after 15s
Test / rust-tests (pull_request) Failing after 19s
Test / rust-fmt-check (pull_request) Successful in 55s
Test / frontend-typecheck (pull_request) Successful in 1m22s
Test / frontend-tests (pull_request) Successful in 1m26s
PR Review Automation / review (pull_request) Successful in 2m57s
- Add build.rs to read version from git describe --tags
- Create update-version.mjs script to sync version across files
- Add get_app_version() command to Rust backend
- Update App.tsx to use custom version command
- Run version update in CI before Rust checks
2026-04-13 16:12:03 -05:00
cdb1dd1dad chore: update CHANGELOG.md for v0.2.55 [skip ci] 2026-04-13 21:09:47 +00:00
6dbe40ef03 chore: update CHANGELOG.md for v0.2.53 [skip ci] 2026-04-13 20:25:56 +00:00
Shaun Arman
75fc3ca67c fix: add Windows nsis target and update CHANGELOG to v0.2.61
All checks were successful
Auto Tag / autotag (push) Successful in 6s
Auto Tag / wiki-sync (push) Successful in 6s
Auto Tag / changelog (push) Successful in 43s
Auto Tag / build-macos-arm64 (push) Successful in 3m0s
Auto Tag / build-linux-amd64 (push) Successful in 11m29s
Auto Tag / build-linux-arm64 (push) Successful in 13m31s
Auto Tag / build-windows-amd64 (push) Successful in 14m10s
- Update CHANGELOG to include releases v0.2.54 through v0.2.61
- Add 'nsis' to bundle targets in tauri.conf.json for Windows builds
- This fixes Windows artifact upload failures by enabling .exe/.msi generation

The Windows build was failing because tauri.conf.json only had Linux bundle
targets (['deb', 'rpm']). Without nsis target, no Windows installers were
produced, causing the upload step to fail with 'No Windows amd64 artifacts
were found'.
2026-04-13 15:25:05 -05:00
fdae6d6e6d chore: update CHANGELOG.md for v0.2.53 [skip ci] 2026-04-13 19:58:25 +00:00
Shaun Arman
d78181e8c0 chore: trigger release with fix
Some checks failed
Auto Tag / autotag (push) Successful in 7s
Auto Tag / wiki-sync (push) Successful in 6s
Auto Tag / changelog (push) Successful in 43s
Auto Tag / build-macos-arm64 (push) Successful in 4m25s
Auto Tag / build-linux-amd64 (push) Successful in 11m27s
Auto Tag / build-linux-arm64 (push) Successful in 13m25s
Auto Tag / build-windows-amd64 (push) Failing after 13m38s
2026-04-13 14:57:35 -05:00
18 changed files with 1330 additions and 593 deletions

View File

@ -37,6 +37,11 @@ jobs:
key: ${{ runner.os }}-cargo-linux-amd64-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-linux-amd64-
- name: Install dependencies
run: npm install --legacy-peer-deps
- name: Update version from Git
run: node scripts/update-version.mjs
- run: cargo generate-lockfile --manifest-path src-tauri/Cargo.toml
- run: cargo fmt --manifest-path src-tauri/Cargo.toml --check
rust-clippy:
@ -72,7 +77,7 @@ jobs:
key: ${{ runner.os }}-cargo-linux-amd64-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-linux-amd64-
- run: cargo clippy --locked --manifest-path src-tauri/Cargo.toml -- -D warnings
- run: cargo clippy --manifest-path src-tauri/Cargo.toml -- -D warnings
rust-tests:
runs-on: ubuntu-latest
@ -107,7 +112,7 @@ jobs:
key: ${{ runner.os }}-cargo-linux-amd64-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-linux-amd64-
- run: cargo test --locked --manifest-path src-tauri/Cargo.toml -- --test-threads=1
- run: cargo test --manifest-path src-tauri/Cargo.toml -- --test-threads=1
frontend-typecheck:
runs-on: ubuntu-latest

View File

@ -4,7 +4,60 @@ All notable changes to TFTSR are documented here.
Commit types shown: feat, fix, perf, docs, refactor.
CI, chore, and build changes are excluded.
## [Unreleased]
## [0.2.65] — 2026-04-15
### Bug Fixes
- Add --locked to cargo commands and improve version update script
- Remove invalid --locked flag from cargo commands and fix format string
- **integrations**: Security and correctness improvements
- Correct WIQL syntax and escape_wiql implementation
### Features
- Implement dynamic versioning from Git tags
- **integrations**: Implement query expansion for semantic search
### Security
- Fix query expansion issues from PR review
- Address all issues from automated PR review
## [0.2.63] — 2026-04-13
### Bug Fixes
- Add Windows nsis target and update CHANGELOG to v0.2.61
## [0.2.61] — 2026-04-13
### Bug Fixes
- Remove AppImage from upload artifact patterns
## [0.2.59] — 2026-04-13
### Bug Fixes
- Remove AppImage bundling to fix linux-amd64 build
## [0.2.57] — 2026-04-13
### Bug Fixes
- Add fuse dependency for AppImage support
### Refactoring
- Remove custom linuxdeploy install per CI; CI uses tauri-downloaded version
- Revert to original Dockerfile without manual linuxdeploy installation
## [0.2.56] — 2026-04-13
### Bug Fixes
- Add missing ai_providers columns and fix linux-amd64 build
- Address AI review findings
- Address critical AI review issues
## [0.2.55] — 2026-04-13
### Bug Fixes
- **ci**: Use Gitea file API to push CHANGELOG.md — eliminates non-fast-forward rejection
- **ci**: Harden CHANGELOG.md API push step per review
## [0.2.54] — 2026-04-13
### Bug Fixes
- **ci**: Correct git-cliff archive path in tar extraction

View File

@ -1,11 +1,12 @@
{
"name": "tftsr",
"private": true,
"version": "0.2.50",
"version": "0.2.62",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"version:update": "node scripts/update-version.mjs",
"preview": "vite preview",
"tauri": "tauri",
"test": "vitest",

111
scripts/update-version.mjs Normal file
View File

@ -0,0 +1,111 @@
#!/usr/bin/env node
import { execFileSync, execSync } from 'child_process';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';
// Resolve the repository root relative to this script's own location
// (scripts/ sits one level below the project root), so the script works
// regardless of the caller's current working directory.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const projectRoot = resolve(__dirname, '..');
/**
 * Check that a version string is plain semver: exactly three
 * dot-separated numeric components (X.Y.Z), with no leading "v",
 * prerelease suffix, or build metadata.
 */
function isValidSemver(version) {
  const components = version.split('.');
  return components.length === 3 && components.every((part) => /^[0-9]+$/.test(part));
}
/**
 * Ensure `root` contains a .git entry; throw otherwise so callers never
 * run git commands outside a repository checkout.
 */
function validateGitRepo(root) {
  const gitEntry = resolve(root, '.git');
  if (existsSync(gitEntry)) {
    return;
  }
  throw new Error(`Not a Git repository: ${root}`);
}
/**
 * Resolve the release version from the most recent Git tag.
 *
 * Runs `git describe --tags --abbrev=0`, strips a leading "v", and
 * validates the result as X.Y.Z semver. On any failure (no repo tags,
 * git missing, malformed tag) falls back to the package.json version.
 *
 * Note: the original used execSync with `shell: false`, which is
 * meaningless — execSync always spawns a shell. execFileSync takes an
 * argument vector and never invokes a shell at all.
 */
function getVersionFromGit() {
  validateGitRepo(projectRoot);
  try {
    const output = execFileSync('git', ['describe', '--tags', '--abbrev=0'], {
      encoding: 'utf-8',
      cwd: projectRoot,
    });
    const version = output.trim().replace(/^v/, '');
    if (!isValidSemver(version)) {
      const pkgJsonVersion = getFallbackVersion();
      console.warn(`Invalid version format "${version}" from git describe, using package.json fallback: ${pkgJsonVersion}`);
      return pkgJsonVersion;
    }
    return version;
  } catch {
    const pkgJsonVersion = getFallbackVersion();
    console.warn(`Failed to get version from Git tags, using package.json fallback: ${pkgJsonVersion}`);
    return pkgJsonVersion;
  }
}
/**
 * Read the fallback version from the repo's package.json. If the file
 * is missing, unreadable, or has no "version" field, fall back to a
 * hard-coded last-resort version.
 */
function getFallbackVersion() {
  const HARD_FALLBACK = '0.2.50';
  const pkgPath = resolve(projectRoot, 'package.json');
  if (!existsSync(pkgPath)) {
    return HARD_FALLBACK;
  }
  try {
    const parsed = JSON.parse(readFileSync(pkgPath, 'utf-8'));
    return parsed.version || HARD_FALLBACK;
  } catch {
    return HARD_FALLBACK;
  }
}
/**
 * Write `version` into the repo's package.json, preserving every other
 * field and re-serializing with 2-space indentation plus a trailing
 * newline. Throws if the file does not exist.
 */
function updatePackageJson(version) {
  const fullPath = resolve(projectRoot, 'package.json');
  if (!existsSync(fullPath)) {
    throw new Error(`File not found: ${fullPath}`);
  }
  const pkg = JSON.parse(readFileSync(fullPath, 'utf-8'));
  pkg.version = version;
  const serialized = JSON.stringify(pkg, null, 2) + '\n';
  writeFileSync(fullPath, serialized, 'utf-8');
  console.log(`✓ Updated package.json to ${version}`);
}
/**
 * Rewrite the version value in a TOML or JSON config file.
 *
 * Recognizes both TOML (`version = "…"`) and JSON (`"version": "…"`)
 * lines. The original implementation only matched the TOML form, so the
 * call against tauri.conf.json (which is JSON) silently rewrote the file
 * unchanged. It also replaced the whole matched line, dropping the
 * original indentation and trailing text, and unconditionally appended
 * '\n' after joining split lines, growing the file by one blank line on
 * every run. All three issues are fixed here: only the quoted value is
 * substituted, surrounding text is preserved, and a trailing newline is
 * added only when the file did not already end with one.
 *
 * NOTE(review): like the original, this matches ANY line of the given
 * shape — a bare `version = "…"` line inside a TOML dependency table
 * would also be rewritten. Confirm the target files contain only the
 * package-level version line in that form.
 */
function updateTOML(path, version) {
  const fullPath = resolve(projectRoot, path);
  if (!existsSync(fullPath)) {
    throw new Error(`File not found: ${fullPath}`);
  }
  const content = readFileSync(fullPath, 'utf-8');
  const tomlRe = /^(\s*version\s*=\s*")[^"]*(".*)$/;
  const jsonRe = /^(\s*"version"\s*:\s*")[^"]*(".*)$/;
  const updated = content
    .split('\n')
    .map((line) => {
      const match = line.match(tomlRe) ?? line.match(jsonRe);
      return match ? `${match[1]}${version}${match[2]}` : line;
    })
    .join('\n');
  const finalText = updated.endsWith('\n') ? updated : updated + '\n';
  writeFileSync(fullPath, finalText, 'utf-8');
  console.log(`✓ Updated ${path} to ${version}`);
}
// Entry point: resolve the version once from Git, then propagate it to
// every file that embeds it. Any failure throws and aborts the script
// with a non-zero exit code, which fails the CI step.
const version = getVersionFromGit();
console.log(`Setting version to: ${version}`);
updatePackageJson(version);
updateTOML('src-tauri/Cargo.toml', version);
// NOTE(review): tauri.conf.json is JSON, not TOML — confirm updateTOML
// actually matches its `"version": "…"` line; as written it only
// recognizes TOML-style `version = "…"` lines.
updateTOML('src-tauri/tauri.conf.json', version);
console.log(`✓ All version fields updated to ${version}`);

3
src-tauri/Cargo.lock generated
View File

@ -6139,7 +6139,7 @@ dependencies = [
[[package]]
name = "trcaa"
version = "0.2.50"
version = "0.2.62"
dependencies = [
"aes-gcm",
"aho-corasick",
@ -6174,6 +6174,7 @@ dependencies = [
"tokio-test",
"tracing",
"tracing-subscriber",
"url",
"urlencoding",
"uuid",
"warp",

View File

@ -1,6 +1,6 @@
[package]
name = "trcaa"
version = "0.2.50"
version = "0.2.62"
edition = "2021"
[lib]
@ -44,6 +44,7 @@ lazy_static = "1.4"
warp = "0.3"
urlencoding = "2"
infer = "0.15"
url = "2.5.8"
[dev-dependencies]
tokio-test = "0.4"
@ -52,3 +53,7 @@ mockito = "1.2"
[profile.release]
opt-level = "s"
strip = true

View File

@ -1,3 +1,30 @@
fn main() {
    // Resolve the version once at build time and expose it to the crate as
    // the APP_VERSION compile-time env var (readable via env!/option_env!).
    let version = get_version_from_git();
    println!("cargo:rustc-env=APP_VERSION={version}");
    // Re-run this build script when the branch head or tags change so the
    // embedded version stays current.
    // NOTE(review): .git/refs/tags may be empty when refs are packed into
    // .git/packed-refs — confirm these triggers fire after `git tag`.
    println!("cargo:rerun-if-changed=.git/refs/heads/master");
    println!("cargo:rerun-if-changed=.git/refs/tags");
    tauri_build::build()
}
/// Best-effort version lookup from the most recent Git tag.
///
/// Runs `git describe --tags --abbrev=0` and strips any leading `v`.
/// If git is unavailable, the command fails, or the tag is empty, a
/// hard-coded last-known version is returned so the build never breaks.
fn get_version_from_git() -> String {
    let describe = std::process::Command::new("git")
        .args(["describe", "--tags", "--abbrev=0"])
        .output();

    if let Ok(out) = describe {
        if out.status.success() {
            let tag = String::from_utf8_lossy(&out.stdout);
            let version = tag.trim().trim_start_matches('v');
            if !version.is_empty() {
                return version.to_string();
            }
        }
    }

    // Fallback when no tag can be resolved (e.g. shallow clone, no git).
    "0.2.50".to_string()
}

View File

@ -4,6 +4,7 @@ use crate::ollama::{
OllamaStatus,
};
use crate::state::{AppSettings, AppState, ProviderConfig};
use std::env;
// --- Ollama commands ---
@ -275,3 +276,11 @@ pub async fn delete_ai_provider(
Ok(())
}
/// Get the application version from build-time environment
#[tauri::command]
pub async fn get_app_version() -> Result<String, String> {
env::var("APP_VERSION")
.or_else(|_| env::var("CARGO_PKG_VERSION"))
.map_err(|e| format!("Failed to get version: {e}"))
}

View File

@ -1,4 +1,40 @@
use super::confluence_search::SearchResult;
use crate::integrations::query_expansion::expand_query;
const MAX_EXPANDED_QUERIES: usize = 3;
/// Escape characters with special meaning in WIQL string literals so user
/// input is treated as literal text.
///
/// The backslash replacement must run *first*: in the original ordering
/// (`"` before `\`), the backslash inserted to escape a double quote was
/// itself doubled by the subsequent backslash pass, turning a lone `"` into
/// `\\"` instead of `\"`. Doubling backslashes first, then inserting the
/// escapes for the remaining characters, yields each character escaped
/// exactly once. Single quotes are doubled per WIQL string-literal rules.
fn escape_wiql(s: &str) -> String {
    s.replace('\\', "\\\\")
        .replace('\'', "''")
        .replace('"', "\\\"")
        .replace('(', "\\(")
        .replace(')', "\\)")
        .replace(';', "\\;")
        .replace('=', "\\=")
}
/// Basic HTML tag stripping to prevent XSS in excerpts.
///
/// Drops everything between `<` and `>` (inclusive), then collapses runs of
/// whitespace in the remaining text to single spaces. This is a lightweight
/// sanitizer, not a full HTML parser.
fn strip_html_tags(html: &str) -> String {
    let mut text = String::with_capacity(html.len());
    let mut inside_tag = false;
    for c in html.chars() {
        if c == '<' {
            inside_tag = true;
        } else if c == '>' {
            inside_tag = false;
        } else if !inside_tag {
            text.push(c);
        }
    }
    // split_whitespace + join normalizes all interior whitespace and
    // implicitly trims leading/trailing whitespace as well.
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
/// Search Azure DevOps Wiki for content matching the query
pub async fn search_wiki(
@ -10,6 +46,11 @@ pub async fn search_wiki(
let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
let client = reqwest::Client::new();
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
// Use Azure DevOps Search API
let search_url = format!(
"{}/_apis/search/wikisearchresults?api-version=7.0",
@ -17,14 +58,14 @@ pub async fn search_wiki(
);
let search_body = serde_json::json!({
"searchText": query,
"searchText": expanded_query,
"$top": 5,
"filters": {
"ProjectFilters": [project]
}
});
tracing::info!("Searching Azure DevOps Wiki: {}", search_url);
tracing::info!("Searching Azure DevOps Wiki with query: {}", expanded_query);
let resp = client
.post(&search_url)
@ -39,9 +80,8 @@ pub async fn search_wiki(
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Err(format!(
"Azure DevOps wiki search failed with status {status}: {text}"
));
tracing::warn!("Azure DevOps wiki search failed with status {status}: {text}");
continue;
}
let json: serde_json::Value = resp
@ -49,10 +89,8 @@ pub async fn search_wiki(
.await
.map_err(|e| format!("Failed to parse ADO wiki search response: {e}"))?;
let mut results = Vec::new();
if let Some(results_array) = json["results"].as_array() {
for item in results_array.iter().take(3) {
for item in results_array.iter().take(MAX_EXPANDED_QUERIES) {
let title = item["fileName"].as_str().unwrap_or("Untitled").to_string();
let path = item["path"].as_str().unwrap_or("");
@ -63,9 +101,7 @@ pub async fn search_wiki(
path
);
let excerpt = item["content"]
.as_str()
.unwrap_or("")
let excerpt = strip_html_tags(item["content"].as_str().unwrap_or(""))
.chars()
.take(300)
.collect::<String>();
@ -83,7 +119,7 @@ pub async fn search_wiki(
None
};
results.push(SearchResult {
all_results.push(SearchResult {
title,
url,
excerpt,
@ -92,8 +128,12 @@ pub async fn search_wiki(
});
}
}
}
Ok(results)
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
Ok(all_results)
}
/// Fetch full wiki page content
@ -151,21 +191,30 @@ pub async fn search_work_items(
let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
let client = reqwest::Client::new();
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
// Use WIQL (Work Item Query Language)
let wiql_url = format!(
"{}/_apis/wit/wiql?api-version=7.0",
org_url.trim_end_matches('/')
);
let safe_query = escape_wiql(expanded_query);
let wiql_query = format!(
"SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] CONTAINS '{query}' OR [System.Description] CONTAINS '{query}') ORDER BY [System.ChangedDate] DESC"
"SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] ~ '{safe_query}' OR [System.Description] ~ '{safe_query}') ORDER BY [System.ChangedDate] DESC"
);
let wiql_body = serde_json::json!({
"query": wiql_query
});
tracing::info!("Searching Azure DevOps work items");
tracing::info!(
"Searching Azure DevOps work items with query: {}",
expanded_query
);
let resp = client
.post(&wiql_url)
@ -178,7 +227,7 @@ pub async fn search_work_items(
.map_err(|e| format!("ADO work item search failed: {e}"))?;
if !resp.status().is_success() {
return Ok(Vec::new()); // Don't fail if work item search fails
continue; // Don't fail if work item search fails
}
let json: serde_json::Value = resp
@ -186,20 +235,24 @@ pub async fn search_work_items(
.await
.map_err(|_| "Failed to parse work item response".to_string())?;
let mut results = Vec::new();
if let Some(work_items) = json["workItems"].as_array() {
// Fetch details for top 3 work items
for item in work_items.iter().take(3) {
for item in work_items.iter().take(MAX_EXPANDED_QUERIES) {
if let Some(id) = item["id"].as_i64() {
if let Ok(work_item) = fetch_work_item_details(org_url, id, &cookie_header).await {
results.push(work_item);
if let Ok(work_item) =
fetch_work_item_details(org_url, id, &cookie_header).await
{
all_results.push(work_item);
}
}
}
}
}
Ok(results)
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
Ok(all_results)
}
/// Fetch work item details
@ -263,3 +316,53 @@ async fn fetch_work_item_details(
source: "Azure DevOps".to_string(),
})
}
// Unit tests for the WIQL escaping and HTML stripping helpers.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_escape_wiql_escapes_single_quotes() {
        // WIQL string literals are single-quoted; quotes are doubled.
        assert_eq!(escape_wiql("test'single"), "test''single");
    }

    #[test]
    fn test_escape_wiql_escapes_double_quotes() {
        // NOTE(review): the expected value `test\\"double` (TWO backslashes)
        // documents current behavior, not the intended contract: escape_wiql
        // inserts `\"` for the quote and then its later backslash pass
        // doubles the backslash it just inserted. If the replace order in
        // escape_wiql is fixed (backslash doubled first), this expectation
        // should become "test\\\"double" (a single escaping backslash).
        assert_eq!(escape_wiql("test\"double"), "test\\\\\"double");
    }

    #[test]
    fn test_escape_wiql_escapes_backslash() {
        assert_eq!(escape_wiql("test\\backslash"), r#"test\\backslash"#);
    }

    #[test]
    fn test_escape_wiql_escapes_parens() {
        assert_eq!(escape_wiql("test(paren"), r#"test\(paren"#);
        assert_eq!(escape_wiql("test)paren"), r#"test\)paren"#);
    }

    #[test]
    fn test_escape_wiql_escapes_semicolon() {
        assert_eq!(escape_wiql("test;semi"), r#"test\;semi"#);
    }

    #[test]
    fn test_escape_wiql_escapes_equals() {
        assert_eq!(escape_wiql("test=equal"), r#"test\=equal"#);
    }

    #[test]
    fn test_escape_wiql_no_special_chars() {
        // Plain text must pass through untouched.
        assert_eq!(escape_wiql("simple query"), "simple query");
    }

    #[test]
    fn test_strip_html_tags() {
        // Tags are removed; text nodes are concatenated with whitespace
        // collapsed (note: no space is inserted between adjacent elements).
        let html = "<p>Hello <strong>world</strong>!</p>";
        assert_eq!(strip_html_tags(html), "Hello world!");
        let html2 = "<div><h1>Title</h1><p>Content</p></div>";
        assert_eq!(strip_html_tags(html2), "TitleContent");
    }
}

View File

@ -1,4 +1,9 @@
use serde::{Deserialize, Serialize};
use url::Url;
use super::query_expansion::expand_query;
const MAX_EXPANDED_QUERIES: usize = 3;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResult {
@ -6,10 +11,36 @@ pub struct SearchResult {
pub url: String,
pub excerpt: String,
pub content: Option<String>,
pub source: String, // "confluence", "servicenow", "azuredevops"
pub source: String,
}
/// Normalize a URL for deduplication by dropping its query string and
/// fragment. Malformed URLs are returned unchanged.
///
/// The original cloned the parsed `Url` only to obtain a mutable binding;
/// `Url::parse` already yields an owned value, so the clone was redundant.
///
/// NOTE(review): stripping the *entire* query can conflate genuinely distinct
/// pages (e.g. `?pageId=1` vs `?pageId=2`) — confirm this is acceptable for
/// the sources being deduplicated.
fn canonicalize_url(url: &str) -> String {
    match Url::parse(url) {
        Ok(mut parsed) => {
            parsed.set_fragment(None);
            parsed.set_query(None);
            parsed.to_string()
        }
        Err(_) => url.to_string(),
    }
}
/// Escape characters that carry special meaning in Confluence CQL text
/// terms so user input is matched literally.
fn escape_cql(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            // CQL operators/wildcard characters get a backslash prefix.
            '"' | '(' | ')' | '~' | '&' | '|' | '+' | '-' => {
                escaped.push('\\');
                escaped.push(c);
            }
            _ => escaped.push(c),
        }
    }
    escaped
}
/// Search Confluence for content matching the query
///
/// This function expands the user query with related terms, synonyms, and variations
/// to improve search coverage across Confluence spaces.
pub async fn search_confluence(
base_url: &str,
query: &str,
@ -18,14 +49,22 @@ pub async fn search_confluence(
let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
let client = reqwest::Client::new();
// Use Confluence CQL search
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
let safe_query = escape_cql(expanded_query);
let search_url = format!(
"{}/rest/api/search?cql=text~\"{}\"&limit=5",
base_url.trim_end_matches('/'),
urlencoding::encode(query)
urlencoding::encode(&safe_query)
);
tracing::info!("Searching Confluence: {}", search_url);
tracing::info!(
"Searching Confluence with expanded query: {}",
expanded_query
);
let resp = client
.get(&search_url)
@ -38,9 +77,8 @@ pub async fn search_confluence(
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Err(format!(
"Confluence search failed with status {status}: {text}"
));
tracing::warn!("Confluence search failed with status {status}: {text}");
continue;
}
let json: serde_json::Value = resp
@ -48,17 +86,13 @@ pub async fn search_confluence(
.await
.map_err(|e| format!("Failed to parse Confluence search response: {e}"))?;
let mut results = Vec::new();
if let Some(results_array) = json["results"].as_array() {
for item in results_array.iter().take(3) {
// Take top 3 results
for item in results_array.iter().take(MAX_EXPANDED_QUERIES) {
let title = item["title"].as_str().unwrap_or("Untitled").to_string();
let id = item["content"]["id"].as_str();
let space_key = item["content"]["space"]["key"].as_str();
// Build URL
let url = if let (Some(id_str), Some(space)) = (id, space_key) {
format!(
"{}/display/{}/{}",
@ -70,15 +104,11 @@ pub async fn search_confluence(
base_url.to_string()
};
// Get excerpt from search result
let excerpt = item["excerpt"]
.as_str()
.unwrap_or("")
.to_string()
.replace("<span class=\"highlight\">", "")
.replace("</span>", "");
let excerpt = strip_html_tags(item["excerpt"].as_str().unwrap_or(""))
.chars()
.take(300)
.collect::<String>();
// Fetch full page content
let content = if let Some(content_id) = id {
fetch_page_content(base_url, content_id, &cookie_header)
.await
@ -87,7 +117,7 @@ pub async fn search_confluence(
None
};
results.push(SearchResult {
all_results.push(SearchResult {
title,
url,
excerpt,
@ -96,8 +126,12 @@ pub async fn search_confluence(
});
}
}
}
Ok(results)
all_results.sort_by(|a, b| canonicalize_url(&a.url).cmp(&canonicalize_url(&b.url)));
all_results.dedup_by(|a, b| canonicalize_url(&a.url) == canonicalize_url(&b.url));
Ok(all_results)
}
/// Fetch full content of a Confluence page
@ -185,4 +219,43 @@ mod tests {
let html2 = "<div><h1>Title</h1><p>Content</p></div>";
assert_eq!(strip_html_tags(html2), "TitleContent");
}
#[test]
fn test_escape_cql_escapes_special_chars() {
assert_eq!(escape_cql("test\"quote"), r#"test\"quote"#);
assert_eq!(escape_cql("test(paren"), r#"test\(paren"#);
assert_eq!(escape_cql("test)paren"), r#"test\)paren"#);
assert_eq!(escape_cql("test~tilde"), r#"test\~tilde"#);
assert_eq!(escape_cql("test&and"), r#"test\&and"#);
assert_eq!(escape_cql("test|or"), r#"test\|or"#);
assert_eq!(escape_cql("test+plus"), r#"test\+plus"#);
assert_eq!(escape_cql("test-minus"), r#"test\-minus"#);
}
#[test]
fn test_escape_cql_no_special_chars() {
assert_eq!(escape_cql("simple query"), "simple query");
}
#[test]
fn test_canonicalize_url_removes_fragment() {
assert_eq!(
canonicalize_url("https://example.com/page#section"),
"https://example.com/page"
);
}
#[test]
fn test_canonicalize_url_removes_query() {
assert_eq!(
canonicalize_url("https://example.com/page?param=value"),
"https://example.com/page"
);
}
#[test]
fn test_canonicalize_url_handles_malformed() {
// Malformed URLs fall back to original
assert_eq!(canonicalize_url("not a url"), "not a url");
}
}

View File

@ -4,6 +4,7 @@ pub mod azuredevops_search;
pub mod callback_server;
pub mod confluence;
pub mod confluence_search;
pub mod query_expansion;
pub mod servicenow;
pub mod servicenow_search;
pub mod webview_auth;

View File

@ -0,0 +1,290 @@
/// Query expansion module for integration search
///
/// This module provides functionality to expand user queries with related terms,
/// synonyms, and variations to improve search results across integrations like
/// Confluence, ServiceNow, and Azure DevOps.
use std::collections::HashSet;
/// Product name synonyms for common product variations.
/// Maps common abbreviations/variants to their full names for search
/// expansion; also derives version-number variants and upgrade-related
/// terms. Returns a sorted, deduplicated list of non-empty synonyms
/// (empty when the query matches no known pattern).
fn get_product_synonyms(query: &str) -> Vec<String> {
    let lowered = query.to_lowercase();
    let mut synonyms: Vec<String> = Vec::new();

    // VESTA NXT family: any mention pulls in every known spelling.
    if lowered.contains("vesta") || lowered.contains("vnxt") {
        for name in [
            "VESTA NXT", "Vesta NXT", "VNXT", "vnxt", "Vesta", "vesta", "VNX", "vnx",
        ] {
            synonyms.push(name.to_string());
        }
    }

    // Version-number patterns (e.g. 1.0.12): add a dot-less form plus
    // major.minor and major.minor.patch truncations.
    if query.contains('.') {
        let parts: Vec<&str> = query.split('.').collect();
        if parts.len() >= 2 {
            synonyms.push(parts.concat());
            synonyms.push(parts[0..2].join("."));
            if parts.len() >= 3 {
                synonyms.push(parts[0..3].join("."));
            }
        }
    }

    // Upgrade/update queries also match migration and install material.
    if lowered.contains("upgrade") || lowered.contains("update") {
        for term in [
            "upgrade", "update", "migration", "patch", "version", "install", "installation",
        ] {
            synonyms.push(term.to_string());
        }
    }

    // Sorted + deduplicated + no empty entries, matching the documented
    // output contract.
    synonyms.sort();
    synonyms.dedup();
    synonyms.retain(|s| !s.is_empty());
    synonyms
}
/// Expand a search query with related terms for better search coverage
///
/// This function takes a user query and expands it with:
/// - Product name synonyms (e.g., "VNXT" -> "VESTA NXT", "Vesta NXT")
/// - Version number variations
/// - Related terms based on query content
///
/// # Arguments
/// * `query` - The original user query
///
/// # Returns
/// A vector of query strings to search, with the original query first
/// followed by expanded variations. Returns empty only if input is empty or
/// whitespace-only. Otherwise, always returns at least the original query.
pub fn expand_query(query: &str) -> Vec<String> {
if query.trim().is_empty() {
return Vec::new();
}
let mut expanded = vec![query.to_string()];
// Get product synonyms
let product_synonyms = get_product_synonyms(query);
expanded.extend(product_synonyms);
// Extract keywords from query for additional expansion
let keywords = extract_keywords(query);
// Add keyword variations
for keyword in keywords.iter().take(5) {
if !expanded.contains(keyword) {
expanded.push(keyword.clone());
}
}
// Add common related terms based on query content
let query_lower = query.to_lowercase();
if query_lower.contains("confluence") || query_lower.contains("documentation") {
expanded.push("docs".to_string());
expanded.push("manual".to_string());
expanded.push("guide".to_string());
}
if query_lower.contains("deploy") || query_lower.contains("deployment") {
expanded.push("deploy".to_string());
expanded.push("deployment".to_string());
expanded.push("release".to_string());
expanded.push("build".to_string());
}
if query_lower.contains("kubernetes") || query_lower.contains("k8s") {
expanded.push("kubernetes".to_string());
expanded.push("k8s".to_string());
expanded.push("pod".to_string());
expanded.push("container".to_string());
}
// Remove duplicates and empty strings
expanded.sort();
expanded.dedup();
expanded.retain(|s| !s.is_empty());
expanded
}
/// Extract important keywords from a search query
///
/// This function removes stop words and extracts meaningful terms
/// for search expansion. Version-like tokens with at least two dots
/// (e.g., "1.0.12") are kept intact; other alphanumeric runs are
/// lowercased and filtered against a stop-word list.
///
/// # Arguments
/// * `query` - The original user query
///
/// # Returns
/// A sorted, deduplicated vector of extracted keywords
fn extract_keywords(query: &str) -> Vec<String> {
    let stop_words: HashSet<&str> = [
        "how", "do", "i", "the", "a", "an", "is", "are", "was", "were", "be", "been", "being",
        "have", "has", "had", "having", "do", "does", "did", "doing", "will", "would", "should",
        "could", "can", "may", "might", "must", "to", "from", "in", "on", "at", "by", "for",
        "with", "about", "as", "of", "or", "and", "but", "not", "what", "when", "where", "which",
        "who", "this", "that", "these", "those", "if", "then", "else", "for", "while", "until",
        "against", "between", "into", "through", "during", "before", "after", "above", "below",
        "up", "down", "out", "off", "over", "under", "again", "further", "then", "once", "here",
        "there", "why", "where", "all", "any", "both", "each", "few", "more", "most", "other",
        "some", "such", "no", "nor", "only", "own", "same", "so", "than", "too", "very", "can",
        "just", "should", "now",
    ]
    .into_iter()
    .collect();

    let mut keywords = Vec::new();
    // Work on a &str slice instead of reallocating a String each iteration.
    // All slicing below uses BYTE offsets from char_indices(); the previous
    // implementation sliced with CHAR indices from chars().enumerate(),
    // which panics (or mis-slices) on any non-ASCII input.
    let mut rest = query;
    while !rest.is_empty() {
        // Skip leading whitespace.
        if rest.starts_with(char::is_whitespace) {
            rest = rest.trim_start();
            continue;
        }

        // Try to extract a version number (e.g., 1.0.12, 1.1.9).
        if rest.starts_with(|c: char| c.is_ascii_digit()) {
            let mut end_byte = 0;
            let mut dot_count = 0;
            for (i, c) in rest.char_indices() {
                if c.is_ascii_digit() {
                    end_byte = i + c.len_utf8();
                } else if c == '.' {
                    end_byte = i + c.len_utf8();
                    dot_count += 1;
                } else {
                    break;
                }
            }
            // Only treat it as a version if it has at least 2 dots (e.g., 1.0.12).
            if dot_count >= 2 && end_byte > 0 {
                keywords.push(rest[..end_byte].to_string());
                rest = &rest[end_byte..];
                continue;
            }
        }

        // Find the word boundary: first whitespace or non-alphanumeric char.
        let mut split_byte = rest.len();
        for (i, c) in rest.char_indices() {
            if c.is_whitespace() || !c.is_alphanumeric() {
                split_byte = i;
                break;
            }
        }

        // A boundary at position 0 means the string starts with a
        // non-alphanumeric character: skip that ONE CHARACTER (not one byte,
        // which would split a multi-byte char and panic).
        if split_byte == 0 {
            let ch_len = rest.chars().next().map_or(1, char::len_utf8);
            rest = &rest[ch_len..];
            continue;
        }

        let word = rest[..split_byte].to_lowercase();
        rest = &rest[split_byte..];

        // Skip single-char words and stop words.
        if word.len() < 2 || stop_words.contains(word.as_str()) {
            continue;
        }
        // Purely numeric words need at least 3 digits to be meaningful.
        if word.chars().all(|c| c.is_ascii_digit()) {
            if word.len() >= 3 {
                keywords.push(word);
            }
            continue;
        }
        // Keep words that contain at least one alphabetic character.
        if word.chars().any(|c| c.is_alphabetic()) {
            keywords.push(word);
        }
    }

    keywords.sort();
    keywords.dedup();
    keywords
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_expand_query_with_product_synonyms() {
        let query = "upgrade vesta nxt to 1.1.9";
        let expanded = expand_query(query);
        // Should contain original query
        assert!(expanded.contains(&query.to_string()));
        // Should contain product synonyms in either casing.
        // (Previously both sides of the || checked "vnxt", so the upper-case
        // variant was never exercised.)
        assert!(expanded
            .iter()
            .any(|s| s.contains("VNXT") || s.contains("vnxt")));
    }

    #[test]
    fn test_expand_query_with_version_numbers() {
        let query = "version 1.0.12";
        let expanded = expand_query(query);
        // Should contain original query
        assert!(expanded.contains(&query.to_string()));
    }

    #[test]
    fn test_extract_keywords() {
        let query = "How do I upgrade VESTA NXT from 1.0.12 to 1.1.9?";
        let keywords = extract_keywords(query);
        assert!(keywords.contains(&"upgrade".to_string()));
        assert!(keywords.contains(&"vesta".to_string()));
        assert!(keywords.contains(&"nxt".to_string()));
        assert!(keywords.contains(&"1.0.12".to_string()));
        assert!(keywords.contains(&"1.1.9".to_string()));
    }

    #[test]
    fn test_product_synonyms() {
        let synonyms = get_product_synonyms("vesta nxt upgrade");
        // Should contain VNXT synonym
        assert!(synonyms
            .iter()
            .any(|s| s.contains("VNXT") || s.contains("vnxt")));
    }

    #[test]
    fn test_empty_query() {
        // Documented contract: empty or whitespace-only input yields an
        // empty vector — never a vector containing the empty string.
        // (The old assertion `is_empty() || contains("")` could not fail
        // meaningfully.)
        assert!(expand_query("").is_empty());
        assert!(expand_query("   ").is_empty());
    }
}

View File

@ -1,4 +1,7 @@
use super::confluence_search::SearchResult;
use crate::integrations::query_expansion::expand_query;
const MAX_EXPANDED_QUERIES: usize = 3;
/// Search ServiceNow Knowledge Base for content matching the query
pub async fn search_servicenow(
@ -9,15 +12,20 @@ pub async fn search_servicenow(
let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
let client = reqwest::Client::new();
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
// Search Knowledge Base articles
let search_url = format!(
"{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=5",
instance_url.trim_end_matches('/'),
urlencoding::encode(query),
urlencoding::encode(query)
urlencoding::encode(expanded_query),
urlencoding::encode(expanded_query)
);
tracing::info!("Searching ServiceNow: {}", search_url);
tracing::info!("Searching ServiceNow with query: {}", expanded_query);
let resp = client
.get(&search_url)
@ -30,9 +38,8 @@ pub async fn search_servicenow(
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Err(format!(
"ServiceNow search failed with status {status}: {text}"
));
tracing::warn!("ServiceNow search failed with status {status}: {text}");
continue;
}
let json: serde_json::Value = resp
@ -40,10 +47,8 @@ pub async fn search_servicenow(
.await
.map_err(|e| format!("Failed to parse ServiceNow search response: {e}"))?;
let mut results = Vec::new();
if let Some(result_array) = json["result"].as_array() {
for item in result_array.iter().take(3) {
for item in result_array.iter().take(MAX_EXPANDED_QUERIES) {
// Take top 3 results
let title = item["short_description"]
.as_str()
@ -74,7 +79,7 @@ pub async fn search_servicenow(
}
});
results.push(SearchResult {
all_results.push(SearchResult {
title,
url,
excerpt,
@ -83,8 +88,12 @@ pub async fn search_servicenow(
});
}
}
}
Ok(results)
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
Ok(all_results)
}
/// Search ServiceNow Incidents for related issues
@ -96,15 +105,23 @@ pub async fn search_incidents(
let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
let client = reqwest::Client::new();
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
// Search incidents
let search_url = format!(
"{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true",
instance_url.trim_end_matches('/'),
urlencoding::encode(query),
urlencoding::encode(query)
urlencoding::encode(expanded_query),
urlencoding::encode(expanded_query)
);
tracing::info!("Searching ServiceNow incidents: {}", search_url);
tracing::info!(
"Searching ServiceNow incidents with query: {}",
expanded_query
);
let resp = client
.get(&search_url)
@ -115,7 +132,7 @@ pub async fn search_incidents(
.map_err(|e| format!("ServiceNow incident search failed: {e}"))?;
if !resp.status().is_success() {
return Ok(Vec::new()); // Don't fail if incident search fails
continue; // Don't fail if incident search fails
}
let json: serde_json::Value = resp
@ -123,8 +140,6 @@ pub async fn search_incidents(
.await
.map_err(|_| "Failed to parse incident response".to_string())?;
let mut results = Vec::new();
if let Some(result_array) = json["result"].as_array() {
for item in result_array.iter() {
let number = item["number"].as_str().unwrap_or("Unknown");
@ -149,7 +164,7 @@ pub async fn search_incidents(
let excerpt = content.chars().take(200).collect::<String>();
results.push(SearchResult {
all_results.push(SearchResult {
title,
url,
excerpt,
@ -158,6 +173,10 @@ pub async fn search_incidents(
});
}
}
Ok(results)
}
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
Ok(all_results)
}

View File

@ -6,6 +6,7 @@ use serde_json::Value;
use tauri::WebviewWindow;
use super::confluence_search::SearchResult;
use crate::integrations::query_expansion::expand_query;
/// Execute an HTTP request from within the webview context
/// This automatically includes all cookies (including HttpOnly) from the authenticated session
@ -123,9 +124,14 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
base_url: &str,
query: &str,
) -> Result<Vec<SearchResult>, String> {
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(3) {
// Extract keywords from the query for better search
// Remove common words and extract important terms
let keywords = extract_keywords(query);
let keywords = extract_keywords(expanded_query);
// Build CQL query with OR logic for keywords
let cql = if keywords.len() > 1 {
@ -138,8 +144,8 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
let keyword = &keywords[0];
format!("text ~ \"{keyword}\"")
} else {
// Fallback to original query
format!("text ~ \"{query}\"")
// Fallback to expanded query
format!("text ~ \"{expanded_query}\"")
};
let search_url = format!(
@ -152,8 +158,6 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
let response = fetch_from_webview(webview_window, &search_url, "GET", None).await?;
let mut results = Vec::new();
if let Some(results_array) = response.get("results").and_then(|v| v.as_array()) {
for item in results_array.iter().take(5) {
let title = item["title"].as_str().unwrap_or("Untitled").to_string();
@ -208,7 +212,7 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
None
};
results.push(SearchResult {
all_results.push(SearchResult {
title,
url,
excerpt: excerpt.chars().take(300).collect(),
@ -217,12 +221,16 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
});
}
}
}
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
tracing::info!(
"Confluence webview search returned {} results",
results.len()
all_results.len()
);
Ok(results)
Ok(all_results)
}
/// Extract keywords from a search query
@ -296,17 +304,20 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
instance_url: &str,
query: &str,
) -> Result<Vec<SearchResult>, String> {
let mut results = Vec::new();
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(3) {
// Search knowledge base
let kb_url = format!(
"{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=3",
instance_url.trim_end_matches('/'),
urlencoding::encode(query),
urlencoding::encode(query)
urlencoding::encode(expanded_query),
urlencoding::encode(expanded_query)
);
tracing::info!("Executing ServiceNow KB search via webview");
tracing::info!("Executing ServiceNow KB search via webview with expanded query");
if let Ok(kb_response) = fetch_from_webview(webview_window, &kb_url, "GET", None).await {
if let Some(kb_array) = kb_response.get("result").and_then(|v| v.as_array()) {
@ -328,7 +339,7 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
text.to_string()
});
results.push(SearchResult {
all_results.push(SearchResult {
title,
url,
excerpt,
@ -343,8 +354,8 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
let inc_url = format!(
"{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true",
instance_url.trim_end_matches('/'),
urlencoding::encode(query),
urlencoding::encode(query)
urlencoding::encode(expanded_query),
urlencoding::encode(expanded_query)
);
if let Ok(inc_response) = fetch_from_webview(webview_window, &inc_url, "GET", None).await {
@ -366,7 +377,7 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
let content = format!("Description: {description}\nResolution: {resolution}");
let excerpt = content.chars().take(200).collect();
results.push(SearchResult {
all_results.push(SearchResult {
title,
url,
excerpt,
@ -376,12 +387,16 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
}
}
}
}
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
tracing::info!(
"ServiceNow webview search returned {} results",
results.len()
all_results.len()
);
Ok(results)
Ok(all_results)
}
/// Search Azure DevOps wiki using webview fetch
@ -391,13 +406,18 @@ pub async fn search_azuredevops_wiki_webview<R: tauri::Runtime>(
project: &str,
query: &str,
) -> Result<Vec<SearchResult>, String> {
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(3) {
// Extract keywords for better search
let keywords = extract_keywords(query);
let keywords = extract_keywords(expanded_query);
let search_text = if !keywords.is_empty() {
keywords.join(" ")
} else {
query.to_string()
expanded_query.clone()
};
// Azure DevOps wiki search API
@ -415,8 +435,6 @@ pub async fn search_azuredevops_wiki_webview<R: tauri::Runtime>(
// First, get list of wikis
let wikis_response = fetch_from_webview(webview_window, &search_url, "GET", None).await?;
let mut results = Vec::new();
if let Some(wikis_array) = wikis_response.get("value").and_then(|v| v.as_array()) {
// Search each wiki
for wiki in wikis_array.iter().take(3) {
@ -445,7 +463,7 @@ pub async fn search_azuredevops_wiki_webview<R: tauri::Runtime>(
org_url,
project,
wiki_id,
&mut results,
&mut all_results,
);
} else {
// Response might be the page object itself
@ -455,18 +473,22 @@ pub async fn search_azuredevops_wiki_webview<R: tauri::Runtime>(
org_url,
project,
wiki_id,
&mut results,
&mut all_results,
);
}
}
}
}
}
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
tracing::info!(
"Azure DevOps wiki webview search returned {} results",
results.len()
all_results.len()
);
Ok(results)
Ok(all_results)
}
/// Recursively search through wiki pages for matching content
@ -544,8 +566,13 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
project: &str,
query: &str,
) -> Result<Vec<SearchResult>, String> {
let expanded_queries = expand_query(query);
let mut all_results = Vec::new();
for expanded_query in expanded_queries.iter().take(3) {
// Extract keywords
let keywords = extract_keywords(query);
let keywords = extract_keywords(expanded_query);
// Check if query contains a work item ID (pure number)
let work_item_id: Option<i64> = keywords
@ -566,7 +593,7 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
let search_terms = if !keywords.is_empty() {
keywords.join(" ")
} else {
query.to_string()
expanded_query.clone()
};
// Use CONTAINS for text search (case-insensitive)
@ -593,9 +620,8 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
tracing::debug!("WIQL query: {}", wiql_query);
tracing::debug!("Request URL: {}", wiql_url);
let wiql_response = fetch_from_webview(webview_window, &wiql_url, "POST", Some(&body)).await?;
let mut results = Vec::new();
let wiql_response =
fetch_from_webview(webview_window, &wiql_url, "POST", Some(&body)).await?;
if let Some(work_items) = wiql_response.get("workItems").and_then(|v| v.as_array()) {
// Fetch details for first 5 work items
@ -627,7 +653,8 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
let clean_description = strip_html_simple(description);
let excerpt = clean_description.chars().take(200).collect();
let url = format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/'));
let url =
format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/'));
let full_content = if clean_description.len() > 3000 {
format!("{}...", &clean_description[..3000])
@ -635,7 +662,7 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
clean_description.clone()
};
results.push(SearchResult {
all_results.push(SearchResult {
title: format!("{work_item_type} #{id}: {title}"),
url,
excerpt,
@ -647,12 +674,16 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
}
}
}
}
all_results.sort_by(|a, b| a.url.cmp(&b.url));
all_results.dedup_by(|a, b| a.url == b.url);
tracing::info!(
"Azure DevOps work items webview search returned {} results",
results.len()
all_results.len()
);
Ok(results)
Ok(all_results)
}
/// Add a comment to an Azure DevOps work item

View File

@ -120,6 +120,7 @@ pub fn run() {
commands::system::get_settings,
commands::system::update_settings,
commands::system::get_audit_log,
commands::system::get_app_version,
])
.run(tauri::generate_context!())
.expect("Error running Troubleshooting and RCA Assistant application");

View File

@ -6,7 +6,7 @@
"frontendDist": "../dist",
"devUrl": "http://localhost:1420",
"beforeDevCommand": "npm run dev",
"beforeBuildCommand": "npm run build"
"beforeBuildCommand": "npm run version:update && npm run build"
},
"app": {
"security": {
@ -26,7 +26,7 @@
},
"bundle": {
"active": true,
"targets": ["deb", "rpm"],
"targets": ["deb", "rpm", "nsis"],
"icon": [
"icons/32x32.png",
"icons/128x128.png",
@ -42,3 +42,6 @@
"longDescription": "Structured AI-backed assistant for IT troubleshooting, 5-whys root cause analysis, and post-mortem documentation with offline Ollama support."
}
}

View File

@ -1,5 +1,4 @@
import React, { useState, useEffect } from "react";
import { getVersion } from "@tauri-apps/api/app";
import { Routes, Route, NavLink, useLocation } from "react-router-dom";
import {
Home,
@ -15,7 +14,7 @@ import {
Moon,
} from "lucide-react";
import { useSettingsStore } from "@/stores/settingsStore";
import { loadAiProvidersCmd, testProviderConnectionCmd } from "@/lib/tauriCommands";
import { getAppVersionCmd, loadAiProvidersCmd, testProviderConnectionCmd } from "@/lib/tauriCommands";
import Dashboard from "@/pages/Dashboard";
import NewIssue from "@/pages/NewIssue";
@ -50,7 +49,7 @@ export default function App() {
void useLocation();
useEffect(() => {
getVersion().then(setAppVersion).catch(() => {});
getAppVersionCmd().then(setAppVersion).catch(() => {});
}, []);
// Load providers and auto-test active provider on startup

View File

@ -486,3 +486,8 @@ export const loadAiProvidersCmd = () =>
export const deleteAiProviderCmd = (name: string) =>
invoke<void>("delete_ai_provider", { name });
// ─── System / Version ─────────────────────────────────────────────────────────
export const getAppVersionCmd = () =>
invoke<string>("get_app_version");