diff --git a/src-tauri/src/integrations/azuredevops_search.rs b/src-tauri/src/integrations/azuredevops_search.rs index 9aabced4..c30d2c60 100644 --- a/src-tauri/src/integrations/azuredevops_search.rs +++ b/src-tauri/src/integrations/azuredevops_search.rs @@ -1,4 +1,5 @@ use super::confluence_search::SearchResult; +use crate::integrations::query_expansion::expand_query; /// Search Azure DevOps Wiki for content matching the query pub async fn search_wiki( @@ -10,90 +11,99 @@ pub async fn search_wiki( let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies); let client = reqwest::Client::new(); - // Use Azure DevOps Search API - let search_url = format!( - "{}/_apis/search/wikisearchresults?api-version=7.0", - org_url.trim_end_matches('/') - ); + let expanded_queries = expand_query(query); - let search_body = serde_json::json!({ - "searchText": query, - "$top": 5, - "filters": { - "ProjectFilters": [project] + let mut all_results = Vec::new(); + + for expanded_query in expanded_queries.iter().take(3) { + // Use Azure DevOps Search API + let search_url = format!( + "{}/_apis/search/wikisearchresults?api-version=7.0", + org_url.trim_end_matches('/') + ); + + let search_body = serde_json::json!({ + "searchText": expanded_query, + "$top": 5, + "filters": { + "ProjectFilters": [project] + } + }); + + tracing::info!( + "Searching Azure DevOps Wiki with expanded query: {}", + search_url + ); + + let resp = client + .post(&search_url) + .header("Cookie", &cookie_header) + .header("Accept", "application/json") + .header("Content-Type", "application/json") + .json(&search_body) + .send() + .await + .map_err(|e| format!("Azure DevOps wiki search failed: {e}"))?; + + if !resp.status().is_success() { + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + tracing::warn!("Azure DevOps wiki search failed with status {status}: {text}"); + continue; } - }); - tracing::info!("Searching Azure DevOps Wiki: {}", search_url); + let json: 
serde_json::Value = resp + .json() + .await + .map_err(|e| format!("Failed to parse ADO wiki search response: {e}"))?; - let resp = client - .post(&search_url) - .header("Cookie", &cookie_header) - .header("Accept", "application/json") - .header("Content-Type", "application/json") - .json(&search_body) - .send() - .await - .map_err(|e| format!("Azure DevOps wiki search failed: {e}"))?; + if let Some(results_array) = json["results"].as_array() { + for item in results_array.iter().take(3) { + let title = item["fileName"].as_str().unwrap_or("Untitled").to_string(); - if !resp.status().is_success() { - let status = resp.status(); - let text = resp.text().await.unwrap_or_default(); - return Err(format!( - "Azure DevOps wiki search failed with status {status}: {text}" - )); - } + let path = item["path"].as_str().unwrap_or(""); + let url = format!( + "{}/_wiki/wikis/{}/{}", + org_url.trim_end_matches('/'), + project, + path + ); - let json: serde_json::Value = resp - .json() - .await - .map_err(|e| format!("Failed to parse ADO wiki search response: {e}"))?; + let excerpt = item["content"] + .as_str() + .unwrap_or("") + .chars() + .take(300) + .collect::(); - let mut results = Vec::new(); - - if let Some(results_array) = json["results"].as_array() { - for item in results_array.iter().take(3) { - let title = item["fileName"].as_str().unwrap_or("Untitled").to_string(); - - let path = item["path"].as_str().unwrap_or(""); - let url = format!( - "{}/_wiki/wikis/{}/{}", - org_url.trim_end_matches('/'), - project, - path - ); - - let excerpt = item["content"] - .as_str() - .unwrap_or("") - .chars() - .take(300) - .collect::(); - - // Fetch full wiki page content - let content = if let Some(wiki_id) = item["wiki"]["id"].as_str() { - if let Some(page_path) = item["path"].as_str() { - fetch_wiki_page(org_url, wiki_id, page_path, &cookie_header) - .await - .ok() + // Fetch full wiki page content + let content = if let Some(wiki_id) = item["wiki"]["id"].as_str() { + if let 
Some(page_path) = item["path"].as_str() { + fetch_wiki_page(org_url, wiki_id, page_path, &cookie_header) + .await + .ok() + } else { + None + } } else { None - } - } else { - None - }; + }; - results.push(SearchResult { - title, - url, - excerpt, - content, - source: "Azure DevOps".to_string(), - }); + all_results.push(SearchResult { + title, + url, + excerpt, + content, + source: "Azure DevOps".to_string(), + }); + } } } - Ok(results) + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + + Ok(all_results) } /// Fetch full wiki page content @@ -151,55 +161,64 @@ pub async fn search_work_items( let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies); let client = reqwest::Client::new(); - // Use WIQL (Work Item Query Language) - let wiql_url = format!( - "{}/_apis/wit/wiql?api-version=7.0", - org_url.trim_end_matches('/') - ); + let expanded_queries = expand_query(query); - let wiql_query = format!( - "SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] CONTAINS '{query}' OR [System.Description] CONTAINS '{query}') ORDER BY [System.ChangedDate] DESC" - ); + let mut all_results = Vec::new(); - let wiql_body = serde_json::json!({ - "query": wiql_query - }); + for expanded_query in expanded_queries.iter().take(3) { + // Use WIQL (Work Item Query Language) + let wiql_url = format!( + "{}/_apis/wit/wiql?api-version=7.0", + org_url.trim_end_matches('/') + ); - tracing::info!("Searching Azure DevOps work items"); + let wiql_query = format!( + "SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] CONTAINS '{expanded_query}' OR [System.Description] CONTAINS '{expanded_query}') ORDER BY [System.ChangedDate] DESC" + ); - let resp = client - .post(&wiql_url) - .header("Cookie", &cookie_header) - .header("Accept", 
"application/json") - .header("Content-Type", "application/json") - .json(&wiql_body) - .send() - .await - .map_err(|e| format!("ADO work item search failed: {e}"))?; + let wiql_body = serde_json::json!({ + "query": wiql_query + }); - if !resp.status().is_success() { - return Ok(Vec::new()); // Don't fail if work item search fails - } + tracing::info!("Searching Azure DevOps work items with expanded query"); - let json: serde_json::Value = resp - .json() - .await - .map_err(|_| "Failed to parse work item response".to_string())?; + let resp = client + .post(&wiql_url) + .header("Cookie", &cookie_header) + .header("Accept", "application/json") + .header("Content-Type", "application/json") + .json(&wiql_body) + .send() + .await + .map_err(|e| format!("ADO work item search failed: {e}"))?; - let mut results = Vec::new(); + if !resp.status().is_success() { + continue; // Don't fail if work item search fails + } - if let Some(work_items) = json["workItems"].as_array() { - // Fetch details for top 3 work items - for item in work_items.iter().take(3) { - if let Some(id) = item["id"].as_i64() { - if let Ok(work_item) = fetch_work_item_details(org_url, id, &cookie_header).await { - results.push(work_item); + let json: serde_json::Value = resp + .json() + .await + .map_err(|_| "Failed to parse work item response".to_string())?; + + if let Some(work_items) = json["workItems"].as_array() { + // Fetch details for top 3 work items + for item in work_items.iter().take(3) { + if let Some(id) = item["id"].as_i64() { + if let Ok(work_item) = + fetch_work_item_details(org_url, id, &cookie_header).await + { + all_results.push(work_item); + } } } } } - Ok(results) + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + + Ok(all_results) } /// Fetch work item details diff --git a/src-tauri/src/integrations/confluence_search.rs b/src-tauri/src/integrations/confluence_search.rs index 1d814e2f..e3874156 100644 --- 
a/src-tauri/src/integrations/confluence_search.rs +++ b/src-tauri/src/integrations/confluence_search.rs @@ -1,15 +1,20 @@ use serde::{Deserialize, Serialize}; +use super::query_expansion::expand_query; + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SearchResult { pub title: String, pub url: String, pub excerpt: String, pub content: Option, - pub source: String, // "confluence", "servicenow", "azuredevops" + pub source: String, } /// Search Confluence for content matching the query +/// +/// This function expands the user query with related terms, synonyms, and variations +/// to improve search coverage across Confluence spaces. pub async fn search_confluence( base_url: &str, query: &str, @@ -18,86 +23,87 @@ pub async fn search_confluence( let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies); let client = reqwest::Client::new(); - // Use Confluence CQL search - let search_url = format!( - "{}/rest/api/search?cql=text~\"{}\"&limit=5", - base_url.trim_end_matches('/'), - urlencoding::encode(query) - ); + let expanded_queries = expand_query(query); - tracing::info!("Searching Confluence: {}", search_url); + let mut all_results = Vec::new(); - let resp = client - .get(&search_url) - .header("Cookie", &cookie_header) - .header("Accept", "application/json") - .send() - .await - .map_err(|e| format!("Confluence search request failed: {e}"))?; + for expanded_query in expanded_queries.iter().take(3) { + let search_url = format!( + "{}/rest/api/search?cql=text~\"{}\"&limit=5", + base_url.trim_end_matches('/'), + urlencoding::encode(expanded_query) + ); - if !resp.status().is_success() { - let status = resp.status(); - let text = resp.text().await.unwrap_or_default(); - return Err(format!( - "Confluence search failed with status {status}: {text}" - )); - } + tracing::info!("Searching Confluence with expanded query: {}", search_url); - let json: serde_json::Value = resp - .json() - .await - .map_err(|e| format!("Failed to parse 
Confluence search response: {e}"))?; + let resp = client + .get(&search_url) + .header("Cookie", &cookie_header) + .header("Accept", "application/json") + .send() + .await + .map_err(|e| format!("Confluence search request failed: {e}"))?; - let mut results = Vec::new(); + if !resp.status().is_success() { + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + tracing::warn!("Confluence search failed with status {status}: {text}"); + continue; + } - if let Some(results_array) = json["results"].as_array() { - for item in results_array.iter().take(3) { - // Take top 3 results - let title = item["title"].as_str().unwrap_or("Untitled").to_string(); + let json: serde_json::Value = resp + .json() + .await + .map_err(|e| format!("Failed to parse Confluence search response: {e}"))?; - let id = item["content"]["id"].as_str(); - let space_key = item["content"]["space"]["key"].as_str(); + if let Some(results_array) = json["results"].as_array() { + for item in results_array.iter().take(3) { + let title = item["title"].as_str().unwrap_or("Untitled").to_string(); - // Build URL - let url = if let (Some(id_str), Some(space)) = (id, space_key) { - format!( - "{}/display/{}/{}", - base_url.trim_end_matches('/'), - space, - id_str - ) - } else { - base_url.to_string() - }; + let id = item["content"]["id"].as_str(); + let space_key = item["content"]["space"]["key"].as_str(); - // Get excerpt from search result - let excerpt = item["excerpt"] - .as_str() - .unwrap_or("") - .to_string() - .replace("", "") - .replace("", ""); + let url = if let (Some(id_str), Some(space)) = (id, space_key) { + format!( + "{}/display/{}/{}", + base_url.trim_end_matches('/'), + space, + id_str + ) + } else { + base_url.to_string() + }; - // Fetch full page content - let content = if let Some(content_id) = id { - fetch_page_content(base_url, content_id, &cookie_header) - .await - .ok() - } else { - None - }; + let excerpt = item["excerpt"] + .as_str() + .unwrap_or("") + 
.to_string() + .replace("", "") + .replace("", ""); - results.push(SearchResult { - title, - url, - excerpt, - content, - source: "Confluence".to_string(), - }); + let content = if let Some(content_id) = id { + fetch_page_content(base_url, content_id, &cookie_header) + .await + .ok() + } else { + None + }; + + all_results.push(SearchResult { + title, + url, + excerpt, + content, + source: "Confluence".to_string(), + }); + } } } - Ok(results) + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + + Ok(all_results) } /// Fetch full content of a Confluence page diff --git a/src-tauri/src/integrations/mod.rs b/src-tauri/src/integrations/mod.rs index ab81db96..24643f1a 100644 --- a/src-tauri/src/integrations/mod.rs +++ b/src-tauri/src/integrations/mod.rs @@ -4,6 +4,7 @@ pub mod azuredevops_search; pub mod callback_server; pub mod confluence; pub mod confluence_search; +pub mod query_expansion; pub mod servicenow; pub mod servicenow_search; pub mod webview_auth; diff --git a/src-tauri/src/integrations/query_expansion.rs b/src-tauri/src/integrations/query_expansion.rs new file mode 100644 index 00000000..66c13b6b --- /dev/null +++ b/src-tauri/src/integrations/query_expansion.rs @@ -0,0 +1,285 @@ +/// Query expansion module for integration search +/// +/// This module provides functionality to expand user queries with related terms, +/// synonyms, and variations to improve search results across integrations like +/// Confluence, ServiceNow, and Azure DevOps. 
+use std::collections::HashSet; + +/// Product name synonyms for common product variations +/// Maps common abbreviations/variants to their full names for search expansion +fn get_product_synonyms(query: &str) -> Vec<String> { + let mut synonyms = Vec::new(); + + // VESTA NXT related synonyms + if query.to_lowercase().contains("vesta") || query.to_lowercase().contains("vnxt") { + synonyms.extend(vec![ + "VESTA NXT".to_string(), + "Vesta NXT".to_string(), + "VNXT".to_string(), + "vnxt".to_string(), + "Vesta".to_string(), + "vesta".to_string(), + "VNX".to_string(), + "vnx".to_string(), + ]); + } + + // Version number patterns (e.g., 1.0.12, 1.1.9) + if query.contains('.') { + // Extract version-like patterns and add variations + let version_parts: Vec<&str> = query.split('.').collect(); + if version_parts.len() >= 2 { + // Add variations without dots + let version_no_dots = version_parts.join(""); + synonyms.push(version_no_dots); + + // Add partial versions + if version_parts.len() >= 2 { + synonyms.push(version_parts[0..2].join(".")); + } + if version_parts.len() >= 3 { + synonyms.push(version_parts[0..3].join(".")); + } + } + } + + // Common upgrade-related terms + if query.to_lowercase().contains("upgrade") || query.to_lowercase().contains("update") { + synonyms.extend(vec![ + "upgrade".to_string(), + "update".to_string(), + "migration".to_string(), + "patch".to_string(), + "version".to_string(), + "install".to_string(), + "installation".to_string(), + ]); + } + + // Remove duplicates and empty strings + synonyms.sort(); + synonyms.dedup(); + synonyms.retain(|s| !s.is_empty()); + + synonyms +} + +/// Expand a search query with related terms for better search coverage +/// +/// This function takes a user query and expands it with: +/// - Product name synonyms (e.g., "VNXT" -> "VESTA NXT", "Vesta NXT") +/// - Version number variations +/// - Related terms based on query content +/// +/// # Arguments +/// * `query` - The original user query +/// +/// # Returns +/// A vector 
of query strings to search, with the original query first +/// followed by expanded variations +pub fn expand_query(query: &str) -> Vec<String> { + let mut expanded = vec![query.to_string()]; + + // Get product synonyms + let product_synonyms = get_product_synonyms(query); + expanded.extend(product_synonyms); + + // Extract keywords from query for additional expansion + let keywords = extract_keywords(query); + + // Add keyword variations + for keyword in keywords.iter().take(5) { + if !expanded.contains(keyword) { + expanded.push(keyword.clone()); + } + } + + // Add common related terms based on query content + let query_lower = query.to_lowercase(); + + if query_lower.contains("confluence") || query_lower.contains("documentation") { + expanded.push("docs".to_string()); + expanded.push("manual".to_string()); + expanded.push("guide".to_string()); + } + + if query_lower.contains("deploy") || query_lower.contains("deployment") { + expanded.push("deploy".to_string()); + expanded.push("deployment".to_string()); + expanded.push("release".to_string()); + expanded.push("build".to_string()); + } + + if query_lower.contains("kubernetes") || query_lower.contains("k8s") { + expanded.push("kubernetes".to_string()); + expanded.push("k8s".to_string()); + expanded.push("pod".to_string()); + expanded.push("container".to_string()); + } + + // Remove duplicates and empty strings + expanded.sort(); + expanded.dedup(); + expanded.retain(|s| !s.is_empty()); + + expanded +} + +/// Extract important keywords from a search query +/// +/// This function removes stop words and extracts meaningful terms +/// for search expansion. 
+/// +/// # Arguments +/// * `query` - The original user query +/// +/// # Returns +/// A vector of extracted keywords +fn extract_keywords(query: &str) -> Vec<String> { + let stop_words: HashSet<&str> = [ + "how", "do", "i", "the", "a", "an", "is", "are", "was", "were", "be", "been", "being", + "have", "has", "had", "having", "do", "does", "did", "doing", "will", "would", "should", + "could", "can", "may", "might", "must", "to", "from", "in", "on", "at", "by", "for", + "with", "about", "as", "of", "or", "and", "but", "not", "what", "when", "where", "which", + "who", "this", "that", "these", "those", "if", "then", "else", "for", "while", "until", + "against", "between", "into", "through", "during", "before", "after", "above", "below", + "up", "down", "out", "off", "over", "under", "again", "further", "then", "once", "here", + "there", "why", "where", "all", "any", "both", "each", "few", "more", "most", "other", + "some", "such", "no", "nor", "only", "own", "same", "so", "than", "too", "very", "can", + "just", "should", "now", + ] + .into_iter() + .collect(); + + let mut keywords = Vec::new(); + let mut remaining = query.to_string(); + + while !remaining.is_empty() { + // Skip leading whitespace + if remaining.starts_with(char::is_whitespace) { + remaining = remaining.trim_start().to_string(); + continue; + } + + // Try to extract version number (e.g., 1.0.12, 1.1.9) + if remaining.starts_with(|c: char| c.is_ascii_digit()) { + let mut end_pos = 0; + let mut dot_count = 0; + + for (i, c) in remaining.chars().enumerate() { + if c.is_ascii_digit() { + end_pos = i + 1; + } else if c == '.' 
{ + end_pos = i + 1; + dot_count += 1; + } else { + break; + } + } + + // Only extract if we have at least 2 dots (e.g., 1.0.12) + if dot_count >= 2 && end_pos > 0 { + let version = remaining[..end_pos].to_string(); + keywords.push(version.clone()); + remaining = remaining[end_pos..].to_string(); + continue; + } + } + + // Find word boundary - split on whitespace or non-alphanumeric + let mut split_pos = remaining.len(); + for (i, c) in remaining.chars().enumerate() { + if c.is_whitespace() || !c.is_alphanumeric() { + split_pos = i; + break; + } + } + + // If split_pos is 0, the string starts with a non-alphanumeric character + // Skip it and continue + if split_pos == 0 { + remaining = remaining[1..].to_string(); + continue; + } + + let word = remaining[..split_pos].to_lowercase(); + remaining = remaining[split_pos..].to_string(); + + // Skip empty words, single chars, and stop words + if word.is_empty() || word.len() < 2 || stop_words.contains(word.as_str()) { + continue; + } + + // Add numeric words with 3+ digits + if word.chars().all(|c| c.is_ascii_digit()) && word.len() >= 3 { + keywords.push(word.clone()); + continue; + } + + // Add words with at least one alphabetic character + if word.chars().any(|c| c.is_alphabetic()) { + keywords.push(word.clone()); + } + } + + keywords.sort(); + keywords.dedup(); + + keywords +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_expand_query_with_product_synonyms() { + let query = "upgrade vesta nxt to 1.1.9"; + let expanded = expand_query(query); + + // Should contain original query + assert!(expanded.contains(&query.to_string())); + + // Should contain product synonyms + assert!(expanded + .iter() + .any(|s| s.contains("VNXT") || s.contains("vnxt"))); + } + + #[test] + fn test_expand_query_with_version_numbers() { + let query = "version 1.0.12"; + let expanded = expand_query(query); + + // Should contain original query + assert!(expanded.contains(&query.to_string())); + } + + #[test] + fn 
test_extract_keywords() { + let query = "How do I upgrade VESTA NXT from 1.0.12 to 1.1.9?"; + let keywords = extract_keywords(query); + + assert!(keywords.contains(&"upgrade".to_string())); + assert!(keywords.contains(&"vesta".to_string())); + assert!(keywords.contains(&"nxt".to_string())); + assert!(keywords.contains(&"1.0.12".to_string())); + assert!(keywords.contains(&"1.1.9".to_string())); + } + + #[test] + fn test_product_synonyms() { + let synonyms = get_product_synonyms("vesta nxt upgrade"); + + // Should contain VNXT synonym + assert!(synonyms + .iter() + .any(|s| s.contains("VNXT") || s.contains("vnxt"))); + } + + #[test] + fn test_empty_query() { + let expanded = expand_query(""); + assert!(expanded.is_empty() || expanded.contains(&"".to_string())); + } +} diff --git a/src-tauri/src/integrations/servicenow_search.rs b/src-tauri/src/integrations/servicenow_search.rs index 15e52105..5d1255e6 100644 --- a/src-tauri/src/integrations/servicenow_search.rs +++ b/src-tauri/src/integrations/servicenow_search.rs @@ -1,4 +1,5 @@ use super::confluence_search::SearchResult; +use crate::integrations::query_expansion::expand_query; /// Search ServiceNow Knowledge Base for content matching the query pub async fn search_servicenow( @@ -9,82 +10,88 @@ pub async fn search_servicenow( let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies); let client = reqwest::Client::new(); - // Search Knowledge Base articles - let search_url = format!( - "{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=5", - instance_url.trim_end_matches('/'), - urlencoding::encode(query), - urlencoding::encode(query) - ); + let expanded_queries = expand_query(query); - tracing::info!("Searching ServiceNow: {}", search_url); + let mut all_results = Vec::new(); - let resp = client - .get(&search_url) - .header("Cookie", &cookie_header) - .header("Accept", "application/json") - .send() - .await - .map_err(|e| format!("ServiceNow 
search request failed: {e}"))?; + for expanded_query in expanded_queries.iter().take(3) { + // Search Knowledge Base articles + let search_url = format!( + "{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=5", + instance_url.trim_end_matches('/'), + urlencoding::encode(expanded_query), + urlencoding::encode(expanded_query) + ); - if !resp.status().is_success() { - let status = resp.status(); - let text = resp.text().await.unwrap_or_default(); - return Err(format!( - "ServiceNow search failed with status {status}: {text}" - )); - } + tracing::info!("Searching ServiceNow with expanded query: {}", search_url); - let json: serde_json::Value = resp - .json() - .await - .map_err(|e| format!("Failed to parse ServiceNow search response: {e}"))?; + let resp = client + .get(&search_url) + .header("Cookie", &cookie_header) + .header("Accept", "application/json") + .send() + .await + .map_err(|e| format!("ServiceNow search request failed: {e}"))?; - let mut results = Vec::new(); + if !resp.status().is_success() { + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + tracing::warn!("ServiceNow search failed with status {status}: {text}"); + continue; + } - if let Some(result_array) = json["result"].as_array() { - for item in result_array.iter().take(3) { - // Take top 3 results - let title = item["short_description"] - .as_str() - .unwrap_or("Untitled") - .to_string(); + let json: serde_json::Value = resp + .json() + .await + .map_err(|e| format!("Failed to parse ServiceNow search response: {e}"))?; - let sys_id = item["sys_id"].as_str().unwrap_or("").to_string(); + if let Some(result_array) = json["result"].as_array() { + for item in result_array.iter().take(3) { + // Take top 3 results + let title = item["short_description"] + .as_str() + .unwrap_or("Untitled") + .to_string(); - let url = format!( - "{}/kb_view.do?sysparm_article={}", - instance_url.trim_end_matches('/'), - sys_id - ); + let sys_id = 
item["sys_id"].as_str().unwrap_or("").to_string(); - let excerpt = item["text"] - .as_str() - .unwrap_or("") - .chars() - .take(300) - .collect::(); + let url = format!( + "{}/kb_view.do?sysparm_article={}", + instance_url.trim_end_matches('/'), + sys_id + ); - // Get full article content - let content = item["text"].as_str().map(|text| { - if text.len() > 3000 { - format!("{}...", &text[..3000]) - } else { - text.to_string() - } - }); + let excerpt = item["text"] + .as_str() + .unwrap_or("") + .chars() + .take(300) + .collect::(); - results.push(SearchResult { - title, - url, - excerpt, - content, - source: "ServiceNow".to_string(), - }); + // Get full article content + let content = item["text"].as_str().map(|text| { + if text.len() > 3000 { + format!("{}...", &text[..3000]) + } else { + text.to_string() + } + }); + + all_results.push(SearchResult { + title, + url, + excerpt, + content, + source: "ServiceNow".to_string(), + }); + } } } - Ok(results) + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + + Ok(all_results) } /// Search ServiceNow Incidents for related issues @@ -96,68 +103,78 @@ pub async fn search_incidents( let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies); let client = reqwest::Client::new(); - // Search incidents - let search_url = format!( - "{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true", - instance_url.trim_end_matches('/'), - urlencoding::encode(query), - urlencoding::encode(query) - ); + let expanded_queries = expand_query(query); - tracing::info!("Searching ServiceNow incidents: {}", search_url); + let mut all_results = Vec::new(); - let resp = client - .get(&search_url) - .header("Cookie", &cookie_header) - .header("Accept", "application/json") - .send() - .await - .map_err(|e| format!("ServiceNow incident search failed: {e}"))?; + for expanded_query in expanded_queries.iter().take(3) 
{ + // Search incidents + let search_url = format!( + "{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true", + instance_url.trim_end_matches('/'), + urlencoding::encode(expanded_query), + urlencoding::encode(expanded_query) + ); - if !resp.status().is_success() { - return Ok(Vec::new()); // Don't fail if incident search fails - } + tracing::info!( + "Searching ServiceNow incidents with expanded query: {}", + search_url + ); - let json: serde_json::Value = resp - .json() - .await - .map_err(|_| "Failed to parse incident response".to_string())?; + let resp = client + .get(&search_url) + .header("Cookie", &cookie_header) + .header("Accept", "application/json") + .send() + .await + .map_err(|e| format!("ServiceNow incident search failed: {e}"))?; - let mut results = Vec::new(); + if !resp.status().is_success() { + continue; // Don't fail if incident search fails + } - if let Some(result_array) = json["result"].as_array() { - for item in result_array.iter() { - let number = item["number"].as_str().unwrap_or("Unknown"); - let title = format!( - "Incident {}: {}", - number, - item["short_description"].as_str().unwrap_or("No title") - ); + let json: serde_json::Value = resp + .json() + .await + .map_err(|_| "Failed to parse incident response".to_string())?; - let sys_id = item["sys_id"].as_str().unwrap_or(""); - let url = format!( - "{}/incident.do?sys_id={}", - instance_url.trim_end_matches('/'), - sys_id - ); + if let Some(result_array) = json["result"].as_array() { + for item in result_array.iter() { + let number = item["number"].as_str().unwrap_or("Unknown"); + let title = format!( + "Incident {}: {}", + number, + item["short_description"].as_str().unwrap_or("No title") + ); - let description = item["description"].as_str().unwrap_or("").to_string(); + let sys_id = item["sys_id"].as_str().unwrap_or(""); + let url = format!( + "{}/incident.do?sys_id={}", + instance_url.trim_end_matches('/'), + 
sys_id + ); - let resolution = item["close_notes"].as_str().unwrap_or("").to_string(); + let description = item["description"].as_str().unwrap_or("").to_string(); - let content = format!("Description: {description}\nResolution: {resolution}"); + let resolution = item["close_notes"].as_str().unwrap_or("").to_string(); - let excerpt = content.chars().take(200).collect::(); + let content = format!("Description: {description}\nResolution: {resolution}"); - results.push(SearchResult { - title, - url, - excerpt, - content: Some(content), - source: "ServiceNow".to_string(), - }); + let excerpt = content.chars().take(200).collect::(); + + all_results.push(SearchResult { + title, + url, + excerpt, + content: Some(content), + source: "ServiceNow".to_string(), + }); + } } } - Ok(results) + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + + Ok(all_results) } diff --git a/src-tauri/src/integrations/webview_fetch.rs b/src-tauri/src/integrations/webview_fetch.rs index 81a89d41..90e1f577 100644 --- a/src-tauri/src/integrations/webview_fetch.rs +++ b/src-tauri/src/integrations/webview_fetch.rs @@ -6,6 +6,7 @@ use serde_json::Value; use tauri::WebviewWindow; use super::confluence_search::SearchResult; +use crate::integrations::query_expansion::expand_query; /// Execute an HTTP request from within the webview context /// This automatically includes all cookies (including HttpOnly) from the authenticated session @@ -123,106 +124,113 @@ pub async fn search_confluence_webview( base_url: &str, query: &str, ) -> Result, String> { - // Extract keywords from the query for better search - // Remove common words and extract important terms - let keywords = extract_keywords(query); + let expanded_queries = expand_query(query); - // Build CQL query with OR logic for keywords - let cql = if keywords.len() > 1 { - // Multiple keywords - search for any of them - let keyword_conditions: Vec = - keywords.iter().map(|k| format!("text ~ 
\"{k}\"")).collect(); - keyword_conditions.join(" OR ") - } else if !keywords.is_empty() { - // Single keyword - let keyword = &keywords[0]; - format!("text ~ \"{keyword}\"") - } else { - // Fallback to original query - format!("text ~ \"{query}\"") - }; + let mut all_results = Vec::new(); - let search_url = format!( - "{}/rest/api/search?cql={}&limit=10", - base_url.trim_end_matches('/'), - urlencoding::encode(&cql) - ); + for expanded_query in expanded_queries.iter().take(3) { + // Extract keywords from the query for better search + // Remove common words and extract important terms + let keywords = extract_keywords(expanded_query); - tracing::info!("Executing Confluence search via webview with CQL: {}", cql); + // Build CQL query with OR logic for keywords + let cql = if keywords.len() > 1 { + // Multiple keywords - search for any of them + let keyword_conditions: Vec = + keywords.iter().map(|k| format!("text ~ \"{k}\"")).collect(); + keyword_conditions.join(" OR ") + } else if !keywords.is_empty() { + // Single keyword + let keyword = &keywords[0]; + format!("text ~ \"{keyword}\"") + } else { + // Fallback to expanded query + format!("text ~ \"{expanded_query}\"") + }; - let response = fetch_from_webview(webview_window, &search_url, "GET", None).await?; + let search_url = format!( + "{}/rest/api/search?cql={}&limit=10", + base_url.trim_end_matches('/'), + urlencoding::encode(&cql) + ); - let mut results = Vec::new(); + tracing::info!("Executing Confluence search via webview with CQL: {}", cql); - if let Some(results_array) = response.get("results").and_then(|v| v.as_array()) { - for item in results_array.iter().take(5) { - let title = item["title"].as_str().unwrap_or("Untitled").to_string(); - let content_id = item["content"]["id"].as_str(); - let space_key = item["content"]["space"]["key"].as_str(); + let response = fetch_from_webview(webview_window, &search_url, "GET", None).await?; - let url = if let (Some(id), Some(space)) = (content_id, space_key) { - 
format!( - "{}/display/{}/{}", - base_url.trim_end_matches('/'), - space, - id - ) - } else { - base_url.to_string() - }; + if let Some(results_array) = response.get("results").and_then(|v| v.as_array()) { + for item in results_array.iter().take(5) { + let title = item["title"].as_str().unwrap_or("Untitled").to_string(); + let content_id = item["content"]["id"].as_str(); + let space_key = item["content"]["space"]["key"].as_str(); - let excerpt = item["excerpt"] - .as_str() - .unwrap_or("") - .replace("", "") - .replace("", ""); + let url = if let (Some(id), Some(space)) = (content_id, space_key) { + format!( + "{}/display/{}/{}", + base_url.trim_end_matches('/'), + space, + id + ) + } else { + base_url.to_string() + }; - // Fetch full page content - let content = if let Some(id) = content_id { - let content_url = format!( - "{}/rest/api/content/{id}?expand=body.storage", - base_url.trim_end_matches('/') - ); - if let Ok(content_resp) = - fetch_from_webview(webview_window, &content_url, "GET", None).await - { - if let Some(body) = content_resp - .get("body") - .and_then(|b| b.get("storage")) - .and_then(|s| s.get("value")) - .and_then(|v| v.as_str()) + let excerpt = item["excerpt"] + .as_str() + .unwrap_or("") + .replace("", "") + .replace("", ""); + + // Fetch full page content + let content = if let Some(id) = content_id { + let content_url = format!( + "{}/rest/api/content/{id}?expand=body.storage", + base_url.trim_end_matches('/') + ); + if let Ok(content_resp) = + fetch_from_webview(webview_window, &content_url, "GET", None).await { - let text = strip_html_simple(body); - Some(if text.len() > 3000 { - format!("{}...", &text[..3000]) + if let Some(body) = content_resp + .get("body") + .and_then(|b| b.get("storage")) + .and_then(|s| s.get("value")) + .and_then(|v| v.as_str()) + { + let text = strip_html_simple(body); + Some(if text.len() > 3000 { + format!("{}...", &text[..3000]) + } else { + text + }) } else { - text - }) + None + } } else { None } } else { None 
- } - } else { - None - }; + }; - results.push(SearchResult { - title, - url, - excerpt: excerpt.chars().take(300).collect(), - content, - source: "Confluence".to_string(), - }); + all_results.push(SearchResult { + title, + url, + excerpt: excerpt.chars().take(300).collect(), + content, + source: "Confluence".to_string(), + }); + } } } + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + tracing::info!( "Confluence webview search returned {} results", - results.len() + all_results.len() ); - Ok(results) + Ok(all_results) } /// Extract keywords from a search query @@ -296,92 +304,99 @@ pub async fn search_servicenow_webview( instance_url: &str, query: &str, ) -> Result, String> { - let mut results = Vec::new(); + let expanded_queries = expand_query(query); - // Search knowledge base - let kb_url = format!( - "{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=3", - instance_url.trim_end_matches('/'), - urlencoding::encode(query), - urlencoding::encode(query) - ); + let mut all_results = Vec::new(); - tracing::info!("Executing ServiceNow KB search via webview"); + for expanded_query in expanded_queries.iter().take(3) { + // Search knowledge base + let kb_url = format!( + "{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=3", + instance_url.trim_end_matches('/'), + urlencoding::encode(expanded_query), + urlencoding::encode(expanded_query) + ); - if let Ok(kb_response) = fetch_from_webview(webview_window, &kb_url, "GET", None).await { - if let Some(kb_array) = kb_response.get("result").and_then(|v| v.as_array()) { - for item in kb_array { - let title = item["short_description"] - .as_str() - .unwrap_or("Untitled") - .to_string(); - let sys_id = item["sys_id"].as_str().unwrap_or(""); - let url = format!( - "{}/kb_view.do?sysparm_article={sys_id}", - instance_url.trim_end_matches('/') - ); - let text = item["text"].as_str().unwrap_or(""); 
- let excerpt = text.chars().take(300).collect(); - let content = Some(if text.len() > 3000 { - format!("{}...", &text[..3000]) - } else { - text.to_string() - }); + tracing::info!("Executing ServiceNow KB search via webview with expanded query"); - results.push(SearchResult { - title, - url, - excerpt, - content, - source: "ServiceNow".to_string(), - }); + if let Ok(kb_response) = fetch_from_webview(webview_window, &kb_url, "GET", None).await { + if let Some(kb_array) = kb_response.get("result").and_then(|v| v.as_array()) { + for item in kb_array { + let title = item["short_description"] + .as_str() + .unwrap_or("Untitled") + .to_string(); + let sys_id = item["sys_id"].as_str().unwrap_or(""); + let url = format!( + "{}/kb_view.do?sysparm_article={sys_id}", + instance_url.trim_end_matches('/') + ); + let text = item["text"].as_str().unwrap_or(""); + let excerpt = text.chars().take(300).collect(); + let content = Some(if text.len() > 3000 { + format!("{}...", &text[..3000]) + } else { + text.to_string() + }); + + all_results.push(SearchResult { + title, + url, + excerpt, + content, + source: "ServiceNow".to_string(), + }); + } + } + } + + // Search incidents + let inc_url = format!( + "{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true", + instance_url.trim_end_matches('/'), + urlencoding::encode(expanded_query), + urlencoding::encode(expanded_query) + ); + + if let Ok(inc_response) = fetch_from_webview(webview_window, &inc_url, "GET", None).await { + if let Some(inc_array) = inc_response.get("result").and_then(|v| v.as_array()) { + for item in inc_array { + let number = item["number"].as_str().unwrap_or("Unknown"); + let title = format!( + "Incident {}: {}", + number, + item["short_description"].as_str().unwrap_or("No title") + ); + let sys_id = item["sys_id"].as_str().unwrap_or(""); + let url = format!( + "{}/incident.do?sys_id={sys_id}", + instance_url.trim_end_matches('/') + ); + let 
description = item["description"].as_str().unwrap_or(""); + let resolution = item["close_notes"].as_str().unwrap_or(""); + let content = format!("Description: {description}\nResolution: {resolution}"); + let excerpt = content.chars().take(200).collect(); + + all_results.push(SearchResult { + title, + url, + excerpt, + content: Some(content), + source: "ServiceNow".to_string(), + }); + } } } } - // Search incidents - let inc_url = format!( - "{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true", - instance_url.trim_end_matches('/'), - urlencoding::encode(query), - urlencoding::encode(query) - ); - - if let Ok(inc_response) = fetch_from_webview(webview_window, &inc_url, "GET", None).await { - if let Some(inc_array) = inc_response.get("result").and_then(|v| v.as_array()) { - for item in inc_array { - let number = item["number"].as_str().unwrap_or("Unknown"); - let title = format!( - "Incident {}: {}", - number, - item["short_description"].as_str().unwrap_or("No title") - ); - let sys_id = item["sys_id"].as_str().unwrap_or(""); - let url = format!( - "{}/incident.do?sys_id={sys_id}", - instance_url.trim_end_matches('/') - ); - let description = item["description"].as_str().unwrap_or(""); - let resolution = item["close_notes"].as_str().unwrap_or(""); - let content = format!("Description: {description}\nResolution: {resolution}"); - let excerpt = content.chars().take(200).collect(); - - results.push(SearchResult { - title, - url, - excerpt, - content: Some(content), - source: "ServiceNow".to_string(), - }); - } - } - } + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); tracing::info!( "ServiceNow webview search returned {} results", - results.len() + all_results.len() ); - Ok(results) + Ok(all_results) } /// Search Azure DevOps wiki using webview fetch @@ -391,82 +406,89 @@ pub async fn search_azuredevops_wiki_webview( project: &str, query: &str, ) -> 
Result, String> { - // Extract keywords for better search - let keywords = extract_keywords(query); + let expanded_queries = expand_query(query); - let search_text = if !keywords.is_empty() { - keywords.join(" ") - } else { - query.to_string() - }; + let mut all_results = Vec::new(); - // Azure DevOps wiki search API - let search_url = format!( - "{}/{}/_apis/wiki/wikis?api-version=7.0", - org_url.trim_end_matches('/'), - urlencoding::encode(project) - ); + for expanded_query in expanded_queries.iter().take(3) { + // Extract keywords for better search + let keywords = extract_keywords(expanded_query); - tracing::info!( - "Executing Azure DevOps wiki search via webview for: {}", - search_text - ); + let search_text = if !keywords.is_empty() { + keywords.join(" ") + } else { + expanded_query.clone() + }; - // First, get list of wikis - let wikis_response = fetch_from_webview(webview_window, &search_url, "GET", None).await?; + // Azure DevOps wiki search API + let search_url = format!( + "{}/{}/_apis/wiki/wikis?api-version=7.0", + org_url.trim_end_matches('/'), + urlencoding::encode(project) + ); - let mut results = Vec::new(); + tracing::info!( + "Executing Azure DevOps wiki search via webview for: {}", + search_text + ); - if let Some(wikis_array) = wikis_response.get("value").and_then(|v| v.as_array()) { - // Search each wiki - for wiki in wikis_array.iter().take(3) { - let wiki_id = wiki["id"].as_str().unwrap_or(""); + // First, get list of wikis + let wikis_response = fetch_from_webview(webview_window, &search_url, "GET", None).await?; - if wiki_id.is_empty() { - continue; - } + if let Some(wikis_array) = wikis_response.get("value").and_then(|v| v.as_array()) { + // Search each wiki + for wiki in wikis_array.iter().take(3) { + let wiki_id = wiki["id"].as_str().unwrap_or(""); - // Search wiki pages - let pages_url = format!( - "{}/{}/_apis/wiki/wikis/{}/pages?recursionLevel=Full&includeContent=true&api-version=7.0", - org_url.trim_end_matches('/'), - 
urlencoding::encode(project), - urlencoding::encode(wiki_id) - ); + if wiki_id.is_empty() { + continue; + } - if let Ok(pages_response) = - fetch_from_webview(webview_window, &pages_url, "GET", None).await - { - // Try to get "page" field, or use the response itself if it's the page object - if let Some(page) = pages_response.get("page") { - search_page_recursive( - page, - &search_text, - org_url, - project, - wiki_id, - &mut results, - ); - } else { - // Response might be the page object itself - search_page_recursive( - &pages_response, - &search_text, - org_url, - project, - wiki_id, - &mut results, - ); + // Search wiki pages + let pages_url = format!( + "{}/{}/_apis/wiki/wikis/{}/pages?recursionLevel=Full&includeContent=true&api-version=7.0", + org_url.trim_end_matches('/'), + urlencoding::encode(project), + urlencoding::encode(wiki_id) + ); + + if let Ok(pages_response) = + fetch_from_webview(webview_window, &pages_url, "GET", None).await + { + // Try to get "page" field, or use the response itself if it's the page object + if let Some(page) = pages_response.get("page") { + search_page_recursive( + page, + &search_text, + org_url, + project, + wiki_id, + &mut all_results, + ); + } else { + // Response might be the page object itself + search_page_recursive( + &pages_response, + &search_text, + org_url, + project, + wiki_id, + &mut all_results, + ); + } } } } } + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + tracing::info!( "Azure DevOps wiki webview search returned {} results", - results.len() + all_results.len() ); - Ok(results) + Ok(all_results) } /// Recursively search through wiki pages for matching content @@ -544,115 +566,124 @@ pub async fn search_azuredevops_workitems_webview( project: &str, query: &str, ) -> Result, String> { - // Extract keywords - let keywords = extract_keywords(query); + let expanded_queries = expand_query(query); - // Check if query contains a work item ID (pure number) - let 
work_item_id: Option = keywords - .iter() - .filter(|k| k.chars().all(|c| c.is_numeric())) - .filter_map(|k| k.parse::().ok()) - .next(); + let mut all_results = Vec::new(); - // Build WIQL query - let wiql_query = if let Some(id) = work_item_id { - // Search by specific ID - format!( - "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \ - FROM WorkItems WHERE [System.Id] = {id}" - ) - } else { - // Search by text in title/description - let search_terms = if !keywords.is_empty() { - keywords.join(" ") + for expanded_query in expanded_queries.iter().take(3) { + // Extract keywords + let keywords = extract_keywords(expanded_query); + + // Check if query contains a work item ID (pure number) + let work_item_id: Option = keywords + .iter() + .filter(|k| k.chars().all(|c| c.is_numeric())) + .filter_map(|k| k.parse::().ok()) + .next(); + + // Build WIQL query + let wiql_query = if let Some(id) = work_item_id { + // Search by specific ID + format!( + "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \ + FROM WorkItems WHERE [System.Id] = {id}" + ) } else { - query.to_string() + // Search by text in title/description + let search_terms = if !keywords.is_empty() { + keywords.join(" ") + } else { + expanded_query.clone() + }; + + // Use CONTAINS for text search (case-insensitive) + format!( + "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \ + FROM WorkItems WHERE [System.TeamProject] = '{project}' \ + AND ([System.Title] CONTAINS '{search_terms}' OR [System.Description] CONTAINS '{search_terms}') \ + ORDER BY [System.ChangedDate] DESC" + ) }; - // Use CONTAINS for text search (case-insensitive) - format!( - "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \ - FROM WorkItems WHERE [System.TeamProject] = '{project}' \ - AND ([System.Title] CONTAINS '{search_terms}' OR [System.Description] CONTAINS '{search_terms}') \ - ORDER BY [System.ChangedDate] 
DESC" - ) - }; + let wiql_url = format!( + "{}/{}/_apis/wit/wiql?api-version=7.0", + org_url.trim_end_matches('/'), + urlencoding::encode(project) + ); - let wiql_url = format!( - "{}/{}/_apis/wit/wiql?api-version=7.0", - org_url.trim_end_matches('/'), - urlencoding::encode(project) - ); + let body = serde_json::json!({ + "query": wiql_query + }) + .to_string(); - let body = serde_json::json!({ - "query": wiql_query - }) - .to_string(); + tracing::info!("Executing Azure DevOps work item search via webview"); + tracing::debug!("WIQL query: {}", wiql_query); + tracing::debug!("Request URL: {}", wiql_url); - tracing::info!("Executing Azure DevOps work item search via webview"); - tracing::debug!("WIQL query: {}", wiql_query); - tracing::debug!("Request URL: {}", wiql_url); + let wiql_response = + fetch_from_webview(webview_window, &wiql_url, "POST", Some(&body)).await?; - let wiql_response = fetch_from_webview(webview_window, &wiql_url, "POST", Some(&body)).await?; + if let Some(work_items) = wiql_response.get("workItems").and_then(|v| v.as_array()) { + // Fetch details for first 5 work items + for item in work_items.iter().take(5) { + if let Some(id) = item.get("id").and_then(|i| i.as_i64()) { + let details_url = format!( + "{}/_apis/wit/workitems/{}?api-version=7.0", + org_url.trim_end_matches('/'), + id + ); - let mut results = Vec::new(); + if let Ok(details) = + fetch_from_webview(webview_window, &details_url, "GET", None).await + { + if let Some(fields) = details.get("fields") { + let title = fields + .get("System.Title") + .and_then(|t| t.as_str()) + .unwrap_or("Untitled"); + let work_item_type = fields + .get("System.WorkItemType") + .and_then(|t| t.as_str()) + .unwrap_or("Item"); + let description = fields + .get("System.Description") + .and_then(|d| d.as_str()) + .unwrap_or(""); - if let Some(work_items) = wiql_response.get("workItems").and_then(|v| v.as_array()) { - // Fetch details for first 5 work items - for item in work_items.iter().take(5) { - if let 
Some(id) = item.get("id").and_then(|i| i.as_i64()) { - let details_url = format!( - "{}/_apis/wit/workitems/{}?api-version=7.0", - org_url.trim_end_matches('/'), - id - ); + let clean_description = strip_html_simple(description); + let excerpt = clean_description.chars().take(200).collect(); - if let Ok(details) = - fetch_from_webview(webview_window, &details_url, "GET", None).await - { - if let Some(fields) = details.get("fields") { - let title = fields - .get("System.Title") - .and_then(|t| t.as_str()) - .unwrap_or("Untitled"); - let work_item_type = fields - .get("System.WorkItemType") - .and_then(|t| t.as_str()) - .unwrap_or("Item"); - let description = fields - .get("System.Description") - .and_then(|d| d.as_str()) - .unwrap_or(""); + let url = + format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/')); - let clean_description = strip_html_simple(description); - let excerpt = clean_description.chars().take(200).collect(); + let full_content = if clean_description.len() > 3000 { + format!("{}...", &clean_description[..3000]) + } else { + clean_description.clone() + }; - let url = format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/')); - - let full_content = if clean_description.len() > 3000 { - format!("{}...", &clean_description[..3000]) - } else { - clean_description.clone() - }; - - results.push(SearchResult { - title: format!("{work_item_type} #{id}: {title}"), - url, - excerpt, - content: Some(full_content), - source: "Azure DevOps".to_string(), - }); + all_results.push(SearchResult { + title: format!("{work_item_type} #{id}: {title}"), + url, + excerpt, + content: Some(full_content), + source: "Azure DevOps".to_string(), + }); + } } } } } } + all_results.sort_by(|a, b| a.url.cmp(&b.url)); + all_results.dedup_by(|a, b| a.url == b.url); + tracing::info!( "Azure DevOps work items webview search returned {} results", - results.len() + all_results.len() ); - Ok(results) + Ok(all_results) } /// Add a comment to an Azure DevOps work item