diff --git a/src-tauri/src/commands/ai.rs b/src-tauri/src/commands/ai.rs index f89bc37d..9becd0eb 100644 --- a/src-tauri/src/commands/ai.rs +++ b/src-tauri/src/commands/ai.rs @@ -239,15 +239,14 @@ pub async fn chat_message( let context_message = Message { role: "system".into(), content: format!( - "INTERNAL DOCUMENTATION SOURCES:\n\n{}\n\n\ + "INTERNAL DOCUMENTATION SOURCES:\n\n{integration_context}\n\n\ Instructions: The above content is from internal company documentation systems \ (Confluence, ServiceNow, Azure DevOps). \ \n\n**IMPORTANT**: First determine if this documentation is RELEVANT to the user's question:\ \n- If the documentation directly addresses the question → Use it and cite sources with URLs\ \n- If the documentation is tangentially related but doesn't answer the question → Briefly mention what internal docs exist, then provide a complete answer using general knowledge\ \n- If the documentation is completely unrelated → Ignore it and answer using general knowledge\ - \n\nDo NOT force irrelevant internal documentation into your answer. The user needs accurate information, not forced citations.", - integration_context + \n\nDo NOT force irrelevant internal documentation into your answer. The user needs accurate information, not forced citations." 
), tool_call_id: None, tool_calls: None, @@ -280,7 +279,7 @@ pub async fn chat_message( .chat(messages.clone(), &provider_config, tools.clone()) .await .map_err(|e| { - let error_msg = format!("AI provider request failed: {}", e); + let error_msg = format!("AI provider request failed: {e}"); warn!("{}", error_msg); error_msg })?; @@ -298,7 +297,7 @@ pub async fn chat_message( // Format result let result_content = match tool_result { Ok(result) => result, - Err(e) => format!("Error executing tool: {}", e), + Err(e) => format!("Error executing tool: {e}"), }; // Add tool result as a message @@ -795,7 +794,7 @@ async fn search_integration_sources( context.push_str(&format!("URL: {}\n", result.url)); if let Some(content) = &result.content { - context.push_str(&format!("Content:\n{}\n\n", content)); + context.push_str(&format!("Content:\n{content}\n\n")); } else { context.push_str(&format!("Excerpt: {}\n\n", result.excerpt)); } @@ -818,7 +817,7 @@ async fn execute_tool_call( "add_ado_comment" => { // Parse arguments let args: serde_json::Value = serde_json::from_str(&tool_call.arguments) - .map_err(|e| format!("Failed to parse tool arguments: {}", e))?; + .map_err(|e| format!("Failed to parse tool arguments: {e}"))?; let work_item_id = args .get("work_item_id") diff --git a/src-tauri/src/commands/system.rs b/src-tauri/src/commands/system.rs index ead2f9a4..40cd21e5 100644 --- a/src-tauri/src/commands/system.rs +++ b/src-tauri/src/commands/system.rs @@ -176,7 +176,7 @@ pub async fn save_ai_provider( provider.user_id, ], ) - .map_err(|e| format!("Failed to save AI provider: {}", e))?; + .map_err(|e| format!("Failed to save AI provider: {e}"))?; Ok(()) } @@ -267,7 +267,7 @@ pub async fn delete_ai_provider( let db = state.db.lock().map_err(|e| e.to_string())?; db.execute("DELETE FROM ai_providers WHERE name = ?1", [&name]) - .map_err(|e| format!("Failed to delete AI provider: {}", e))?; + .map_err(|e| format!("Failed to delete AI provider: {e}"))?; Ok(()) } diff --git 
a/src-tauri/src/integrations/azuredevops_search.rs b/src-tauri/src/integrations/azuredevops_search.rs index ec534eb4..9aabced4 100644 --- a/src-tauri/src/integrations/azuredevops_search.rs +++ b/src-tauri/src/integrations/azuredevops_search.rs @@ -34,21 +34,20 @@ pub async fn search_wiki( .json(&search_body) .send() .await - .map_err(|e| format!("Azure DevOps wiki search failed: {}", e))?; + .map_err(|e| format!("Azure DevOps wiki search failed: {e}"))?; if !resp.status().is_success() { let status = resp.status(); let text = resp.text().await.unwrap_or_default(); return Err(format!( - "Azure DevOps wiki search failed with status {}: {}", - status, text + "Azure DevOps wiki search failed with status {status}: {text}" )); } let json: serde_json::Value = resp .json() .await - .map_err(|e| format!("Failed to parse ADO wiki search response: {}", e))?; + .map_err(|e| format!("Failed to parse ADO wiki search response: {e}"))?; let mut results = Vec::new(); @@ -118,16 +117,17 @@ async fn fetch_wiki_page( .header("Accept", "application/json") .send() .await - .map_err(|e| format!("Failed to fetch wiki page: {}", e))?; + .map_err(|e| format!("Failed to fetch wiki page: {e}"))?; if !resp.status().is_success() { - return Err(format!("Failed to fetch wiki page: {}", resp.status())); + let status = resp.status(); + return Err(format!("Failed to fetch wiki page: {status}")); } let json: serde_json::Value = resp .json() .await - .map_err(|e| format!("Failed to parse wiki page: {}", e))?; + .map_err(|e| format!("Failed to parse wiki page: {e}"))?; let content = json["content"].as_str().unwrap_or("").to_string(); @@ -158,8 +158,7 @@ pub async fn search_work_items( ); let wiql_query = format!( - "SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{}' AND ([System.Title] CONTAINS '{}' OR [System.Description] CONTAINS '{}') ORDER BY [System.ChangedDate] DESC", - project, query, query + "SELECT [System.Id], 
[System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] CONTAINS '{query}' OR [System.Description] CONTAINS '{query}') ORDER BY [System.ChangedDate] DESC" ); let wiql_body = serde_json::json!({ @@ -176,7 +175,7 @@ pub async fn search_work_items( .json(&wiql_body) .send() .await - .map_err(|e| format!("ADO work item search failed: {}", e))?; + .map_err(|e| format!("ADO work item search failed: {e}"))?; if !resp.status().is_success() { return Ok(Vec::new()); // Don't fail if work item search fails @@ -222,16 +221,17 @@ async fn fetch_work_item_details( .header("Accept", "application/json") .send() .await - .map_err(|e| format!("Failed to fetch work item: {}", e))?; + .map_err(|e| format!("Failed to fetch work item: {e}"))?; if !resp.status().is_success() { - return Err(format!("Failed to fetch work item: {}", resp.status())); + let status = resp.status(); + return Err(format!("Failed to fetch work item: {status}")); } let json: serde_json::Value = resp .json() .await - .map_err(|e| format!("Failed to parse work item: {}", e))?; + .map_err(|e| format!("Failed to parse work item: {e}"))?; let fields = &json["fields"]; let title = format!( @@ -251,7 +251,7 @@ async fn fetch_work_item_details( .to_string(); let state = fields["System.State"].as_str().unwrap_or("Unknown"); - let content = format!("State: {}\n\nDescription: {}", state, description); + let content = format!("State: {state}\n\nDescription: {description}"); let excerpt = content.chars().take(200).collect::<String>(); diff --git a/src-tauri/src/integrations/confluence_search.rs b/src-tauri/src/integrations/confluence_search.rs index ce8834e0..1d814e2f 100644 --- a/src-tauri/src/integrations/confluence_search.rs +++ b/src-tauri/src/integrations/confluence_search.rs @@ -33,21 +33,20 @@ pub async fn search_confluence( .header("Accept", "application/json") .send() .await - .map_err(|e| format!("Confluence search request failed: {}", e))?; + 
.map_err(|e| format!("Confluence search request failed: {e}"))?; if !resp.status().is_success() { let status = resp.status(); let text = resp.text().await.unwrap_or_default(); return Err(format!( - "Confluence search failed with status {}: {}", - status, text + "Confluence search failed with status {status}: {text}" )); } let json: serde_json::Value = resp .json() .await - .map_err(|e| format!("Failed to parse Confluence search response: {}", e))?; + .map_err(|e| format!("Failed to parse Confluence search response: {e}"))?; let mut results = Vec::new(); @@ -120,16 +119,17 @@ async fn fetch_page_content( .header("Accept", "application/json") .send() .await - .map_err(|e| format!("Failed to fetch page content: {}", e))?; + .map_err(|e| format!("Failed to fetch page content: {e}"))?; if !resp.status().is_success() { - return Err(format!("Failed to fetch page: {}", resp.status())); + let status = resp.status(); + return Err(format!("Failed to fetch page: {status}")); } let json: serde_json::Value = resp .json() .await - .map_err(|e| format!("Failed to parse page content: {}", e))?; + .map_err(|e| format!("Failed to parse page content: {e}"))?; // Extract plain text from HTML storage format let html = json["body"]["storage"]["value"] diff --git a/src-tauri/src/integrations/servicenow_search.rs b/src-tauri/src/integrations/servicenow_search.rs index 187908f1..15e52105 100644 --- a/src-tauri/src/integrations/servicenow_search.rs +++ b/src-tauri/src/integrations/servicenow_search.rs @@ -25,21 +25,20 @@ pub async fn search_servicenow( .header("Accept", "application/json") .send() .await - .map_err(|e| format!("ServiceNow search request failed: {}", e))?; + .map_err(|e| format!("ServiceNow search request failed: {e}"))?; if !resp.status().is_success() { let status = resp.status(); let text = resp.text().await.unwrap_or_default(); return Err(format!( - "ServiceNow search failed with status {}: {}", - status, text + "ServiceNow search failed with status {status}: {text}" )); } 
let json: serde_json::Value = resp .json() .await - .map_err(|e| format!("Failed to parse ServiceNow search response: {}", e))?; + .map_err(|e| format!("Failed to parse ServiceNow search response: {e}"))?; let mut results = Vec::new(); @@ -113,7 +112,7 @@ pub async fn search_incidents( .header("Accept", "application/json") .send() .await - .map_err(|e| format!("ServiceNow incident search failed: {}", e))?; + .map_err(|e| format!("ServiceNow incident search failed: {e}"))?; if !resp.status().is_success() { return Ok(Vec::new()); // Don't fail if incident search fails @@ -146,7 +145,7 @@ pub async fn search_incidents( let resolution = item["close_notes"].as_str().unwrap_or("").to_string(); - let content = format!("Description: {}\nResolution: {}", description, resolution); + let content = format!("Description: {description}\nResolution: {resolution}"); let excerpt = content.chars().take(200).collect::<String>(); diff --git a/src-tauri/src/integrations/webview_auth.rs b/src-tauri/src/integrations/webview_auth.rs index 9b99f0a8..3247cbaa 100644 --- a/src-tauri/src/integrations/webview_auth.rs +++ b/src-tauri/src/integrations/webview_auth.rs @@ -60,8 +60,8 @@ pub async fn authenticate_with_webview( tracing::info!("Creating webview window with label: {}", webview_label); let parsed_url = login_url.parse().map_err(|e| { - let err_msg = format!("Failed to parse URL '{}': {}", login_url, e); - tracing::error!("{}", err_msg); + let err_msg = format!("Failed to parse URL '{login_url}': {e}"); + tracing::error!("{err_msg}"); err_msg })?; diff --git a/src-tauri/src/integrations/webview_fetch.rs b/src-tauri/src/integrations/webview_fetch.rs index 5cedba84..96f0843c 100644 --- a/src-tauri/src/integrations/webview_fetch.rs +++ b/src-tauri/src/integrations/webview_fetch.rs @@ -21,7 +21,7 @@ pub async fn fetch_from_webview( // For POST/PUT with JSON body ( "headers: { 'Accept': 'application/json', 'Content-Type': 'application/json' }", - format!(", body: JSON.stringify({})", b), + 
format!(", body: JSON.stringify({b})"), ) } else { // For GET requests @@ -34,13 +34,13 @@ pub async fn fetch_from_webview( let fetch_script = format!( r#" (async function() {{ - const requestId = '{}'; + const requestId = '{request_id}'; try {{ - const response = await fetch('{}', {{ - method: '{}', - {}, - credentials: 'include'{} + const response = await fetch('{url}', {{ + method: '{method}', + {headers_js}, + credentials: 'include'{body_js} }}); if (!response.ok) {{ @@ -59,14 +59,13 @@ pub async fn fetch_from_webview( }})); }} }})(); - "#, - request_id, url, method, headers_js, body_js + "# ); // Execute the fetch webview_window .eval(&fetch_script) - .map_err(|e| format!("Failed to execute fetch: {}", e))?; + .map_err(|e| format!("Failed to execute fetch: {e}"))?; // Poll for result by checking window URL/hash for i in 0..50 { @@ -77,7 +76,7 @@ pub async fn fetch_from_webview( let url_string = url_str.to_string(); // Check for success - let success_marker = format!("#trcaa-success-{}-", request_id); + let success_marker = format!("#trcaa-success-{request_id}-"); if url_string.contains(&success_marker) { // Extract the JSON from the hash if let Some(json_start) = url_string.find(&success_marker) { @@ -96,7 +95,7 @@ pub async fn fetch_from_webview( } // Check for error - let error_marker = format!("#trcaa-error-{}-", request_id); + let error_marker = format!("#trcaa-error-{request_id}-"); if url_string.contains(&error_marker) { if let Some(json_start) = url_string.find(&error_marker) { let json_encoded = &url_string[json_start + error_marker.len()..]; @@ -104,7 +103,7 @@ pub async fn fetch_from_webview( // Clear the hash webview_window.eval("window.location.hash = '';").ok(); - return Err(format!("Webview fetch error: {}", decoded)); + return Err(format!("Webview fetch error: {decoded}")); } } } @@ -133,15 +132,16 @@ pub async fn search_confluence_webview( // Multiple keywords - search for any of them let keyword_conditions: Vec<String> = keywords .iter() - .map(|k| 
format!("text ~ \"{}\"", k)) + .map(|k| format!("text ~ \"{k}\"")) .collect(); keyword_conditions.join(" OR ") } else if !keywords.is_empty() { // Single keyword - format!("text ~ \"{}\"", keywords[0]) + let keyword = &keywords[0]; + format!("text ~ \"{keyword}\"") } else { // Fallback to original query - format!("text ~ \"{}\"", query) + format!("text ~ \"{query}\"") }; let search_url = format!( @@ -182,9 +182,8 @@ pub async fn search_confluence_webview( // Fetch full page content let content = if let Some(id) = content_id { let content_url = format!( - "{}/rest/api/content/{}?expand=body.storage", - base_url.trim_end_matches('/'), - id + "{}/rest/api/content/{id}?expand=body.storage", + base_url.trim_end_matches('/') ); if let Ok(content_resp) = fetch_from_webview(webview_window, &content_url, "GET", None).await @@ -320,9 +319,8 @@ pub async fn search_servicenow_webview( .to_string(); let sys_id = item["sys_id"].as_str().unwrap_or(""); let url = format!( - "{}/kb_view.do?sysparm_article={}", - instance_url.trim_end_matches('/'), - sys_id + "{}/kb_view.do?sysparm_article={sys_id}", + instance_url.trim_end_matches('/') ); let text = item["text"].as_str().unwrap_or(""); let excerpt = text.chars().take(300).collect(); @@ -362,13 +360,12 @@ pub async fn search_servicenow_webview( ); let sys_id = item["sys_id"].as_str().unwrap_or(""); let url = format!( - "{}/incident.do?sys_id={}", - instance_url.trim_end_matches('/'), - sys_id + "{}/incident.do?sys_id={sys_id}", + instance_url.trim_end_matches('/') ); let description = item["description"].as_str().unwrap_or(""); let resolution = item["close_notes"].as_str().unwrap_or(""); - let content = format!("Description: {}\nResolution: {}", description, resolution); + let content = format!("Description: {description}\nResolution: {resolution}"); let excerpt = content.chars().take(200).collect(); results.push(SearchResult { @@ -479,7 +476,7 @@ fn search_page_recursive( page: &Value, search_text: &str, org_url: &str, - project: 
&str, + _project: &str, wiki_id: &str, results: &mut Vec<SearchResult>, ) { @@ -509,7 +506,7 @@ fn search_page_recursive( // Create excerpt from first occurrence let excerpt = if let Some(pos) = - content_lower.find(&search_lower.split_whitespace().next().unwrap_or("")) + content_lower.find(search_lower.split_whitespace().next().unwrap_or("")) { let start = pos.saturating_sub(50); let end = (pos + 200).min(content.len()); @@ -537,7 +534,7 @@ fn search_page_recursive( // Recurse into subpages if let Some(subpages) = page.get("subPages").and_then(|s| s.as_array()) { for subpage in subpages { - search_page_recursive(subpage, search_text, org_url, project, wiki_id, results); + search_page_recursive(subpage, search_text, org_url, _project, wiki_id, results); } } } @@ -633,7 +630,7 @@ pub async fn search_azuredevops_workitems_webview( let excerpt = clean_description.chars().take(200).collect(); let url = - format!("{}/_workitems/edit/{}", org_url.trim_end_matches('/'), id); + format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/')); let full_content = if clean_description.len() > 3000 { format!("{}...", &clean_description[..3000]) } else { @@ -642,7 +639,7 @@ pub async fn search_azuredevops_workitems_webview( }; results.push(SearchResult { - title: format!("{} #{}: {}", work_item_type, id, title), + title: format!("{work_item_type} #{id}: {title}"), url, excerpt, content: Some(full_content), @@ -669,9 +666,8 @@ pub async fn add_azuredevops_comment_webview( comment_text: &str, ) -> Result<String, String> { let comment_url = format!( - "{}/_apis/wit/workitems/{}/comments?api-version=7.0", - org_url.trim_end_matches('/'), - work_item_id + "{}/_apis/wit/workitems/{work_item_id}/comments?api-version=7.0", + org_url.trim_end_matches('/') ); let body = serde_json::json!({ @@ -690,9 +686,7 @@ pub async fn add_azuredevops_comment_webview( .ok_or_else(|| "Failed to get comment ID from response".to_string())?; tracing::info!( - "Successfully added comment {} to work item {}", - comment_id, - work_item_id + 
"Successfully added comment {comment_id} to work item {work_item_id}" ); - Ok(format!("Comment added successfully (ID: {})", comment_id)) + Ok(format!("Comment added successfully (ID: {comment_id})")) }