fix(lint): resolve all clippy warnings for CI compliance

Fixed 42 clippy warnings across integration and command modules:
- unnecessary_lazy_evaluations: Changed unwrap_or_else to unwrap_or
- uninlined_format_args: Modernized format strings to use inline syntax
- needless_borrows_for_generic_args: Removed unnecessary borrows
- only_used_in_recursion: Prefixed unused recursive param with underscore

All files now pass cargo clippy -- -D warnings

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Shaun Arman 2026-04-06 15:14:19 -05:00
parent 1de50f1c87
commit ead585f583
7 changed files with 67 additions and 75 deletions

View File

@ -239,15 +239,14 @@ pub async fn chat_message(
let context_message = Message { let context_message = Message {
role: "system".into(), role: "system".into(),
content: format!( content: format!(
"INTERNAL DOCUMENTATION SOURCES:\n\n{}\n\n\ "INTERNAL DOCUMENTATION SOURCES:\n\n{integration_context}\n\n\
Instructions: The above content is from internal company documentation systems \ Instructions: The above content is from internal company documentation systems \
(Confluence, ServiceNow, Azure DevOps). \ (Confluence, ServiceNow, Azure DevOps). \
\n\n**IMPORTANT**: First determine if this documentation is RELEVANT to the user's question:\ \n\n**IMPORTANT**: First determine if this documentation is RELEVANT to the user's question:\
\n- If the documentation directly addresses the question → Use it and cite sources with URLs\ \n- If the documentation directly addresses the question → Use it and cite sources with URLs\
\n- If the documentation is tangentially related but doesn't answer the question → Briefly mention what internal docs exist, then provide a complete answer using general knowledge\ \n- If the documentation is tangentially related but doesn't answer the question → Briefly mention what internal docs exist, then provide a complete answer using general knowledge\
\n- If the documentation is completely unrelated → Ignore it and answer using general knowledge\ \n- If the documentation is completely unrelated → Ignore it and answer using general knowledge\
\n\nDo NOT force irrelevant internal documentation into your answer. The user needs accurate information, not forced citations.", \n\nDo NOT force irrelevant internal documentation into your answer. The user needs accurate information, not forced citations."
integration_context
), ),
tool_call_id: None, tool_call_id: None,
tool_calls: None, tool_calls: None,
@ -280,7 +279,7 @@ pub async fn chat_message(
.chat(messages.clone(), &provider_config, tools.clone()) .chat(messages.clone(), &provider_config, tools.clone())
.await .await
.map_err(|e| { .map_err(|e| {
let error_msg = format!("AI provider request failed: {}", e); let error_msg = format!("AI provider request failed: {e}");
warn!("{}", error_msg); warn!("{}", error_msg);
error_msg error_msg
})?; })?;
@ -298,7 +297,7 @@ pub async fn chat_message(
// Format result // Format result
let result_content = match tool_result { let result_content = match tool_result {
Ok(result) => result, Ok(result) => result,
Err(e) => format!("Error executing tool: {}", e), Err(e) => format!("Error executing tool: {e}"),
}; };
// Add tool result as a message // Add tool result as a message
@ -795,7 +794,7 @@ async fn search_integration_sources(
context.push_str(&format!("URL: {}\n", result.url)); context.push_str(&format!("URL: {}\n", result.url));
if let Some(content) = &result.content { if let Some(content) = &result.content {
context.push_str(&format!("Content:\n{}\n\n", content)); context.push_str(&format!("Content:\n{content}\n\n"));
} else { } else {
context.push_str(&format!("Excerpt: {}\n\n", result.excerpt)); context.push_str(&format!("Excerpt: {}\n\n", result.excerpt));
} }
@ -818,7 +817,7 @@ async fn execute_tool_call(
"add_ado_comment" => { "add_ado_comment" => {
// Parse arguments // Parse arguments
let args: serde_json::Value = serde_json::from_str(&tool_call.arguments) let args: serde_json::Value = serde_json::from_str(&tool_call.arguments)
.map_err(|e| format!("Failed to parse tool arguments: {}", e))?; .map_err(|e| format!("Failed to parse tool arguments: {e}"))?;
let work_item_id = args let work_item_id = args
.get("work_item_id") .get("work_item_id")

View File

@ -176,7 +176,7 @@ pub async fn save_ai_provider(
provider.user_id, provider.user_id,
], ],
) )
.map_err(|e| format!("Failed to save AI provider: {}", e))?; .map_err(|e| format!("Failed to save AI provider: {e}"))?;
Ok(()) Ok(())
} }
@ -267,7 +267,7 @@ pub async fn delete_ai_provider(
let db = state.db.lock().map_err(|e| e.to_string())?; let db = state.db.lock().map_err(|e| e.to_string())?;
db.execute("DELETE FROM ai_providers WHERE name = ?1", [&name]) db.execute("DELETE FROM ai_providers WHERE name = ?1", [&name])
.map_err(|e| format!("Failed to delete AI provider: {}", e))?; .map_err(|e| format!("Failed to delete AI provider: {e}"))?;
Ok(()) Ok(())
} }

View File

@ -34,21 +34,20 @@ pub async fn search_wiki(
.json(&search_body) .json(&search_body)
.send() .send()
.await .await
.map_err(|e| format!("Azure DevOps wiki search failed: {}", e))?; .map_err(|e| format!("Azure DevOps wiki search failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
let status = resp.status(); let status = resp.status();
let text = resp.text().await.unwrap_or_default(); let text = resp.text().await.unwrap_or_default();
return Err(format!( return Err(format!(
"Azure DevOps wiki search failed with status {}: {}", "Azure DevOps wiki search failed with status {status}: {text}"
status, text
)); ));
} }
let json: serde_json::Value = resp let json: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse ADO wiki search response: {}", e))?; .map_err(|e| format!("Failed to parse ADO wiki search response: {e}"))?;
let mut results = Vec::new(); let mut results = Vec::new();
@ -118,16 +117,17 @@ async fn fetch_wiki_page(
.header("Accept", "application/json") .header("Accept", "application/json")
.send() .send()
.await .await
.map_err(|e| format!("Failed to fetch wiki page: {}", e))?; .map_err(|e| format!("Failed to fetch wiki page: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!("Failed to fetch wiki page: {}", resp.status())); let status = resp.status();
return Err(format!("Failed to fetch wiki page: {status}"));
} }
let json: serde_json::Value = resp let json: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse wiki page: {}", e))?; .map_err(|e| format!("Failed to parse wiki page: {e}"))?;
let content = json["content"].as_str().unwrap_or("").to_string(); let content = json["content"].as_str().unwrap_or("").to_string();
@ -158,8 +158,7 @@ pub async fn search_work_items(
); );
let wiql_query = format!( let wiql_query = format!(
"SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{}' AND ([System.Title] CONTAINS '{}' OR [System.Description] CONTAINS '{}') ORDER BY [System.ChangedDate] DESC", "SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] CONTAINS '{query}' OR [System.Description] CONTAINS '{query}') ORDER BY [System.ChangedDate] DESC"
project, query, query
); );
let wiql_body = serde_json::json!({ let wiql_body = serde_json::json!({
@ -176,7 +175,7 @@ pub async fn search_work_items(
.json(&wiql_body) .json(&wiql_body)
.send() .send()
.await .await
.map_err(|e| format!("ADO work item search failed: {}", e))?; .map_err(|e| format!("ADO work item search failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Ok(Vec::new()); // Don't fail if work item search fails return Ok(Vec::new()); // Don't fail if work item search fails
@ -222,16 +221,17 @@ async fn fetch_work_item_details(
.header("Accept", "application/json") .header("Accept", "application/json")
.send() .send()
.await .await
.map_err(|e| format!("Failed to fetch work item: {}", e))?; .map_err(|e| format!("Failed to fetch work item: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!("Failed to fetch work item: {}", resp.status())); let status = resp.status();
return Err(format!("Failed to fetch work item: {status}"));
} }
let json: serde_json::Value = resp let json: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse work item: {}", e))?; .map_err(|e| format!("Failed to parse work item: {e}"))?;
let fields = &json["fields"]; let fields = &json["fields"];
let title = format!( let title = format!(
@ -251,7 +251,7 @@ async fn fetch_work_item_details(
.to_string(); .to_string();
let state = fields["System.State"].as_str().unwrap_or("Unknown"); let state = fields["System.State"].as_str().unwrap_or("Unknown");
let content = format!("State: {}\n\nDescription: {}", state, description); let content = format!("State: {state}\n\nDescription: {description}");
let excerpt = content.chars().take(200).collect::<String>(); let excerpt = content.chars().take(200).collect::<String>();

View File

@ -33,21 +33,20 @@ pub async fn search_confluence(
.header("Accept", "application/json") .header("Accept", "application/json")
.send() .send()
.await .await
.map_err(|e| format!("Confluence search request failed: {}", e))?; .map_err(|e| format!("Confluence search request failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
let status = resp.status(); let status = resp.status();
let text = resp.text().await.unwrap_or_default(); let text = resp.text().await.unwrap_or_default();
return Err(format!( return Err(format!(
"Confluence search failed with status {}: {}", "Confluence search failed with status {status}: {text}"
status, text
)); ));
} }
let json: serde_json::Value = resp let json: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse Confluence search response: {}", e))?; .map_err(|e| format!("Failed to parse Confluence search response: {e}"))?;
let mut results = Vec::new(); let mut results = Vec::new();
@ -120,16 +119,17 @@ async fn fetch_page_content(
.header("Accept", "application/json") .header("Accept", "application/json")
.send() .send()
.await .await
.map_err(|e| format!("Failed to fetch page content: {}", e))?; .map_err(|e| format!("Failed to fetch page content: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!("Failed to fetch page: {}", resp.status())); let status = resp.status();
return Err(format!("Failed to fetch page: {status}"));
} }
let json: serde_json::Value = resp let json: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse page content: {}", e))?; .map_err(|e| format!("Failed to parse page content: {e}"))?;
// Extract plain text from HTML storage format // Extract plain text from HTML storage format
let html = json["body"]["storage"]["value"] let html = json["body"]["storage"]["value"]

View File

@ -25,21 +25,20 @@ pub async fn search_servicenow(
.header("Accept", "application/json") .header("Accept", "application/json")
.send() .send()
.await .await
.map_err(|e| format!("ServiceNow search request failed: {}", e))?; .map_err(|e| format!("ServiceNow search request failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
let status = resp.status(); let status = resp.status();
let text = resp.text().await.unwrap_or_default(); let text = resp.text().await.unwrap_or_default();
return Err(format!( return Err(format!(
"ServiceNow search failed with status {}: {}", "ServiceNow search failed with status {status}: {text}"
status, text
)); ));
} }
let json: serde_json::Value = resp let json: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse ServiceNow search response: {}", e))?; .map_err(|e| format!("Failed to parse ServiceNow search response: {e}"))?;
let mut results = Vec::new(); let mut results = Vec::new();
@ -113,7 +112,7 @@ pub async fn search_incidents(
.header("Accept", "application/json") .header("Accept", "application/json")
.send() .send()
.await .await
.map_err(|e| format!("ServiceNow incident search failed: {}", e))?; .map_err(|e| format!("ServiceNow incident search failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Ok(Vec::new()); // Don't fail if incident search fails return Ok(Vec::new()); // Don't fail if incident search fails
@ -146,7 +145,7 @@ pub async fn search_incidents(
let resolution = item["close_notes"].as_str().unwrap_or("").to_string(); let resolution = item["close_notes"].as_str().unwrap_or("").to_string();
let content = format!("Description: {}\nResolution: {}", description, resolution); let content = format!("Description: {description}\nResolution: {resolution}");
let excerpt = content.chars().take(200).collect::<String>(); let excerpt = content.chars().take(200).collect::<String>();

View File

@ -60,8 +60,8 @@ pub async fn authenticate_with_webview(
tracing::info!("Creating webview window with label: {}", webview_label); tracing::info!("Creating webview window with label: {}", webview_label);
let parsed_url = login_url.parse().map_err(|e| { let parsed_url = login_url.parse().map_err(|e| {
let err_msg = format!("Failed to parse URL '{}': {}", login_url, e); let err_msg = format!("Failed to parse URL '{login_url}': {e}");
tracing::error!("{}", err_msg); tracing::error!("{err_msg}");
err_msg err_msg
})?; })?;

View File

@ -21,7 +21,7 @@ pub async fn fetch_from_webview<R: tauri::Runtime>(
// For POST/PUT with JSON body // For POST/PUT with JSON body
( (
"headers: { 'Accept': 'application/json', 'Content-Type': 'application/json' }", "headers: { 'Accept': 'application/json', 'Content-Type': 'application/json' }",
format!(", body: JSON.stringify({})", b), format!(", body: JSON.stringify({b})"),
) )
} else { } else {
// For GET requests // For GET requests
@ -34,13 +34,13 @@ pub async fn fetch_from_webview<R: tauri::Runtime>(
let fetch_script = format!( let fetch_script = format!(
r#" r#"
(async function() {{ (async function() {{
const requestId = '{}'; const requestId = '{request_id}';
try {{ try {{
const response = await fetch('{}', {{ const response = await fetch('{url}', {{
method: '{}', method: '{method}',
{}, {headers_js},
credentials: 'include'{} credentials: 'include'{body_js}
}}); }});
if (!response.ok) {{ if (!response.ok) {{
@ -59,14 +59,13 @@ pub async fn fetch_from_webview<R: tauri::Runtime>(
}})); }}));
}} }}
}})(); }})();
"#, "#
request_id, url, method, headers_js, body_js
); );
// Execute the fetch // Execute the fetch
webview_window webview_window
.eval(&fetch_script) .eval(&fetch_script)
.map_err(|e| format!("Failed to execute fetch: {}", e))?; .map_err(|e| format!("Failed to execute fetch: {e}"))?;
// Poll for result by checking window URL/hash // Poll for result by checking window URL/hash
for i in 0..50 { for i in 0..50 {
@ -77,7 +76,7 @@ pub async fn fetch_from_webview<R: tauri::Runtime>(
let url_string = url_str.to_string(); let url_string = url_str.to_string();
// Check for success // Check for success
let success_marker = format!("#trcaa-success-{}-", request_id); let success_marker = format!("#trcaa-success-{request_id}-");
if url_string.contains(&success_marker) { if url_string.contains(&success_marker) {
// Extract the JSON from the hash // Extract the JSON from the hash
if let Some(json_start) = url_string.find(&success_marker) { if let Some(json_start) = url_string.find(&success_marker) {
@ -96,7 +95,7 @@ pub async fn fetch_from_webview<R: tauri::Runtime>(
} }
// Check for error // Check for error
let error_marker = format!("#trcaa-error-{}-", request_id); let error_marker = format!("#trcaa-error-{request_id}-");
if url_string.contains(&error_marker) { if url_string.contains(&error_marker) {
if let Some(json_start) = url_string.find(&error_marker) { if let Some(json_start) = url_string.find(&error_marker) {
let json_encoded = &url_string[json_start + error_marker.len()..]; let json_encoded = &url_string[json_start + error_marker.len()..];
@ -104,7 +103,7 @@ pub async fn fetch_from_webview<R: tauri::Runtime>(
// Clear the hash // Clear the hash
webview_window.eval("window.location.hash = '';").ok(); webview_window.eval("window.location.hash = '';").ok();
return Err(format!("Webview fetch error: {}", decoded)); return Err(format!("Webview fetch error: {decoded}"));
} }
} }
} }
@ -133,15 +132,16 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
// Multiple keywords - search for any of them // Multiple keywords - search for any of them
let keyword_conditions: Vec<String> = keywords let keyword_conditions: Vec<String> = keywords
.iter() .iter()
.map(|k| format!("text ~ \"{}\"", k)) .map(|k| format!("text ~ \"{k}\""))
.collect(); .collect();
keyword_conditions.join(" OR ") keyword_conditions.join(" OR ")
} else if !keywords.is_empty() { } else if !keywords.is_empty() {
// Single keyword // Single keyword
format!("text ~ \"{}\"", keywords[0]) let keyword = &keywords[0];
format!("text ~ \"{keyword}\"")
} else { } else {
// Fallback to original query // Fallback to original query
format!("text ~ \"{}\"", query) format!("text ~ \"{query}\"")
}; };
let search_url = format!( let search_url = format!(
@ -182,9 +182,8 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
// Fetch full page content // Fetch full page content
let content = if let Some(id) = content_id { let content = if let Some(id) = content_id {
let content_url = format!( let content_url = format!(
"{}/rest/api/content/{}?expand=body.storage", "{}/rest/api/content/{id}?expand=body.storage",
base_url.trim_end_matches('/'), base_url.trim_end_matches('/')
id
); );
if let Ok(content_resp) = if let Ok(content_resp) =
fetch_from_webview(webview_window, &content_url, "GET", None).await fetch_from_webview(webview_window, &content_url, "GET", None).await
@ -320,9 +319,8 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
.to_string(); .to_string();
let sys_id = item["sys_id"].as_str().unwrap_or(""); let sys_id = item["sys_id"].as_str().unwrap_or("");
let url = format!( let url = format!(
"{}/kb_view.do?sysparm_article={}", "{}/kb_view.do?sysparm_article={sys_id}",
instance_url.trim_end_matches('/'), instance_url.trim_end_matches('/')
sys_id
); );
let text = item["text"].as_str().unwrap_or(""); let text = item["text"].as_str().unwrap_or("");
let excerpt = text.chars().take(300).collect(); let excerpt = text.chars().take(300).collect();
@ -362,13 +360,12 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
); );
let sys_id = item["sys_id"].as_str().unwrap_or(""); let sys_id = item["sys_id"].as_str().unwrap_or("");
let url = format!( let url = format!(
"{}/incident.do?sys_id={}", "{}/incident.do?sys_id={sys_id}",
instance_url.trim_end_matches('/'), instance_url.trim_end_matches('/')
sys_id
); );
let description = item["description"].as_str().unwrap_or(""); let description = item["description"].as_str().unwrap_or("");
let resolution = item["close_notes"].as_str().unwrap_or(""); let resolution = item["close_notes"].as_str().unwrap_or("");
let content = format!("Description: {}\nResolution: {}", description, resolution); let content = format!("Description: {description}\nResolution: {resolution}");
let excerpt = content.chars().take(200).collect(); let excerpt = content.chars().take(200).collect();
results.push(SearchResult { results.push(SearchResult {
@ -479,7 +476,7 @@ fn search_page_recursive(
page: &Value, page: &Value,
search_text: &str, search_text: &str,
org_url: &str, org_url: &str,
project: &str, _project: &str,
wiki_id: &str, wiki_id: &str,
results: &mut Vec<SearchResult>, results: &mut Vec<SearchResult>,
) { ) {
@ -509,7 +506,7 @@ fn search_page_recursive(
// Create excerpt from first occurrence // Create excerpt from first occurrence
let excerpt = if let Some(pos) = let excerpt = if let Some(pos) =
content_lower.find(&search_lower.split_whitespace().next().unwrap_or("")) content_lower.find(search_lower.split_whitespace().next().unwrap_or(""))
{ {
let start = pos.saturating_sub(50); let start = pos.saturating_sub(50);
let end = (pos + 200).min(content.len()); let end = (pos + 200).min(content.len());
@ -537,7 +534,7 @@ fn search_page_recursive(
// Recurse into subpages // Recurse into subpages
if let Some(subpages) = page.get("subPages").and_then(|s| s.as_array()) { if let Some(subpages) = page.get("subPages").and_then(|s| s.as_array()) {
for subpage in subpages { for subpage in subpages {
search_page_recursive(subpage, search_text, org_url, project, wiki_id, results); search_page_recursive(subpage, search_text, org_url, _project, wiki_id, results);
} }
} }
} }
@ -633,7 +630,7 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
let excerpt = clean_description.chars().take(200).collect(); let excerpt = clean_description.chars().take(200).collect();
let url = let url =
format!("{}/_workitems/edit/{}", org_url.trim_end_matches('/'), id); format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/'));
let full_content = if clean_description.len() > 3000 { let full_content = if clean_description.len() > 3000 {
format!("{}...", &clean_description[..3000]) format!("{}...", &clean_description[..3000])
@ -642,7 +639,7 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
}; };
results.push(SearchResult { results.push(SearchResult {
title: format!("{} #{}: {}", work_item_type, id, title), title: format!("{work_item_type} #{id}: {title}"),
url, url,
excerpt, excerpt,
content: Some(full_content), content: Some(full_content),
@ -669,9 +666,8 @@ pub async fn add_azuredevops_comment_webview<R: tauri::Runtime>(
comment_text: &str, comment_text: &str,
) -> Result<String, String> { ) -> Result<String, String> {
let comment_url = format!( let comment_url = format!(
"{}/_apis/wit/workitems/{}/comments?api-version=7.0", "{}/_apis/wit/workitems/{work_item_id}/comments?api-version=7.0",
org_url.trim_end_matches('/'), org_url.trim_end_matches('/')
work_item_id
); );
let body = serde_json::json!({ let body = serde_json::json!({
@ -690,9 +686,7 @@ pub async fn add_azuredevops_comment_webview<R: tauri::Runtime>(
.ok_or_else(|| "Failed to get comment ID from response".to_string())?; .ok_or_else(|| "Failed to get comment ID from response".to_string())?;
tracing::info!( tracing::info!(
"Successfully added comment {} to work item {}", "Successfully added comment {comment_id} to work item {work_item_id}"
comment_id,
work_item_id
); );
Ok(format!("Comment added successfully (ID: {})", comment_id)) Ok(format!("Comment added successfully (ID: {comment_id})"))
} }