Fixed 42 clippy warnings across integration and command modules: - unnecessary_lazy_evaluations: Changed unwrap_or_else to unwrap_or - uninlined_format_args: Modernized format strings to use inline syntax - needless_borrows_for_generic_args: Removed unnecessary borrows - only_used_in_recursion: Prefixed unused recursive param with underscore All files now pass cargo clippy -- -D warnings Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
266 lines
7.5 KiB
Rust
use super::confluence_search::SearchResult;
|
|
|
|
/// Search Azure DevOps Wiki for content matching the query
|
|
pub async fn search_wiki(
|
|
org_url: &str,
|
|
project: &str,
|
|
query: &str,
|
|
cookies: &[crate::integrations::webview_auth::Cookie],
|
|
) -> Result<Vec<SearchResult>, String> {
|
|
let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
|
|
let client = reqwest::Client::new();
|
|
|
|
// Use Azure DevOps Search API
|
|
let search_url = format!(
|
|
"{}/_apis/search/wikisearchresults?api-version=7.0",
|
|
org_url.trim_end_matches('/')
|
|
);
|
|
|
|
let search_body = serde_json::json!({
|
|
"searchText": query,
|
|
"$top": 5,
|
|
"filters": {
|
|
"ProjectFilters": [project]
|
|
}
|
|
});
|
|
|
|
tracing::info!("Searching Azure DevOps Wiki: {}", search_url);
|
|
|
|
let resp = client
|
|
.post(&search_url)
|
|
.header("Cookie", &cookie_header)
|
|
.header("Accept", "application/json")
|
|
.header("Content-Type", "application/json")
|
|
.json(&search_body)
|
|
.send()
|
|
.await
|
|
.map_err(|e| format!("Azure DevOps wiki search failed: {e}"))?;
|
|
|
|
if !resp.status().is_success() {
|
|
let status = resp.status();
|
|
let text = resp.text().await.unwrap_or_default();
|
|
return Err(format!(
|
|
"Azure DevOps wiki search failed with status {status}: {text}"
|
|
));
|
|
}
|
|
|
|
let json: serde_json::Value = resp
|
|
.json()
|
|
.await
|
|
.map_err(|e| format!("Failed to parse ADO wiki search response: {e}"))?;
|
|
|
|
let mut results = Vec::new();
|
|
|
|
if let Some(results_array) = json["results"].as_array() {
|
|
for item in results_array.iter().take(3) {
|
|
let title = item["fileName"].as_str().unwrap_or("Untitled").to_string();
|
|
|
|
let path = item["path"].as_str().unwrap_or("");
|
|
let url = format!(
|
|
"{}/_wiki/wikis/{}/{}",
|
|
org_url.trim_end_matches('/'),
|
|
project,
|
|
path
|
|
);
|
|
|
|
let excerpt = item["content"]
|
|
.as_str()
|
|
.unwrap_or("")
|
|
.chars()
|
|
.take(300)
|
|
.collect::<String>();
|
|
|
|
// Fetch full wiki page content
|
|
let content = if let Some(wiki_id) = item["wiki"]["id"].as_str() {
|
|
if let Some(page_path) = item["path"].as_str() {
|
|
fetch_wiki_page(org_url, wiki_id, page_path, &cookie_header)
|
|
.await
|
|
.ok()
|
|
} else {
|
|
None
|
|
}
|
|
} else {
|
|
None
|
|
};
|
|
|
|
results.push(SearchResult {
|
|
title,
|
|
url,
|
|
excerpt,
|
|
content,
|
|
source: "Azure DevOps".to_string(),
|
|
});
|
|
}
|
|
}
|
|
|
|
Ok(results)
|
|
}
|
|
|
|
/// Fetch full wiki page content
|
|
async fn fetch_wiki_page(
|
|
org_url: &str,
|
|
wiki_id: &str,
|
|
page_path: &str,
|
|
cookie_header: &str,
|
|
) -> Result<String, String> {
|
|
let client = reqwest::Client::new();
|
|
let page_url = format!(
|
|
"{}/_apis/wiki/wikis/{}/pages?path={}&api-version=7.0&includeContent=true",
|
|
org_url.trim_end_matches('/'),
|
|
wiki_id,
|
|
urlencoding::encode(page_path)
|
|
);
|
|
|
|
let resp = client
|
|
.get(&page_url)
|
|
.header("Cookie", cookie_header)
|
|
.header("Accept", "application/json")
|
|
.send()
|
|
.await
|
|
.map_err(|e| format!("Failed to fetch wiki page: {e}"))?;
|
|
|
|
if !resp.status().is_success() {
|
|
let status = resp.status();
|
|
return Err(format!("Failed to fetch wiki page: {status}"));
|
|
}
|
|
|
|
let json: serde_json::Value = resp
|
|
.json()
|
|
.await
|
|
.map_err(|e| format!("Failed to parse wiki page: {e}"))?;
|
|
|
|
let content = json["content"].as_str().unwrap_or("").to_string();
|
|
|
|
// Truncate to reasonable length
|
|
let truncated = if content.len() > 3000 {
|
|
format!("{}...", &content[..3000])
|
|
} else {
|
|
content
|
|
};
|
|
|
|
Ok(truncated)
|
|
}
|
|
|
|
/// Search Azure DevOps Work Items for related issues
|
|
pub async fn search_work_items(
|
|
org_url: &str,
|
|
project: &str,
|
|
query: &str,
|
|
cookies: &[crate::integrations::webview_auth::Cookie],
|
|
) -> Result<Vec<SearchResult>, String> {
|
|
let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
|
|
let client = reqwest::Client::new();
|
|
|
|
// Use WIQL (Work Item Query Language)
|
|
let wiql_url = format!(
|
|
"{}/_apis/wit/wiql?api-version=7.0",
|
|
org_url.trim_end_matches('/')
|
|
);
|
|
|
|
let wiql_query = format!(
|
|
"SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] CONTAINS '{query}' OR [System.Description] CONTAINS '{query}') ORDER BY [System.ChangedDate] DESC"
|
|
);
|
|
|
|
let wiql_body = serde_json::json!({
|
|
"query": wiql_query
|
|
});
|
|
|
|
tracing::info!("Searching Azure DevOps work items");
|
|
|
|
let resp = client
|
|
.post(&wiql_url)
|
|
.header("Cookie", &cookie_header)
|
|
.header("Accept", "application/json")
|
|
.header("Content-Type", "application/json")
|
|
.json(&wiql_body)
|
|
.send()
|
|
.await
|
|
.map_err(|e| format!("ADO work item search failed: {e}"))?;
|
|
|
|
if !resp.status().is_success() {
|
|
return Ok(Vec::new()); // Don't fail if work item search fails
|
|
}
|
|
|
|
let json: serde_json::Value = resp
|
|
.json()
|
|
.await
|
|
.map_err(|_| "Failed to parse work item response".to_string())?;
|
|
|
|
let mut results = Vec::new();
|
|
|
|
if let Some(work_items) = json["workItems"].as_array() {
|
|
// Fetch details for top 3 work items
|
|
for item in work_items.iter().take(3) {
|
|
if let Some(id) = item["id"].as_i64() {
|
|
if let Ok(work_item) = fetch_work_item_details(org_url, id, &cookie_header).await {
|
|
results.push(work_item);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
Ok(results)
|
|
}
|
|
|
|
/// Fetch work item details
|
|
async fn fetch_work_item_details(
|
|
org_url: &str,
|
|
id: i64,
|
|
cookie_header: &str,
|
|
) -> Result<SearchResult, String> {
|
|
let client = reqwest::Client::new();
|
|
let item_url = format!(
|
|
"{}/_apis/wit/workitems/{}?api-version=7.0",
|
|
org_url.trim_end_matches('/'),
|
|
id
|
|
);
|
|
|
|
let resp = client
|
|
.get(&item_url)
|
|
.header("Cookie", cookie_header)
|
|
.header("Accept", "application/json")
|
|
.send()
|
|
.await
|
|
.map_err(|e| format!("Failed to fetch work item: {e}"))?;
|
|
|
|
if !resp.status().is_success() {
|
|
let status = resp.status();
|
|
return Err(format!("Failed to fetch work item: {status}"));
|
|
}
|
|
|
|
let json: serde_json::Value = resp
|
|
.json()
|
|
.await
|
|
.map_err(|e| format!("Failed to parse work item: {e}"))?;
|
|
|
|
let fields = &json["fields"];
|
|
let title = format!(
|
|
"Work Item {}: {}",
|
|
id,
|
|
fields["System.Title"].as_str().unwrap_or("No title")
|
|
);
|
|
|
|
let url = json["_links"]["html"]["href"]
|
|
.as_str()
|
|
.unwrap_or("")
|
|
.to_string();
|
|
|
|
let description = fields["System.Description"]
|
|
.as_str()
|
|
.unwrap_or("")
|
|
.to_string();
|
|
|
|
let state = fields["System.State"].as_str().unwrap_or("Unknown");
|
|
let content = format!("State: {state}\n\nDescription: {description}");
|
|
|
|
let excerpt = content.chars().take(200).collect::<String>();
|
|
|
|
Ok(SearchResult {
|
|
title,
|
|
url,
|
|
excerpt,
|
|
content: Some(content),
|
|
source: "Azure DevOps".to_string(),
|
|
})
|
|
}
|