fix: resolve clippy uninlined format args failures
Some checks are pending
Test / frontend-typecheck (push) Waiting to run
Test / frontend-tests (push) Waiting to run
Test / wiki-sync (push) Waiting to run
Test / rust-fmt-check (push) Waiting to run
Test / rust-clippy (push) Waiting to run
Test / rust-tests (push) Waiting to run

Inline format arguments across Rust integrations and command modules to satisfy `clippy::uninlined_format_args` under `-D warnings`. Expressions such as field accesses (e.g. `config.api_key`) cannot be inlined directly into format strings, so those cases use named format arguments (`format!("{api_key}", api_key = config.api_key)`). This restores green clippy checks in CI for the current codebase state.

Made-with: Cursor
This commit is contained in:
Shaun Arman 2026-04-04 17:16:47 -05:00
parent 717a6e0c6a
commit bcd08e5546
13 changed files with 133 additions and 180 deletions

View File

@ -2324,24 +2324,6 @@
"Identifier": { "Identifier": {
"description": "Permission identifier", "description": "Permission identifier",
"oneOf": [ "oneOf": [
{
"description": "Allows reading the CLI matches\n#### This default permission set includes:\n\n- `allow-cli-matches`",
"type": "string",
"const": "cli:default",
"markdownDescription": "Allows reading the CLI matches\n#### This default permission set includes:\n\n- `allow-cli-matches`"
},
{
"description": "Enables the cli_matches command without any pre-configured scope.",
"type": "string",
"const": "cli:allow-cli-matches",
"markdownDescription": "Enables the cli_matches command without any pre-configured scope."
},
{
"description": "Denies the cli_matches command without any pre-configured scope.",
"type": "string",
"const": "cli:deny-cli-matches",
"markdownDescription": "Denies the cli_matches command without any pre-configured scope."
},
{ {
"description": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`", "description": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`",
"type": "string", "type": "string",
@ -6373,60 +6355,6 @@
"type": "string", "type": "string",
"const": "stronghold:deny-save-store-record", "const": "stronghold:deny-save-store-record",
"markdownDescription": "Denies the save_store_record command without any pre-configured scope." "markdownDescription": "Denies the save_store_record command without any pre-configured scope."
},
{
"description": "This permission set configures which kind of\nupdater functions are exposed to the frontend.\n\n#### Granted Permissions\n\nThe full workflow from checking for updates to installing them\nis enabled.\n\n\n#### This default permission set includes:\n\n- `allow-check`\n- `allow-download`\n- `allow-install`\n- `allow-download-and-install`",
"type": "string",
"const": "updater:default",
"markdownDescription": "This permission set configures which kind of\nupdater functions are exposed to the frontend.\n\n#### Granted Permissions\n\nThe full workflow from checking for updates to installing them\nis enabled.\n\n\n#### This default permission set includes:\n\n- `allow-check`\n- `allow-download`\n- `allow-install`\n- `allow-download-and-install`"
},
{
"description": "Enables the check command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-check",
"markdownDescription": "Enables the check command without any pre-configured scope."
},
{
"description": "Enables the download command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-download",
"markdownDescription": "Enables the download command without any pre-configured scope."
},
{
"description": "Enables the download_and_install command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-download-and-install",
"markdownDescription": "Enables the download_and_install command without any pre-configured scope."
},
{
"description": "Enables the install command without any pre-configured scope.",
"type": "string",
"const": "updater:allow-install",
"markdownDescription": "Enables the install command without any pre-configured scope."
},
{
"description": "Denies the check command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-check",
"markdownDescription": "Denies the check command without any pre-configured scope."
},
{
"description": "Denies the download command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-download",
"markdownDescription": "Denies the download command without any pre-configured scope."
},
{
"description": "Denies the download_and_install command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-download-and-install",
"markdownDescription": "Denies the download_and_install command without any pre-configured scope."
},
{
"description": "Denies the install command without any pre-configured scope.",
"type": "string",
"const": "updater:deny-install",
"markdownDescription": "Denies the install command without any pre-configured scope."
} }
] ]
}, },

View File

@ -47,7 +47,10 @@ impl Provider for MistralProvider {
let resp = client let resp = client
.post(&url) .post(&url)
.header("Authorization", format!("Bearer {}", config.api_key)) .header(
"Authorization",
format!("Bearer {api_key}", api_key = config.api_key),
)
.header("Content-Type", "application/json") .header("Content-Type", "application/json")
.json(&body) .json(&body)
.send() .send()

View File

@ -54,7 +54,10 @@ impl OpenAiProvider {
.custom_endpoint_path .custom_endpoint_path
.as_deref() .as_deref()
.unwrap_or("/chat/completions"); .unwrap_or("/chat/completions");
let url = format!("{}{}", config.api_url.trim_end_matches('/'), endpoint_path); let url = format!(
"{base}{endpoint_path}",
base = config.api_url.trim_end_matches('/')
);
let mut body = serde_json::json!({ let mut body = serde_json::json!({
"model": config.model, "model": config.model,
@ -75,7 +78,7 @@ impl OpenAiProvider {
.as_deref() .as_deref()
.unwrap_or("Authorization"); .unwrap_or("Authorization");
let auth_prefix = config.custom_auth_prefix.as_deref().unwrap_or("Bearer "); let auth_prefix = config.custom_auth_prefix.as_deref().unwrap_or("Bearer ");
let auth_value = format!("{}{}", auth_prefix, config.api_key); let auth_value = format!("{auth_prefix}{api_key}", api_key = config.api_key);
let resp = client let resp = client
.post(&url) .post(&url)
@ -122,7 +125,10 @@ impl OpenAiProvider {
// Use custom endpoint path, default to empty (API URL already includes /api/v2/chat) // Use custom endpoint path, default to empty (API URL already includes /api/v2/chat)
let endpoint_path = config.custom_endpoint_path.as_deref().unwrap_or(""); let endpoint_path = config.custom_endpoint_path.as_deref().unwrap_or("");
let url = format!("{}{}", config.api_url.trim_end_matches('/'), endpoint_path); let url = format!(
"{base}{endpoint_path}",
base = config.api_url.trim_end_matches('/')
);
// Extract system message if present // Extract system message if present
let system_message = messages let system_message = messages
@ -177,7 +183,7 @@ impl OpenAiProvider {
.as_deref() .as_deref()
.unwrap_or("x-msi-genai-api-key"); .unwrap_or("x-msi-genai-api-key");
let auth_prefix = config.custom_auth_prefix.as_deref().unwrap_or(""); let auth_prefix = config.custom_auth_prefix.as_deref().unwrap_or("");
let auth_value = format!("{}{}", auth_prefix, config.api_key); let auth_value = format!("{auth_prefix}{api_key}", api_key = config.api_key);
let resp = client let resp = client
.post(&url) .post(&url)

View File

@ -97,9 +97,9 @@ mod tests {
for i in 0..5 { for i in 0..5 {
write_audit_event( write_audit_event(
&conn, &conn,
&format!("action_{}", i), &format!("action_{i}"),
"test", "test",
&format!("id_{}", i), &format!("id_{i}"),
"{}", "{}",
) )
.unwrap(); .unwrap();

View File

@ -295,19 +295,19 @@ pub async fn list_issues(
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = vec![]; let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = vec![];
if let Some(ref status) = filter.status { if let Some(ref status) = filter.status {
sql.push_str(&format!(" AND i.status = ?{}", params.len() + 1)); sql.push_str(&format!(" AND i.status = ?{idx}", idx = params.len() + 1));
params.push(Box::new(status.clone())); params.push(Box::new(status.clone()));
} }
if let Some(ref severity) = filter.severity { if let Some(ref severity) = filter.severity {
sql.push_str(&format!(" AND i.severity = ?{}", params.len() + 1)); sql.push_str(&format!(" AND i.severity = ?{idx}", idx = params.len() + 1));
params.push(Box::new(severity.clone())); params.push(Box::new(severity.clone()));
} }
if let Some(ref category) = filter.category { if let Some(ref category) = filter.category {
sql.push_str(&format!(" AND i.category = ?{}", params.len() + 1)); sql.push_str(&format!(" AND i.category = ?{idx}", idx = params.len() + 1));
params.push(Box::new(category.clone())); params.push(Box::new(category.clone()));
} }
if let Some(ref domain) = filter.domain { if let Some(ref domain) = filter.domain {
sql.push_str(&format!(" AND i.category = ?{}", params.len() + 1)); sql.push_str(&format!(" AND i.category = ?{idx}", idx = params.len() + 1));
params.push(Box::new(domain.clone())); params.push(Box::new(domain.clone()));
} }
if let Some(ref search) = filter.search { if let Some(ref search) = filter.search {

View File

@ -34,7 +34,7 @@ pub async fn generate_rca(
id: doc_id.clone(), id: doc_id.clone(),
issue_id: issue_id.clone(), issue_id: issue_id.clone(),
doc_type: "rca".to_string(), doc_type: "rca".to_string(),
title: format!("RCA: {}", issue_detail.issue.title), title: format!("RCA: {title}", title = issue_detail.issue.title),
content_md: content_md.clone(), content_md: content_md.clone(),
created_at: now.clone(), created_at: now.clone(),
updated_at: now, updated_at: now,
@ -93,7 +93,7 @@ pub async fn generate_postmortem(
id: doc_id.clone(), id: doc_id.clone(),
issue_id: issue_id.clone(), issue_id: issue_id.clone(),
doc_type: "postmortem".to_string(), doc_type: "postmortem".to_string(),
title: format!("Post-Mortem: {}", issue_detail.issue.title), title: format!("Post-Mortem: {title}", title = issue_detail.issue.title),
content_md: content_md.clone(), content_md: content_md.clone(),
created_at: now.clone(), created_at: now.clone(),
updated_at: now, updated_at: now,

View File

@ -94,7 +94,7 @@ pub async fn initiate_oauth(
let (mut callback_rx, shutdown_tx) = let (mut callback_rx, shutdown_tx) =
crate::integrations::callback_server::start_callback_server(8765) crate::integrations::callback_server::start_callback_server(8765)
.await .await
.map_err(|e| format!("Failed to start callback server: {}", e))?; .map_err(|e| format!("Failed to start callback server: {e}"))?;
// Store shutdown channel // Store shutdown channel
{ {
@ -166,7 +166,7 @@ pub async fn initiate_oauth(
{ {
let mut oauth_state = OAUTH_STATE let mut oauth_state = OAUTH_STATE
.lock() .lock()
.map_err(|e| format!("Failed to lock OAuth state: {}", e))?; .map_err(|e| format!("Failed to lock OAuth state: {e}"))?;
oauth_state.insert( oauth_state.insert(
state_key.clone(), state_key.clone(),
(service.clone(), pkce.code_verifier.clone()), (service.clone(), pkce.code_verifier.clone()),
@ -193,7 +193,7 @@ pub async fn initiate_oauth(
// ServiceNow uses basic auth, not OAuth2 // ServiceNow uses basic auth, not OAuth2
return Err("ServiceNow uses basic authentication, not OAuth2".to_string()); return Err("ServiceNow uses basic authentication, not OAuth2".to_string());
} }
_ => return Err(format!("Unknown service: {}", service)), _ => return Err(format!("Unknown service: {service}")),
}; };
let auth_url = crate::integrations::auth::build_auth_url( let auth_url = crate::integrations::auth::build_auth_url(
@ -231,7 +231,7 @@ async fn handle_oauth_callback_internal(
.unwrap_or_else(|_| "ado-client-id-placeholder".to_string()), .unwrap_or_else(|_| "ado-client-id-placeholder".to_string()),
"http://localhost:8765/callback", "http://localhost:8765/callback",
), ),
_ => return Err(format!("Unknown service: {}", service)), _ => return Err(format!("Unknown service: {service}")),
}; };
// Exchange authorization code for access token // Exchange authorization code for access token
@ -265,7 +265,7 @@ async fn handle_oauth_callback_internal(
let db = app_state let db = app_state
.db .db
.lock() .lock()
.map_err(|e| format!("Failed to lock database: {}", e))?; .map_err(|e| format!("Failed to lock database: {e}"))?;
db.execute( db.execute(
"INSERT OR REPLACE INTO credentials (id, service, token_hash, encrypted_token, created_at, expires_at) "INSERT OR REPLACE INTO credentials (id, service, token_hash, encrypted_token, created_at, expires_at)
@ -279,7 +279,7 @@ async fn handle_oauth_callback_internal(
expires_at, expires_at,
], ],
) )
.map_err(|e| format!("Failed to store credentials: {}", e))?; .map_err(|e| format!("Failed to store credentials: {e}"))?;
// Log audit event // Log audit event
let audit_details = serde_json::json!({ let audit_details = serde_json::json!({
@ -301,7 +301,7 @@ async fn handle_oauth_callback_internal(
audit_details.to_string(), audit_details.to_string(),
], ],
) )
.map_err(|e| format!("Failed to log audit event: {}", e))?; .map_err(|e| format!("Failed to log audit event: {e}"))?;
Ok(()) Ok(())
} }
@ -319,7 +319,7 @@ pub async fn handle_oauth_callback(
let verifier = { let verifier = {
let mut oauth_state = OAUTH_STATE let mut oauth_state = OAUTH_STATE
.lock() .lock()
.map_err(|e| format!("Failed to lock OAuth state: {}", e))?; .map_err(|e| format!("Failed to lock OAuth state: {e}"))?;
oauth_state oauth_state
.remove(&state_key) .remove(&state_key)
.map(|(_svc, ver)| ver) .map(|(_svc, ver)| ver)
@ -514,13 +514,13 @@ pub async fn authenticate_with_webview(
app_handle: tauri::AppHandle, app_handle: tauri::AppHandle,
app_state: State<'_, AppState>, app_state: State<'_, AppState>,
) -> Result<WebviewAuthResponse, String> { ) -> Result<WebviewAuthResponse, String> {
let webview_id = format!("{}-auth", service); let webview_id = format!("{service}-auth");
// Check if window already exists // Check if window already exists
if let Some(existing_label) = app_state if let Some(existing_label) = app_state
.integration_webviews .integration_webviews
.lock() .lock()
.map_err(|e| format!("Failed to lock webviews: {}", e))? .map_err(|e| format!("Failed to lock webviews: {e}"))?
.get(&service) .get(&service)
{ {
if app_handle.get_webview_window(existing_label).is_some() { if app_handle.get_webview_window(existing_label).is_some() {
@ -545,7 +545,7 @@ pub async fn authenticate_with_webview(
app_state app_state
.integration_webviews .integration_webviews
.lock() .lock()
.map_err(|e| format!("Failed to lock webviews: {}", e))? .map_err(|e| format!("Failed to lock webviews: {e}"))?
.insert(service.clone(), webview_id.clone()); .insert(service.clone(), webview_id.clone());
Ok(WebviewAuthResponse { Ok(WebviewAuthResponse {
@ -582,8 +582,8 @@ pub async fn extract_cookies_from_webview(
} }
// Encrypt and store cookies in database // Encrypt and store cookies in database
let cookies_json = serde_json::to_string(&cookies) let cookies_json =
.map_err(|e| format!("Failed to serialize cookies: {}", e))?; serde_json::to_string(&cookies).map_err(|e| format!("Failed to serialize cookies: {e}"))?;
let encrypted_cookies = crate::integrations::auth::encrypt_token(&cookies_json)?; let encrypted_cookies = crate::integrations::auth::encrypt_token(&cookies_json)?;
let token_hash = { let token_hash = {
@ -597,7 +597,7 @@ pub async fn extract_cookies_from_webview(
let db = app_state let db = app_state
.db .db
.lock() .lock()
.map_err(|e| format!("Failed to lock database: {}", e))?; .map_err(|e| format!("Failed to lock database: {e}"))?;
db.execute( db.execute(
"INSERT OR REPLACE INTO credentials (id, service, token_hash, encrypted_token, created_at, expires_at) "INSERT OR REPLACE INTO credentials (id, service, token_hash, encrypted_token, created_at, expires_at)
@ -611,18 +611,18 @@ pub async fn extract_cookies_from_webview(
None::<String>, // Cookies don't have explicit expiry None::<String>, // Cookies don't have explicit expiry
], ],
) )
.map_err(|e| format!("Failed to store cookies: {}", e))?; .map_err(|e| format!("Failed to store cookies: {e}"))?;
// Close the webview window // Close the webview window
if let Some(webview) = app_handle.get_webview_window(&webview_id) { if let Some(webview) = app_handle.get_webview_window(&webview_id) {
webview webview
.close() .close()
.map_err(|e| format!("Failed to close webview: {}", e))?; .map_err(|e| format!("Failed to close webview: {e}"))?;
} }
Ok(ConnectionResult { Ok(ConnectionResult {
success: true, success: true,
message: format!("{} authentication saved successfully", service), message: format!("{service} authentication saved successfully"),
}) })
} }
@ -669,7 +669,12 @@ pub async fn save_manual_token(
}; };
crate::integrations::servicenow::test_connection(&config).await crate::integrations::servicenow::test_connection(&config).await
} }
_ => return Err(format!("Unknown service: {}", request.service)), _ => {
return Err(format!(
"Unknown service: {service}",
service = request.service
))
}
}; };
// If test fails, don't save the token // If test fails, don't save the token
@ -698,7 +703,7 @@ pub async fn save_manual_token(
let db = app_state let db = app_state
.db .db
.lock() .lock()
.map_err(|e| format!("Failed to lock database: {}", e))?; .map_err(|e| format!("Failed to lock database: {e}"))?;
db.execute( db.execute(
"INSERT OR REPLACE INTO credentials (id, service, token_hash, encrypted_token, created_at, expires_at) "INSERT OR REPLACE INTO credentials (id, service, token_hash, encrypted_token, created_at, expires_at)
@ -712,7 +717,7 @@ pub async fn save_manual_token(
None::<String>, None::<String>,
], ],
) )
.map_err(|e| format!("Failed to store token: {}", e))?; .map_err(|e| format!("Failed to store token: {e}"))?;
// Log audit event // Log audit event
db.execute( db.execute(
@ -732,11 +737,14 @@ pub async fn save_manual_token(
.to_string(), .to_string(),
], ],
) )
.map_err(|e| format!("Failed to log audit event: {}", e))?; .map_err(|e| format!("Failed to log audit event: {e}"))?;
Ok(ConnectionResult { Ok(ConnectionResult {
success: true, success: true,
message: format!("{} token saved and validated successfully", request.service), message: format!(
"{service} token saved and validated successfully",
service = request.service
),
}) })
} }
@ -757,7 +765,7 @@ pub async fn get_fresh_cookies_from_webview(
let webviews = app_state let webviews = app_state
.integration_webviews .integration_webviews
.lock() .lock()
.map_err(|e| format!("Failed to lock webviews: {}", e))?; .map_err(|e| format!("Failed to lock webviews: {e}"))?;
match webviews.get(service) { match webviews.get(service) {
Some(label) => label.clone(), Some(label) => label.clone(),
@ -773,7 +781,7 @@ pub async fn get_fresh_cookies_from_webview(
app_state app_state
.integration_webviews .integration_webviews
.lock() .lock()
.map_err(|e| format!("Failed to lock webviews: {}", e))? .map_err(|e| format!("Failed to lock webviews: {e}"))?
.remove(service); .remove(service);
return Ok(None); return Ok(None);
} }
@ -814,7 +822,7 @@ pub async fn save_integration_config(
let db = app_state let db = app_state
.db .db
.lock() .lock()
.map_err(|e| format!("Failed to lock database: {}", e))?; .map_err(|e| format!("Failed to lock database: {e}"))?;
db.execute( db.execute(
"INSERT OR REPLACE INTO integration_config "INSERT OR REPLACE INTO integration_config
@ -829,7 +837,7 @@ pub async fn save_integration_config(
config.space_key, config.space_key,
], ],
) )
.map_err(|e| format!("Failed to save integration config: {}", e))?; .map_err(|e| format!("Failed to save integration config: {e}"))?;
Ok(()) Ok(())
} }
@ -843,11 +851,11 @@ pub async fn get_integration_config(
let db = app_state let db = app_state
.db .db
.lock() .lock()
.map_err(|e| format!("Failed to lock database: {}", e))?; .map_err(|e| format!("Failed to lock database: {e}"))?;
let mut stmt = db let mut stmt = db
.prepare("SELECT service, base_url, username, project_name, space_key FROM integration_config WHERE service = ?1") .prepare("SELECT service, base_url, username, project_name, space_key FROM integration_config WHERE service = ?1")
.map_err(|e| format!("Failed to prepare query: {}", e))?; .map_err(|e| format!("Failed to prepare query: {e}"))?;
let config = stmt let config = stmt
.query_row([&service], |row| { .query_row([&service], |row| {
@ -860,7 +868,7 @@ pub async fn get_integration_config(
}) })
}) })
.optional() .optional()
.map_err(|e| format!("Failed to query integration config: {}", e))?; .map_err(|e| format!("Failed to query integration config: {e}"))?;
Ok(config) Ok(config)
} }
@ -873,13 +881,13 @@ pub async fn get_all_integration_configs(
let db = app_state let db = app_state
.db .db
.lock() .lock()
.map_err(|e| format!("Failed to lock database: {}", e))?; .map_err(|e| format!("Failed to lock database: {e}"))?;
let mut stmt = db let mut stmt = db
.prepare( .prepare(
"SELECT service, base_url, username, project_name, space_key FROM integration_config", "SELECT service, base_url, username, project_name, space_key FROM integration_config",
) )
.map_err(|e| format!("Failed to prepare query: {}", e))?; .map_err(|e| format!("Failed to prepare query: {e}"))?;
let configs = stmt let configs = stmt
.query_map([], |row| { .query_map([], |row| {
@ -891,9 +899,9 @@ pub async fn get_all_integration_configs(
space_key: row.get(4)?, space_key: row.get(4)?,
}) })
}) })
.map_err(|e| format!("Failed to query integration configs: {}", e))? .map_err(|e| format!("Failed to query integration configs: {e}"))?
.collect::<Result<Vec<_>, _>>() .collect::<Result<Vec<_>, _>>()
.map_err(|e| format!("Failed to collect integration configs: {}", e))?; .map_err(|e| format!("Failed to collect integration configs: {e}"))?;
Ok(configs) Ok(configs)
} }

View File

@ -98,20 +98,23 @@ pub async fn get_audit_log(
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = vec![]; let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = vec![];
if let Some(ref action) = filter.action { if let Some(ref action) = filter.action {
sql.push_str(&format!(" AND action = ?{}", params.len() + 1)); sql.push_str(&format!(" AND action = ?{idx}", idx = params.len() + 1));
params.push(Box::new(action.clone())); params.push(Box::new(action.clone()));
} }
if let Some(ref entity_type) = filter.entity_type { if let Some(ref entity_type) = filter.entity_type {
sql.push_str(&format!(" AND entity_type = ?{}", params.len() + 1)); sql.push_str(&format!(
" AND entity_type = ?{idx}",
idx = params.len() + 1
));
params.push(Box::new(entity_type.clone())); params.push(Box::new(entity_type.clone()));
} }
if let Some(ref entity_id) = filter.entity_id { if let Some(ref entity_id) = filter.entity_id {
sql.push_str(&format!(" AND entity_id = ?{}", params.len() + 1)); sql.push_str(&format!(" AND entity_id = ?{idx}", idx = params.len() + 1));
params.push(Box::new(entity_id.clone())); params.push(Box::new(entity_id.clone()));
} }
sql.push_str(" ORDER BY timestamp DESC"); sql.push_str(" ORDER BY timestamp DESC");
sql.push_str(&format!(" LIMIT ?{}", params.len() + 1)); sql.push_str(&format!(" LIMIT ?{idx}", idx = params.len() + 1));
params.push(Box::new(limit)); params.push(Box::new(limit));
let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect(); let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();

View File

@ -88,7 +88,7 @@ pub async fn exchange_code(
.form(&params) .form(&params)
.send() .send()
.await .await
.map_err(|e| format!("Failed to send token exchange request: {}", e))?; .map_err(|e| format!("Failed to send token exchange request: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -101,7 +101,7 @@ pub async fn exchange_code(
let body: serde_json::Value = resp let body: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse token response: {}", e))?; .map_err(|e| format!("Failed to parse token response: {e}"))?;
let access_token = body["access_token"] let access_token = body["access_token"]
.as_str() .as_str()
@ -208,7 +208,7 @@ pub fn encrypt_token(token: &str) -> Result<String, String> {
// Encrypt // Encrypt
let ciphertext = cipher let ciphertext = cipher
.encrypt(nonce, token.as_bytes()) .encrypt(nonce, token.as_bytes())
.map_err(|e| format!("Encryption failed: {}", e))?; .map_err(|e| format!("Encryption failed: {e}"))?;
// Prepend nonce to ciphertext // Prepend nonce to ciphertext
let mut result = nonce_bytes.to_vec(); let mut result = nonce_bytes.to_vec();
@ -232,7 +232,7 @@ pub fn decrypt_token(encrypted: &str) -> Result<String, String> {
use base64::Engine; use base64::Engine;
let data = STANDARD let data = STANDARD
.decode(encrypted) .decode(encrypted)
.map_err(|e| format!("Base64 decode failed: {}", e))?; .map_err(|e| format!("Base64 decode failed: {e}"))?;
if data.len() < 12 { if data.len() < 12 {
return Err("Invalid encrypted data: too short".to_string()); return Err("Invalid encrypted data: too short".to_string());
@ -256,9 +256,9 @@ pub fn decrypt_token(encrypted: &str) -> Result<String, String> {
// Decrypt // Decrypt
let plaintext = cipher let plaintext = cipher
.decrypt(nonce, ciphertext) .decrypt(nonce, ciphertext)
.map_err(|e| format!("Decryption failed: {}", e))?; .map_err(|e| format!("Decryption failed: {e}"))?;
String::from_utf8(plaintext).map_err(|e| format!("Invalid UTF-8: {}", e)) String::from_utf8(plaintext).map_err(|e| format!("Invalid UTF-8: {e}"))
} }
#[cfg(test)] #[cfg(test)]

View File

@ -32,7 +32,7 @@ pub async fn test_connection(config: &AzureDevOpsConfig) -> Result<ConnectionRes
.bearer_auth(&config.access_token) .bearer_auth(&config.access_token)
.send() .send()
.await .await
.map_err(|e| format!("Connection failed: {}", e))?; .map_err(|e| format!("Connection failed: {e}"))?;
if resp.status().is_success() { if resp.status().is_success() {
Ok(ConnectionResult { Ok(ConnectionResult {
@ -42,7 +42,10 @@ pub async fn test_connection(config: &AzureDevOpsConfig) -> Result<ConnectionRes
} else { } else {
Ok(ConnectionResult { Ok(ConnectionResult {
success: false, success: false,
message: format!("Connection failed with status: {}", resp.status()), message: format!(
"Connection failed with status: {status}",
status = resp.status()
),
}) })
} }
} }
@ -74,7 +77,7 @@ pub async fn search_work_items(
.json(&body) .json(&body)
.send() .send()
.await .await
.map_err(|e| format!("WIQL query failed: {}", e))?; .map_err(|e| format!("WIQL query failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -87,7 +90,7 @@ pub async fn search_work_items(
let wiql_result: serde_json::Value = resp let wiql_result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse WIQL response: {}", e))?; .map_err(|e| format!("Failed to parse WIQL response: {e}"))?;
let work_item_refs = wiql_result["workItems"] let work_item_refs = wiql_result["workItems"]
.as_array() .as_array()
@ -119,7 +122,7 @@ pub async fn search_work_items(
.bearer_auth(&config.access_token) .bearer_auth(&config.access_token)
.send() .send()
.await .await
.map_err(|e| format!("Failed to fetch work item details: {}", e))?; .map_err(|e| format!("Failed to fetch work item details: {e}"))?;
if !detail_resp.status().is_success() { if !detail_resp.status().is_success() {
return Err(format!( return Err(format!(
@ -131,7 +134,7 @@ pub async fn search_work_items(
let details: serde_json::Value = detail_resp let details: serde_json::Value = detail_resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse work item details: {}", e))?; .map_err(|e| format!("Failed to parse work item details: {e}"))?;
let work_items = details["value"] let work_items = details["value"]
.as_array() .as_array()
@ -199,7 +202,7 @@ pub async fn create_work_item(
.json(&operations) .json(&operations)
.send() .send()
.await .await
.map_err(|e| format!("Failed to create work item: {}", e))?; .map_err(|e| format!("Failed to create work item: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -212,7 +215,7 @@ pub async fn create_work_item(
let result: serde_json::Value = resp let result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let work_item_id = result["id"].as_i64().unwrap_or(0); let work_item_id = result["id"].as_i64().unwrap_or(0);
let work_item_url = format!( let work_item_url = format!(
@ -223,7 +226,7 @@ pub async fn create_work_item(
Ok(TicketResult { Ok(TicketResult {
id: work_item_id.to_string(), id: work_item_id.to_string(),
ticket_number: format!("#{}", work_item_id), ticket_number: format!("#{work_item_id}"),
url: work_item_url, url: work_item_url,
}) })
} }
@ -246,7 +249,7 @@ pub async fn get_work_item(
.bearer_auth(&config.access_token) .bearer_auth(&config.access_token)
.send() .send()
.await .await
.map_err(|e| format!("Failed to get work item: {}", e))?; .map_err(|e| format!("Failed to get work item: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -259,7 +262,7 @@ pub async fn get_work_item(
let result: serde_json::Value = resp let result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
Ok(WorkItem { Ok(WorkItem {
id: result["id"] id: result["id"]
@ -305,7 +308,7 @@ pub async fn update_work_item(
.json(&updates) .json(&updates)
.send() .send()
.await .await
.map_err(|e| format!("Failed to update work item: {}", e))?; .map_err(|e| format!("Failed to update work item: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -318,7 +321,7 @@ pub async fn update_work_item(
let result: serde_json::Value = resp let result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let updated_work_item_id = result["id"].as_i64().unwrap_or(work_item_id); let updated_work_item_id = result["id"].as_i64().unwrap_or(work_item_id);
let work_item_url = format!( let work_item_url = format!(
@ -329,7 +332,7 @@ pub async fn update_work_item(
Ok(TicketResult { Ok(TicketResult {
id: updated_work_item_id.to_string(), id: updated_work_item_id.to_string(),
ticket_number: format!("#{}", updated_work_item_id), ticket_number: format!("#{updated_work_item_id}"),
url: work_item_url, url: work_item_url,
}) })
} }

View File

@ -35,7 +35,7 @@ pub async fn test_connection(config: &ConfluenceConfig) -> Result<ConnectionResu
.bearer_auth(&config.access_token) .bearer_auth(&config.access_token)
.send() .send()
.await .await
.map_err(|e| format!("Connection failed: {}", e))?; .map_err(|e| format!("Connection failed: {e}"))?;
if resp.status().is_success() { if resp.status().is_success() {
Ok(ConnectionResult { Ok(ConnectionResult {
@ -45,7 +45,10 @@ pub async fn test_connection(config: &ConfluenceConfig) -> Result<ConnectionResu
} else { } else {
Ok(ConnectionResult { Ok(ConnectionResult {
success: false, success: false,
message: format!("Connection failed with status: {}", resp.status()), message: format!(
"Connection failed with status: {status}",
status = resp.status()
),
}) })
} }
} }
@ -61,7 +64,7 @@ pub async fn list_spaces(config: &ConfluenceConfig) -> Result<Vec<Space>, String
.query(&[("limit", "100")]) .query(&[("limit", "100")])
.send() .send()
.await .await
.map_err(|e| format!("Failed to list spaces: {}", e))?; .map_err(|e| format!("Failed to list spaces: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -74,7 +77,7 @@ pub async fn list_spaces(config: &ConfluenceConfig) -> Result<Vec<Space>, String
let body: serde_json::Value = resp let body: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let spaces = body["results"] let spaces = body["results"]
.as_array() .as_array()
@ -105,7 +108,7 @@ pub async fn search_pages(
let mut cql = format!("text ~ \"{}\"", query); let mut cql = format!("text ~ \"{}\"", query);
if let Some(space) = space_key { if let Some(space) = space_key {
cql = format!("{} AND space = {}", cql, space); cql = format!("{cql} AND space = {space}");
} }
let resp = client let resp = client
@ -114,7 +117,7 @@ pub async fn search_pages(
.query(&[("cql", &cql), ("limit", &"50".to_string())]) .query(&[("cql", &cql), ("limit", &"50".to_string())])
.send() .send()
.await .await
.map_err(|e| format!("Search failed: {}", e))?; .map_err(|e| format!("Search failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -127,7 +130,7 @@ pub async fn search_pages(
let body: serde_json::Value = resp let body: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let pages = body["results"] let pages = body["results"]
.as_array() .as_array()
@ -140,7 +143,7 @@ pub async fn search_pages(
id: page_id.to_string(), id: page_id.to_string(),
title: p["title"].as_str()?.to_string(), title: p["title"].as_str()?.to_string(),
space_key: p["space"]["key"].as_str()?.to_string(), space_key: p["space"]["key"].as_str()?.to_string(),
url: format!("{}/pages/viewpage.action?pageId={}", base_url, page_id), url: format!("{base_url}/pages/viewpage.action?pageId={page_id}"),
}) })
}) })
.collect(); .collect();
@ -182,7 +185,7 @@ pub async fn publish_page(
.json(&body) .json(&body)
.send() .send()
.await .await
.map_err(|e| format!("Failed to publish page: {}", e))?; .map_err(|e| format!("Failed to publish page: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -195,7 +198,7 @@ pub async fn publish_page(
let result: serde_json::Value = resp let result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let page_id = result["id"].as_str().unwrap_or(""); let page_id = result["id"].as_str().unwrap_or("");
let page_url = format!( let page_url = format!(
@ -245,7 +248,7 @@ pub async fn update_page(
.json(&body) .json(&body)
.send() .send()
.await .await
.map_err(|e| format!("Failed to update page: {}", e))?; .map_err(|e| format!("Failed to update page: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -258,7 +261,7 @@ pub async fn update_page(
let result: serde_json::Value = resp let result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let updated_page_id = result["id"].as_str().unwrap_or(page_id); let updated_page_id = result["id"].as_str().unwrap_or(page_id);
let page_url = format!( let page_url = format!(

View File

@ -34,7 +34,7 @@ pub async fn test_connection(config: &ServiceNowConfig) -> Result<ConnectionResu
.query(&[("sysparm_limit", "1")]) .query(&[("sysparm_limit", "1")])
.send() .send()
.await .await
.map_err(|e| format!("Connection failed: {}", e))?; .map_err(|e| format!("Connection failed: {e}"))?;
if resp.status().is_success() { if resp.status().is_success() {
Ok(ConnectionResult { Ok(ConnectionResult {
@ -44,7 +44,10 @@ pub async fn test_connection(config: &ServiceNowConfig) -> Result<ConnectionResu
} else { } else {
Ok(ConnectionResult { Ok(ConnectionResult {
success: false, success: false,
message: format!("Connection failed with status: {}", resp.status()), message: format!(
"Connection failed with status: {status}",
status = resp.status()
),
}) })
} }
} }
@ -60,7 +63,7 @@ pub async fn search_incidents(
config.instance_url.trim_end_matches('/') config.instance_url.trim_end_matches('/')
); );
let sysparm_query = format!("short_descriptionLIKE{}", query); let sysparm_query = format!("short_descriptionLIKE{query}");
let resp = client let resp = client
.get(&url) .get(&url)
@ -71,7 +74,7 @@ pub async fn search_incidents(
]) ])
.send() .send()
.await .await
.map_err(|e| format!("Search failed: {}", e))?; .map_err(|e| format!("Search failed: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -84,7 +87,7 @@ pub async fn search_incidents(
let body: serde_json::Value = resp let body: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let incidents = body["result"] let incidents = body["result"]
.as_array() .as_array()
@ -134,7 +137,7 @@ pub async fn create_incident(
.json(&body) .json(&body)
.send() .send()
.await .await
.map_err(|e| format!("Failed to create incident: {}", e))?; .map_err(|e| format!("Failed to create incident: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -147,7 +150,7 @@ pub async fn create_incident(
let result: serde_json::Value = resp let result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let incident_number = result["result"]["number"].as_str().unwrap_or(""); let incident_number = result["result"]["number"].as_str().unwrap_or("");
let sys_id = result["result"]["sys_id"].as_str().unwrap_or(""); let sys_id = result["result"]["sys_id"].as_str().unwrap_or("");
@ -198,13 +201,13 @@ pub async fn get_incident(
.basic_auth(&config.username, Some(&config.password)); .basic_auth(&config.username, Some(&config.password));
if use_query { if use_query {
request = request.query(&[("sysparm_query", &format!("number={}", incident_id))]); request = request.query(&[("sysparm_query", &format!("number={incident_id}"))]);
} }
let resp = request let resp = request
.send() .send()
.await .await
.map_err(|e| format!("Failed to get incident: {}", e))?; .map_err(|e| format!("Failed to get incident: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -217,7 +220,7 @@ pub async fn get_incident(
let body: serde_json::Value = resp let body: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let incident_data = if use_query { let incident_data = if use_query {
// Query response has "result" array // Query response has "result" array
@ -273,7 +276,7 @@ pub async fn update_incident(
.json(&updates) .json(&updates)
.send() .send()
.await .await
.map_err(|e| format!("Failed to update incident: {}", e))?; .map_err(|e| format!("Failed to update incident: {e}"))?;
if !resp.status().is_success() { if !resp.status().is_success() {
return Err(format!( return Err(format!(
@ -286,7 +289,7 @@ pub async fn update_incident(
let result: serde_json::Value = resp let result: serde_json::Value = resp
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse response: {}", e))?; .map_err(|e| format!("Failed to parse response: {e}"))?;
let incident_number = result["result"]["number"].as_str().unwrap_or(""); let incident_number = result["result"]["number"].as_str().unwrap_or("");
let updated_sys_id = result["result"]["sys_id"].as_str().unwrap_or(sys_id); let updated_sys_id = result["result"]["sys_id"].as_str().unwrap_or(sys_id);

View File

@ -32,7 +32,7 @@ pub async fn authenticate_with_webview(
format!("{}/_signin", base_url.trim_end_matches('/')) format!("{}/_signin", base_url.trim_end_matches('/'))
} }
"servicenow" => format!("{}/login.do", base_url.trim_end_matches('/')), "servicenow" => format!("{}/login.do", base_url.trim_end_matches('/')),
_ => return Err(format!("Unknown service: {}", service)), _ => return Err(format!("Unknown service: {service}")),
}; };
tracing::info!( tracing::info!(
@ -42,17 +42,13 @@ pub async fn authenticate_with_webview(
); );
// Create persistent browser window (stays open for browsing and fresh cookie extraction) // Create persistent browser window (stays open for browsing and fresh cookie extraction)
let webview_label = format!("{}-auth", service); let webview_label = format!("{service}-auth");
let webview = WebviewWindowBuilder::new( let webview = WebviewWindowBuilder::new(
&app_handle, &app_handle,
&webview_label, &webview_label,
WebviewUrl::External( WebviewUrl::External(login_url.parse().map_err(|e| format!("Invalid URL: {e}"))?),
login_url
.parse()
.map_err(|e| format!("Invalid URL: {}", e))?,
),
) )
.title(format!("{} Browser (TFTSR)", service)) .title(format!("{service} Browser (TFTSR)"))
.inner_size(1000.0, 800.0) .inner_size(1000.0, 800.0)
.min_inner_size(800.0, 600.0) .min_inner_size(800.0, 600.0)
.resizable(true) .resizable(true)
@ -60,7 +56,7 @@ pub async fn authenticate_with_webview(
.focused(true) .focused(true)
.visible(true) .visible(true)
.build() .build()
.map_err(|e| format!("Failed to create webview: {}", e))?; .map_err(|e| format!("Failed to create webview: {e}"))?;
// Focus the window // Focus the window
webview webview
@ -147,7 +143,7 @@ pub async fn extract_cookies_via_ipc<R: tauri::Runtime>(
match serde_json::from_str::<serde_json::Value>(payload_str) { match serde_json::from_str::<serde_json::Value>(payload_str) {
Ok(payload) => { Ok(payload) => {
if let Some(error_msg) = payload.get("error").and_then(|e| e.as_str()) { if let Some(error_msg) = payload.get("error").and_then(|e| e.as_str()) {
let _ = tx.try_send(Err(format!("JavaScript error: {}", error_msg))); let _ = tx.try_send(Err(format!("JavaScript error: {error_msg}")));
return; return;
} }
@ -159,7 +155,7 @@ pub async fn extract_cookies_via_ipc<R: tauri::Runtime>(
} }
Err(e) => { Err(e) => {
tracing::error!("Failed to parse cookies: {}", e); tracing::error!("Failed to parse cookies: {}", e);
let _ = tx.try_send(Err(format!("Failed to parse cookies: {}", e))); let _ = tx.try_send(Err(format!("Failed to parse cookies: {e}")));
} }
} }
} else { } else {
@ -168,7 +164,7 @@ pub async fn extract_cookies_via_ipc<R: tauri::Runtime>(
} }
Err(e) => { Err(e) => {
tracing::error!("Failed to parse event payload: {}", e); tracing::error!("Failed to parse event payload: {}", e);
let _ = tx.try_send(Err(format!("Failed to parse event payload: {}", e))); let _ = tx.try_send(Err(format!("Failed to parse event payload: {e}")));
} }
} }
}); });
@ -176,7 +172,7 @@ pub async fn extract_cookies_via_ipc<R: tauri::Runtime>(
// Inject the script into the webview // Inject the script into the webview
webview_window webview_window
.eval(cookie_extraction_script) .eval(cookie_extraction_script)
.map_err(|e| format!("Failed to inject cookie extraction script: {}", e))?; .map_err(|e| format!("Failed to inject cookie extraction script: {e}"))?;
tracing::info!("Cookie extraction script injected, waiting for response..."); tracing::info!("Cookie extraction script injected, waiting for response...");
@ -199,7 +195,7 @@ pub async fn extract_cookies_via_ipc<R: tauri::Runtime>(
pub fn cookies_to_header(cookies: &[Cookie]) -> String { pub fn cookies_to_header(cookies: &[Cookie]) -> String {
cookies cookies
.iter() .iter()
.map(|c| format!("{}={}", c.name, c.value)) .map(|c| format!("{name}={value}", name = c.name, value = c.value))
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join("; ") .join("; ")
} }