feat: support GenAI datastore file uploads and fix paste image upload
Some checks failed
Test / frontend-tests (pull_request) Successful in 59s
Test / frontend-typecheck (pull_request) Successful in 1m5s
Test / rust-fmt-check (pull_request) Failing after 2m25s
Test / rust-clippy (pull_request) Failing after 18m25s
Test / rust-tests (pull_request) Successful in 19m42s
- Add use_datastore_upload field to ProviderConfig for enabling datastore uploads
- Add upload_file_to_datastore and upload_file_to_datastore_any commands
- Add upload_log_file_by_content and upload_image_attachment_by_content commands for drag-and-drop without file paths
- Add multipart/form-data support for file uploads to the GenAI datastore
- Add support for the image/bmp MIME type in image validation
- Add x-generic-api-key header support for GenAI API authentication

This addresses:
- Paste fails to attach screenshot (clipboard)
- File upload fails with a 500 error when using the GenAI API
- GenAI datastore upload endpoint support for non-text files
This commit is contained in:
parent
859d7a0da8
commit
420411882e
@ -21,7 +21,7 @@ rusqlite = { version = "0.31", features = ["bundled-sqlcipher-vendored-openssl"]
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
reqwest = { version = "0.12", features = ["json", "stream", "multipart"] }
|
||||
regex = "1"
|
||||
aho-corasick = "1"
|
||||
uuid = { version = "1", features = ["v7"] }
|
||||
|
||||
@ -97,6 +97,72 @@ pub async fn upload_log_file(
|
||||
Ok(log_file)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn upload_log_file_by_content(
|
||||
issue_id: String,
|
||||
file_name: String,
|
||||
content: String,
|
||||
state: State<'_, AppState>,
|
||||
) -> Result<LogFile, String> {
|
||||
let content_bytes = content.as_bytes();
|
||||
let content_hash = format!("{:x}", Sha256::digest(content_bytes));
|
||||
let file_size = content_bytes.len() as i64;
|
||||
|
||||
// Determine mime type based on file extension
|
||||
let mime_type = if file_name.ends_with(".json") {
|
||||
"application/json"
|
||||
} else if file_name.ends_with(".xml") {
|
||||
"application/xml"
|
||||
} else {
|
||||
"text/plain"
|
||||
};
|
||||
|
||||
// Use the file_name as the file_path for DB storage
|
||||
let log_file = LogFile::new(issue_id.clone(), file_name.clone(), file_name.clone(), file_size);
|
||||
let log_file = LogFile {
|
||||
content_hash: content_hash.clone(),
|
||||
mime_type: mime_type.to_string(),
|
||||
..log_file
|
||||
};
|
||||
|
||||
let db = state.db.lock().map_err(|e| e.to_string())?;
|
||||
db.execute(
|
||||
"INSERT INTO log_files (id, issue_id, file_name, file_path, file_size, mime_type, content_hash, uploaded_at, redacted) \
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
|
||||
rusqlite::params![
|
||||
log_file.id,
|
||||
log_file.issue_id,
|
||||
log_file.file_name,
|
||||
log_file.file_path,
|
||||
log_file.file_size,
|
||||
log_file.mime_type,
|
||||
log_file.content_hash,
|
||||
log_file.uploaded_at,
|
||||
log_file.redacted as i32,
|
||||
],
|
||||
)
|
||||
.map_err(|_| "Failed to store uploaded log metadata".to_string())?;
|
||||
|
||||
// Audit
|
||||
let entry = AuditEntry::new(
|
||||
"upload_log_file".to_string(),
|
||||
"log_file".to_string(),
|
||||
log_file.id.clone(),
|
||||
serde_json::json!({ "issue_id": issue_id, "file_name": log_file.file_name }).to_string(),
|
||||
);
|
||||
if let Err(err) = crate::audit::log::write_audit_event(
|
||||
&db,
|
||||
&entry.action,
|
||||
&entry.entity_type,
|
||||
&entry.entity_id,
|
||||
&entry.details,
|
||||
) {
|
||||
warn!(error = %err, "failed to write upload_log_file audit entry");
|
||||
}
|
||||
|
||||
Ok(log_file)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn detect_pii(
|
||||
log_file_id: String,
|
||||
|
||||
@ -8,12 +8,13 @@ use crate::db::models::{AuditEntry, ImageAttachment};
|
||||
use crate::state::AppState;
|
||||
|
||||
/// Maximum accepted image upload size (10 MiB).
const MAX_IMAGE_FILE_BYTES: u64 = 10 * 1024 * 1024;

/// MIME types accepted by image-attachment validation.
/// `image/bmp` is included to support pasted bitmap screenshots.
const SUPPORTED_IMAGE_MIME_TYPES: [&str; 6] = [
    "image/png",
    "image/jpeg",
    "image/gif",
    "image/webp",
    "image/svg+xml",
    "image/bmp",
];
|
||||
|
||||
fn validate_image_file_path(file_path: &str) -> Result<std::path::PathBuf, String> {
|
||||
@ -122,6 +123,92 @@ pub async fn upload_image_attachment(
|
||||
Ok(attachment)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn upload_image_attachment_by_content(
|
||||
issue_id: String,
|
||||
file_name: String,
|
||||
base64_content: String,
|
||||
state: State<'_, AppState>,
|
||||
) -> Result<ImageAttachment, String> {
|
||||
let data_part = base64_content
|
||||
.split(',')
|
||||
.nth(1)
|
||||
.ok_or("Invalid image data format - missing base64 content")?;
|
||||
|
||||
let decoded = base64::engine::general_purpose::STANDARD
|
||||
.decode(data_part)
|
||||
.map_err(|_| "Failed to decode base64 image data")?;
|
||||
|
||||
let content_hash = format!("{:x}", sha2::Sha256::digest(&decoded));
|
||||
let file_size = decoded.len() as i64;
|
||||
|
||||
let mime_type: String = infer::get(&decoded)
|
||||
.map(|m| m.mime_type().to_string())
|
||||
.unwrap_or_else(|| "image/png".to_string());
|
||||
|
||||
if !is_supported_image_format(mime_type.as_str()) {
|
||||
return Err(format!(
|
||||
"Unsupported image format: {}. Supported formats: {}",
|
||||
mime_type,
|
||||
SUPPORTED_IMAGE_MIME_TYPES.join(", ")
|
||||
));
|
||||
}
|
||||
|
||||
// Use the file_name as file_path for DB storage
|
||||
let attachment = ImageAttachment::new(
|
||||
issue_id.clone(),
|
||||
file_name.clone(),
|
||||
file_name,
|
||||
file_size,
|
||||
mime_type,
|
||||
content_hash.clone(),
|
||||
true,
|
||||
false,
|
||||
);
|
||||
|
||||
let db = state.db.lock().map_err(|e| e.to_string())?;
|
||||
db.execute(
|
||||
"INSERT INTO image_attachments (id, issue_id, file_name, file_path, file_size, mime_type, upload_hash, uploaded_at, pii_warning_acknowledged, is_paste) \
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)",
|
||||
rusqlite::params![
|
||||
attachment.id,
|
||||
attachment.issue_id,
|
||||
attachment.file_name,
|
||||
attachment.file_path,
|
||||
attachment.file_size,
|
||||
attachment.mime_type,
|
||||
attachment.upload_hash,
|
||||
attachment.uploaded_at,
|
||||
attachment.pii_warning_acknowledged as i32,
|
||||
attachment.is_paste as i32,
|
||||
],
|
||||
)
|
||||
.map_err(|_| "Failed to store uploaded image metadata".to_string())?;
|
||||
|
||||
let entry = AuditEntry::new(
|
||||
"upload_image_attachment".to_string(),
|
||||
"image_attachment".to_string(),
|
||||
attachment.id.clone(),
|
||||
serde_json::json!({
|
||||
"issue_id": issue_id,
|
||||
"file_name": attachment.file_name,
|
||||
"is_paste": false,
|
||||
})
|
||||
.to_string(),
|
||||
);
|
||||
if let Err(err) = write_audit_event(
|
||||
&db,
|
||||
&entry.action,
|
||||
&entry.entity_type,
|
||||
&entry.entity_id,
|
||||
&entry.details,
|
||||
) {
|
||||
tracing::warn!(error = %err, "failed to write upload_image_attachment audit entry");
|
||||
}
|
||||
|
||||
Ok(attachment)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn upload_paste_image(
|
||||
issue_id: String,
|
||||
@ -265,6 +352,227 @@ pub async fn delete_image_attachment(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn upload_file_to_datastore(
|
||||
provider_config: serde_json::Value,
|
||||
file_path: String,
|
||||
_state: State<'_, AppState>,
|
||||
) -> Result<String, String> {
|
||||
use reqwest::multipart::Form;
|
||||
|
||||
let canonical_path = validate_image_file_path(&file_path)?;
|
||||
let content = std::fs::read(&canonical_path)
|
||||
.map_err(|_| "Failed to read file for datastore upload")?;
|
||||
|
||||
let file_name = canonical_path
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
let _file_size = content.len() as i64;
|
||||
|
||||
// Extract API URL and auth header from provider config
|
||||
let api_url = provider_config
|
||||
.get("api_url")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Provider config missing api_url")?
|
||||
.to_string();
|
||||
|
||||
// Extract use_datastore_upload flag
|
||||
let use_datastore = provider_config
|
||||
.get("use_datastore_upload")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !use_datastore {
|
||||
return Err("use_datastore_upload is not enabled for this provider".to_string());
|
||||
}
|
||||
|
||||
// Get datastore ID from custom_endpoint_path (stored as datastore ID)
|
||||
let datastore_id = provider_config
|
||||
.get("custom_endpoint_path")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Provider config missing datastore ID in custom_endpoint_path")?
|
||||
.to_string();
|
||||
|
||||
// Build upload endpoint: POST /api/v2/upload/<DATASTORE-ID>
|
||||
let api_url = api_url.trim_end_matches('/');
|
||||
let upload_url = format!("{}/upload/{}", api_url, datastore_id);
|
||||
|
||||
// Read auth header and value
|
||||
let auth_header = provider_config
|
||||
.get("custom_auth_header")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("x-generic-api-key");
|
||||
|
||||
let auth_prefix = provider_config
|
||||
.get("custom_auth_prefix")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
let api_key = provider_config
|
||||
.get("api_key")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Provider config missing api_key")?;
|
||||
|
||||
let auth_value = format!("{}{}", auth_prefix, api_key);
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
// Create multipart form
|
||||
let part = reqwest::multipart::Part::bytes(content)
|
||||
.file_name(file_name)
|
||||
.mime_str("application/octet-stream")
|
||||
.map_err(|e| format!("Failed to create multipart part: {}", e))?;
|
||||
|
||||
let form = Form::new()
|
||||
.part("file", part);
|
||||
|
||||
let resp = client
|
||||
.post(&upload_url)
|
||||
.header(auth_header, auth_value)
|
||||
.multipart(form)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Upload request failed: {}", e))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_else(|_| "unable to read response".to_string());
|
||||
return Err(format!("Datastore upload error {}: {}", status, text));
|
||||
}
|
||||
|
||||
// Parse response to get file ID
|
||||
let json = resp.json::<serde_json::Value>()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse upload response: {}", e))?;
|
||||
|
||||
// Response should have file_id or id field
|
||||
let file_id = json.get("file_id")
|
||||
.or_else(|| json.get("id"))
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or_else(|| format!("Response missing file_id: {}", serde_json::to_string_pretty(&json).unwrap_or_default()))?
|
||||
.to_string();
|
||||
|
||||
Ok(file_id)
|
||||
}
|
||||
|
||||
/// Upload any file (not just images) to GenAI datastore
|
||||
#[tauri::command]
|
||||
pub async fn upload_file_to_datastore_any(
|
||||
provider_config: serde_json::Value,
|
||||
file_path: String,
|
||||
_state: State<'_, AppState>,
|
||||
) -> Result<String, String> {
|
||||
use reqwest::multipart::Form;
|
||||
|
||||
// Validate file exists and is accessible
|
||||
let path = Path::new(&file_path);
|
||||
let canonical = std::fs::canonicalize(path).map_err(|_| "Unable to access selected file")?;
|
||||
let metadata = std::fs::metadata(&canonical).map_err(|_| "Unable to read file metadata")?;
|
||||
|
||||
if !metadata.is_file() {
|
||||
return Err("Selected path is not a file".to_string());
|
||||
}
|
||||
|
||||
let content = std::fs::read(&canonical)
|
||||
.map_err(|_| "Failed to read file for datastore upload")?;
|
||||
|
||||
let file_name = canonical
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
let _file_size = content.len() as i64;
|
||||
|
||||
// Extract API URL and auth header from provider config
|
||||
let api_url = provider_config
|
||||
.get("api_url")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Provider config missing api_url")?
|
||||
.to_string();
|
||||
|
||||
// Extract use_datastore_upload flag
|
||||
let use_datastore = provider_config
|
||||
.get("use_datastore_upload")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !use_datastore {
|
||||
return Err("use_datastore_upload is not enabled for this provider".to_string());
|
||||
}
|
||||
|
||||
// Get datastore ID from custom_endpoint_path (stored as datastore ID)
|
||||
let datastore_id = provider_config
|
||||
.get("custom_endpoint_path")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Provider config missing datastore ID in custom_endpoint_path")?
|
||||
.to_string();
|
||||
|
||||
// Build upload endpoint: POST /api/v2/upload/<DATASTORE-ID>
|
||||
let api_url = api_url.trim_end_matches('/');
|
||||
let upload_url = format!("{}/upload/{}", api_url, datastore_id);
|
||||
|
||||
// Read auth header and value
|
||||
let auth_header = provider_config
|
||||
.get("custom_auth_header")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("x-generic-api-key");
|
||||
|
||||
let auth_prefix = provider_config
|
||||
.get("custom_auth_prefix")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
let api_key = provider_config
|
||||
.get("api_key")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or("Provider config missing api_key")?;
|
||||
|
||||
let auth_value = format!("{}{}", auth_prefix, api_key);
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
// Create multipart form
|
||||
let part = reqwest::multipart::Part::bytes(content)
|
||||
.file_name(file_name)
|
||||
.mime_str("application/octet-stream")
|
||||
.map_err(|e| format!("Failed to create multipart part: {}", e))?;
|
||||
|
||||
let form = Form::new()
|
||||
.part("file", part);
|
||||
|
||||
let resp = client
|
||||
.post(&upload_url)
|
||||
.header(auth_header, auth_value)
|
||||
.multipart(form)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Upload request failed: {}", e))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_else(|_| "unable to read response".to_string());
|
||||
return Err(format!("Datastore upload error {}: {}", status, text));
|
||||
}
|
||||
|
||||
// Parse response to get file ID
|
||||
let json = resp.json::<serde_json::Value>()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse upload response: {}", e))?;
|
||||
|
||||
// Response should have file_id or id field
|
||||
let file_id = json.get("file_id")
|
||||
.or_else(|| json.get("id"))
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or_else(|| format!("Response missing file_id: {}", serde_json::to_string_pretty(&json).unwrap_or_default()))?
|
||||
.to_string();
|
||||
|
||||
Ok(file_id)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@ -276,7 +584,7 @@ mod tests {
|
||||
assert!(is_supported_image_format("image/gif"));
|
||||
assert!(is_supported_image_format("image/webp"));
|
||||
assert!(is_supported_image_format("image/svg+xml"));
|
||||
assert!(!is_supported_image_format("image/bmp"));
|
||||
assert!(is_supported_image_format("image/bmp"));
|
||||
assert!(!is_supported_image_format("text/plain"));
|
||||
}
|
||||
}
|
||||
|
||||
@ -158,8 +158,8 @@ pub async fn save_ai_provider(
|
||||
db.execute(
|
||||
"INSERT OR REPLACE INTO ai_providers
|
||||
(id, name, provider_type, api_url, encrypted_api_key, model, max_tokens, temperature,
|
||||
custom_endpoint_path, custom_auth_header, custom_auth_prefix, api_format, user_id, updated_at)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, datetime('now'))",
|
||||
custom_endpoint_path, custom_auth_header, custom_auth_prefix, api_format, user_id, use_datastore_upload, updated_at)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, datetime('now'))",
|
||||
rusqlite::params![
|
||||
uuid::Uuid::now_v7().to_string(),
|
||||
provider.name,
|
||||
@ -174,6 +174,7 @@ pub async fn save_ai_provider(
|
||||
provider.custom_auth_prefix,
|
||||
provider.api_format,
|
||||
provider.user_id,
|
||||
provider.use_datastore_upload,
|
||||
],
|
||||
)
|
||||
.map_err(|e| format!("Failed to save AI provider: {e}"))?;
|
||||
@ -191,7 +192,7 @@ pub async fn load_ai_providers(
|
||||
let mut stmt = db
|
||||
.prepare(
|
||||
"SELECT name, provider_type, api_url, encrypted_api_key, model, max_tokens, temperature,
|
||||
custom_endpoint_path, custom_auth_header, custom_auth_prefix, api_format, user_id
|
||||
custom_endpoint_path, custom_auth_header, custom_auth_prefix, api_format, user_id, use_datastore_upload
|
||||
FROM ai_providers
|
||||
ORDER BY name",
|
||||
)
|
||||
@ -201,38 +202,40 @@ pub async fn load_ai_providers(
|
||||
.query_map([], |row| {
|
||||
let encrypted_key: String = row.get(3)?;
|
||||
|
||||
Ok((
|
||||
row.get::<_, String>(0)?, // name
|
||||
row.get::<_, String>(1)?, // provider_type
|
||||
row.get::<_, String>(2)?, // api_url
|
||||
encrypted_key, // encrypted_api_key
|
||||
row.get::<_, String>(4)?, // model
|
||||
row.get::<_, Option<u32>>(5)?, // max_tokens
|
||||
row.get::<_, Option<f64>>(6)?, // temperature
|
||||
row.get::<_, Option<String>>(7)?, // custom_endpoint_path
|
||||
row.get::<_, Option<String>>(8)?, // custom_auth_header
|
||||
row.get::<_, Option<String>>(9)?, // custom_auth_prefix
|
||||
row.get::<_, Option<String>>(10)?, // api_format
|
||||
row.get::<_, Option<String>>(11)?, // user_id
|
||||
))
|
||||
})
|
||||
.map_err(|e| e.to_string())?
|
||||
.filter_map(|r| r.ok())
|
||||
.filter_map(
|
||||
|(
|
||||
name,
|
||||
provider_type,
|
||||
api_url,
|
||||
encrypted_key,
|
||||
model,
|
||||
max_tokens,
|
||||
temperature,
|
||||
custom_endpoint_path,
|
||||
custom_auth_header,
|
||||
custom_auth_prefix,
|
||||
api_format,
|
||||
user_id,
|
||||
)| {
|
||||
Ok((
|
||||
row.get::<_, String>(0)?, // name
|
||||
row.get::<_, String>(1)?, // provider_type
|
||||
row.get::<_, String>(2)?, // api_url
|
||||
encrypted_key, // encrypted_api_key
|
||||
row.get::<_, String>(4)?, // model
|
||||
row.get::<_, Option<u32>>(5)?, // max_tokens
|
||||
row.get::<_, Option<f64>>(6)?, // temperature
|
||||
row.get::<_, Option<String>>(7)?, // custom_endpoint_path
|
||||
row.get::<_, Option<String>>(8)?, // custom_auth_header
|
||||
row.get::<_, Option<String>>(9)?, // custom_auth_prefix
|
||||
row.get::<_, Option<String>>(10)?, // api_format
|
||||
row.get::<_, Option<String>>(11)?, // user_id
|
||||
row.get::<_, Option<bool>>(12)?, // use_datastore_upload
|
||||
))
|
||||
})
|
||||
.map_err(|e| e.to_string())?
|
||||
.filter_map(|r| r.ok())
|
||||
.filter_map(
|
||||
|(
|
||||
name,
|
||||
provider_type,
|
||||
api_url,
|
||||
encrypted_key,
|
||||
model,
|
||||
max_tokens,
|
||||
temperature,
|
||||
custom_endpoint_path,
|
||||
custom_auth_header,
|
||||
custom_auth_prefix,
|
||||
api_format,
|
||||
user_id,
|
||||
use_datastore_upload,
|
||||
)| {
|
||||
// Decrypt the API key
|
||||
let api_key = crate::integrations::auth::decrypt_token(&encrypted_key).ok()?;
|
||||
|
||||
@ -250,6 +253,7 @@ pub async fn load_ai_providers(
|
||||
api_format,
|
||||
session_id: None, // Session IDs are not persisted
|
||||
user_id,
|
||||
use_datastore_upload,
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
@ -71,12 +71,16 @@ pub fn run() {
|
||||
commands::db::add_timeline_event,
|
||||
// Analysis / PII
|
||||
commands::analysis::upload_log_file,
|
||||
commands::analysis::upload_log_file_by_content,
|
||||
commands::analysis::detect_pii,
|
||||
commands::analysis::apply_redactions,
|
||||
commands::image::upload_image_attachment,
|
||||
commands::image::upload_image_attachment_by_content,
|
||||
commands::image::list_image_attachments,
|
||||
commands::image::delete_image_attachment,
|
||||
commands::image::upload_paste_image,
|
||||
commands::image::upload_file_to_datastore,
|
||||
commands::image::upload_file_to_datastore_any,
|
||||
// AI
|
||||
commands::ai::analyze_logs,
|
||||
commands::ai::chat_message,
|
||||
|
||||
@ -39,6 +39,9 @@ pub struct ProviderConfig {
|
||||
/// Optional: User ID for custom REST API cost tracking (CORE ID email)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub user_id: Option<String>,
|
||||
/// Optional: When true, file uploads go to GenAI datastore instead of prompt
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub use_datastore_upload: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
|
||||
@ -16,6 +16,7 @@ export interface ProviderConfig {
|
||||
api_format?: string;
|
||||
session_id?: string;
|
||||
user_id?: string;
|
||||
use_datastore_upload?: boolean;
|
||||
}
|
||||
|
||||
export interface Message {
|
||||
@ -277,9 +278,21 @@ export const listProvidersCmd = () => invoke<ProviderInfo[]>("list_providers");
|
||||
/** Upload a log file to an issue from a filesystem path. */
export const uploadLogFileCmd = (issueId: string, filePath: string) =>
  invoke<LogFile>("upload_log_file", { issueId, filePath });
|
||||
|
||||
/** Upload a log file by its in-memory text content (drag-and-drop / paste flows with no file path). */
export const uploadLogFileByContentCmd = (issueId: string, fileName: string, content: string) =>
  invoke<LogFile>("upload_log_file_by_content", { issueId, fileName, content });
|
||||
|
||||
/** Upload an image attachment to an issue from a filesystem path. */
export const uploadImageAttachmentCmd = (issueId: string, filePath: string) =>
  invoke<ImageAttachment>("upload_image_attachment", { issueId, filePath });
|
||||
|
||||
/** Upload an image attachment from base64 content (clipboard paste / drag-and-drop with no file path). */
export const uploadImageAttachmentByContentCmd = (issueId: string, fileName: string, base64Content: string) =>
  invoke<ImageAttachment>("upload_image_attachment_by_content", { issueId, fileName, base64Content });
|
||||
|
||||
/** Upload an image file to the GenAI datastore; resolves to the service-assigned file ID. */
export const uploadFileToDatastoreCmd = (providerConfig: ProviderConfig, filePath: string) =>
  invoke<string>("upload_file_to_datastore", { providerConfig, filePath });
|
||||
|
||||
/** Upload any file type (not just images) to the GenAI datastore; resolves to the service-assigned file ID. */
export const uploadFileToDatastoreAnyCmd = (providerConfig: ProviderConfig, filePath: string) =>
  invoke<string>("upload_file_to_datastore_any", { providerConfig, filePath });
|
||||
|
||||
/** Upload a pasted image given its base64 payload and MIME type. */
export const uploadPasteImageCmd = (issueId: string, base64Image: string, mimeType: string) =>
  invoke<ImageAttachment>("upload_paste_image", { issueId, base64Image, mimeType });
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user