fix: address AI review findings
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 15s
Test / frontend-typecheck (pull_request) Successful in 1m21s
Test / frontend-tests (pull_request) Successful in 1m25s
PR Review Automation / review (pull_request) Successful in 3m32s
Test / rust-clippy (pull_request) Successful in 4m1s
Test / rust-tests (pull_request) Successful in 5m18s

- Add -L flag to curl for linuxdeploy redirects
- Split migration 015 into 015_add_use_datastore_upload and 016_add_created_at
- Use separate execute calls for ALTER TABLE statements
- Add idempotency test for migration 015
- Use bool type for use_datastore_upload instead of i64
This commit is contained in:
Shaun Arman 2026-04-13 08:38:43 -05:00
parent 84c69fbea8
commit 2d7aac8413
2 changed files with 56 additions and 13 deletions

View File

@@ -26,7 +26,8 @@ RUN apt-get update -qq \
&& rm -rf /var/lib/apt/lists/*
# Install linuxdeploy for AppImage bundling (required for Tauri 2.x)
RUN curl -fsSL https://github.com/tauri-apps/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage -o /usr/local/bin/linuxdeploy \
# Download linuxdeploy from the official linuxdeploy repository (the previous
# tauri-apps URL was wrong). Flags: -f fail on HTTP errors, -sS silent but
# show errors, -L follow redirects (GitHub release assets redirect to a CDN).
# Note: -L is already part of -fsSL, so no separate -L flag is needed.
RUN curl -fsSL https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage -o /usr/local/bin/linuxdeploy \
&& chmod +x /usr/local/bin/linuxdeploy
RUN rustup target add x86_64-unknown-linux-gnu \

View File

@@ -192,9 +192,12 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
);",
),
(
"015_add_ai_provider_missing_columns",
"ALTER TABLE ai_providers ADD COLUMN use_datastore_upload INTEGER DEFAULT 0;
ALTER TABLE ai_providers ADD COLUMN created_at TEXT NOT NULL DEFAULT (datetime('now'));",
"015_add_use_datastore_upload",
"ALTER TABLE ai_providers ADD COLUMN use_datastore_upload INTEGER DEFAULT 0",
),
(
"016_add_created_at",
"ALTER TABLE ai_providers ADD COLUMN created_at TEXT NOT NULL DEFAULT (datetime('now'))",
),
];
@@ -206,21 +209,29 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
if !already_applied {
// FTS5 virtual table creation can be skipped if FTS5 is not compiled in
// Also handle column-already-exists errors for migration 015
if let Err(e) = conn.execute_batch(sql) {
if name.contains("fts") {
// Also handle column-already-exists errors for migrations 015-016
if name.contains("fts") {
if let Err(e) = conn.execute_batch(sql) {
tracing::warn!("FTS5 not available, skipping: {e}");
} else if *name == "015_add_ai_provider_missing_columns" {
// Skip error if columns already exist (e.g., from earlier migration or manual creation)
}
} else if name.ends_with("_add_use_datastore_upload")
|| name.ends_with("_add_created_at")
{
// Use execute for ALTER TABLE (SQLite only allows one statement per command)
// Skip error if column already exists
if let Err(e) = conn.execute(sql, []) {
let err_str = e.to_string();
if err_str.contains("duplicate column name")
|| err_str.contains("has no column named")
{
tracing::info!("Columns may already exist, skipping migration 015: {e}");
tracing::info!("Column may already exist, skipping migration {name}: {e}");
} else {
return Err(e.into());
}
} else {
}
} else {
// Use execute_batch for other migrations (FTS5, CREATE TABLE, etc.)
if let Err(e) = conn.execute_batch(sql) {
return Err(e.into());
}
}
@@ -646,7 +657,7 @@ mod tests {
)
.unwrap();
let (name, use_datastore_upload, created_at): (String, i64, String) = conn
let (name, use_datastore_upload, created_at): (String, bool, String) = conn
.query_row(
"SELECT name, use_datastore_upload, created_at FROM ai_providers WHERE name = ?1",
["Test Provider"],
@@ -655,7 +666,38 @@
.unwrap();
assert_eq!(name, "Test Provider");
assert_eq!(use_datastore_upload, 0);
assert!(!use_datastore_upload);
assert!(created_at.len() > 0);
}
#[test]
fn test_idempotent_add_missing_columns() {
    // Regression test: migrations that ALTER TABLE ai_providers must be safe
    // to re-apply when the columns they add are already present (e.g. from an
    // earlier run or manual DDL). Start from a schema that has both columns.
    let prior_schema = "CREATE TABLE IF NOT EXISTS ai_providers (
id TEXT PRIMARY KEY,
name TEXT NOT NULL UNIQUE,
provider_type TEXT NOT NULL,
api_url TEXT NOT NULL,
encrypted_api_key TEXT NOT NULL,
model TEXT NOT NULL,
max_tokens INTEGER,
temperature REAL,
custom_endpoint_path TEXT,
custom_auth_header TEXT,
custom_auth_prefix TEXT,
api_format TEXT,
user_id TEXT,
use_datastore_upload INTEGER DEFAULT 0,
created_at TEXT NOT NULL DEFAULT (datetime('now')),
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);";
    let conn = Connection::open_in_memory().unwrap();
    conn.execute_batch(prior_schema).unwrap();
    // Running the full migration chain over this schema must succeed: the
    // duplicate-column errors from the ALTER TABLE statements are swallowed.
    run_migrations(&conn).unwrap();
}
}