Compare commits

...

4 Commits

Author SHA1 Message Date
05f87a7bff Merge pull request 'fix: add missing ai_providers columns and fix linux-amd64 build' (#41) from fix/ai-provider-migration-issue into master
Some checks failed
Auto Tag / autotag (push) Successful in 14s
Auto Tag / wiki-sync (push) Successful in 14s
Build CI Docker Images / windows-cross (push) Successful in 11s
Build CI Docker Images / linux-arm64 (push) Successful in 10s
Auto Tag / changelog (push) Successful in 54s
Auto Tag / build-macos-arm64 (push) Successful in 2m57s
Auto Tag / build-linux-amd64 (push) Failing after 13m36s
Auto Tag / build-linux-arm64 (push) Successful in 15m7s
Auto Tag / build-windows-amd64 (push) Successful in 15m35s
Build CI Docker Images / linux-amd64 (push) Failing after 7s
Reviewed-on: #41
2026-04-13 17:00:50 +00:00
Shaun Arman
8e1d43da43 fix: address critical AI review issues
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 28s
Test / frontend-typecheck (pull_request) Successful in 1m29s
Test / frontend-tests (pull_request) Successful in 1m31s
PR Review Automation / review (pull_request) Successful in 3m28s
Test / rust-clippy (pull_request) Successful in 4m29s
Test / rust-tests (pull_request) Successful in 5m42s
- Fix linuxdeploy AppImage extraction using --appimage-extract
- Remove 'has no column named' from duplicate column error handling
- Use strftime instead of datetime for created_at default format
2026-04-13 08:50:34 -05:00
Shaun Arman
2d7aac8413 fix: address AI review findings
All checks were successful
Test / rust-fmt-check (pull_request) Successful in 15s
Test / frontend-typecheck (pull_request) Successful in 1m21s
Test / frontend-tests (pull_request) Successful in 1m25s
PR Review Automation / review (pull_request) Successful in 3m32s
Test / rust-clippy (pull_request) Successful in 4m1s
Test / rust-tests (pull_request) Successful in 5m18s
- Add -L flag to curl for linuxdeploy redirects
- Split migration 015 into 015_add_use_datastore_upload and 016_add_created_at
- Use separate execute calls for ALTER TABLE statements
- Add idempotency test for migration 015
- Use bool type for use_datastore_upload instead of i64
2026-04-13 08:38:43 -05:00
Shaun Arman
84c69fbea8 fix: add missing ai_providers columns and fix linux-amd64 build
Some checks failed
Test / rust-fmt-check (pull_request) Successful in 15s
Test / rust-clippy (pull_request) Failing after 17s
Test / frontend-typecheck (pull_request) Successful in 1m23s
Test / frontend-tests (pull_request) Successful in 1m23s
PR Review Automation / review (pull_request) Successful in 3m16s
Test / rust-tests (pull_request) Successful in 4m19s
- Add migration 015 to add use_datastore_upload and created_at columns
- Handle column-already-exists errors gracefully
- Update Dockerfile to install linuxdeploy for AppImage bundling
- Add fuse dependency for AppImage support
2026-04-13 08:22:08 -05:00
2 changed files with 151 additions and 4 deletions

View File

@ -2,7 +2,7 @@
# All system dependencies are installed once here; CI jobs skip apt-get entirely.
# Rebuild when: Rust toolchain version changes, webkit2gtk/gtk major version changes,
# Node.js major version changes, OpenSSL major version changes (used via OPENSSL_STATIC=1),
# or Tauri CLI version changes that affect bundler system deps.
# Tauri CLI version changes that affect bundler system deps, or linuxdeploy is needed.
# Tag format: rust<VER>-node<VER>
FROM rust:1.88-slim
@ -20,9 +20,18 @@ RUN apt-get update -qq \
perl \
jq \
git \
fuse \
&& curl -fsSL https://deb.nodesource.com/setup_22.x | bash - \
&& apt-get install -y --no-install-recommends nodejs \
&& rm -rf /var/lib/apt/lists/*
# Install linuxdeploy for AppImage bundling (required for Tauri 2.x)
# Download linuxdeploy AppImage and extract to get the binary
RUN curl -Ls https://github.com/tauri-apps/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage -o /tmp/linuxdeploy.AppImage \
&& chmod +x /tmp/linuxdeploy.AppImage \
&& /tmp/linuxdeploy.AppImage --appimage-extract \
&& mv squashfs-root/usr/bin/linuxdeploy /usr/local/bin/ \
&& rm -rf /tmp/linuxdeploy.AppImage squashfs-root
RUN rustup target add x86_64-unknown-linux-gnu \
&& rustup component add rustfmt clippy

View File

@ -191,6 +191,14 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);",
),
(
"015_add_use_datastore_upload",
"ALTER TABLE ai_providers ADD COLUMN use_datastore_upload INTEGER DEFAULT 0",
),
(
"016_add_created_at",
"ALTER TABLE ai_providers ADD COLUMN created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%d %H:%M:%S', 'now'))",
),
];
for (name, sql) in migrations {
@ -201,10 +209,27 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
if !already_applied {
// FTS5 virtual table creation can be skipped if FTS5 is not compiled in
if let Err(e) = conn.execute_batch(sql) {
if name.contains("fts") {
// Also handle column-already-exists errors for migrations 015-016
if name.contains("fts") {
if let Err(e) = conn.execute_batch(sql) {
tracing::warn!("FTS5 not available, skipping: {e}");
} else {
}
} else if name.ends_with("_add_use_datastore_upload")
|| name.ends_with("_add_created_at")
{
// Use execute for ALTER TABLE (SQLite only allows one statement per command)
// Skip error if column already exists (SQLITE_ERROR with "duplicate column name")
if let Err(e) = conn.execute(sql, []) {
let err_str = e.to_string();
if err_str.contains("duplicate column name") {
tracing::info!("Column may already exist, skipping migration {name}: {e}");
} else {
return Err(e.into());
}
}
} else {
// Use execute_batch for other migrations (FTS5, CREATE TABLE, etc.)
if let Err(e) = conn.execute_batch(sql) {
return Err(e.into());
}
}
@ -560,4 +585,117 @@ mod tests {
assert_eq!(encrypted_key, "encrypted_key_123");
assert_eq!(model, "gpt-4o");
}
#[test]
fn test_add_missing_columns_to_existing_table() {
    // Helper: list the column names of ai_providers via PRAGMA table_info
    // (column index 1 of each PRAGMA row is the column name).
    fn column_names(conn: &Connection) -> Vec<String> {
        let mut stmt = conn.prepare("PRAGMA table_info(ai_providers)").unwrap();
        stmt.query_map([], |row| row.get::<_, String>(1))
            .unwrap()
            .collect::<Result<Vec<_>, _>>()
            .unwrap()
    }

    let conn = Connection::open_in_memory().unwrap();
    // Simulate a legacy table created before migrations 015/016 existed:
    // no use_datastore_upload and no created_at column.
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS ai_providers (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL UNIQUE,
            provider_type TEXT NOT NULL,
            api_url TEXT NOT NULL,
            encrypted_api_key TEXT NOT NULL,
            model TEXT NOT NULL,
            max_tokens INTEGER,
            temperature REAL,
            custom_endpoint_path TEXT,
            custom_auth_header TEXT,
            custom_auth_prefix TEXT,
            api_format TEXT,
            user_id TEXT,
            updated_at TEXT NOT NULL DEFAULT (datetime('now'))
        );",
    )
    .unwrap();

    // Verify the schema BEFORE running migrations.
    let columns = column_names(&conn);
    assert!(columns.contains(&"name".to_string()));
    assert!(columns.contains(&"model".to_string()));
    assert!(!columns.contains(&"use_datastore_upload".to_string()));
    assert!(!columns.contains(&"created_at".to_string()));

    // Run migrations (015 and 016 should add the missing columns).
    run_migrations(&conn).unwrap();

    // Verify the schema AFTER running migrations.
    let columns = column_names(&conn);
    assert!(columns.contains(&"name".to_string()));
    assert!(columns.contains(&"model".to_string()));
    assert!(columns.contains(&"use_datastore_upload".to_string()));
    assert!(columns.contains(&"created_at".to_string()));

    // Verify data integrity: a row inserted without the new columns
    // should pick up the column defaults (0 / current timestamp).
    conn.execute(
        "INSERT INTO ai_providers (id, name, provider_type, api_url, encrypted_api_key, model)
         VALUES (?, ?, ?, ?, ?, ?)",
        rusqlite::params![
            "test-provider-2",
            "Test Provider",
            "openai",
            "https://api.example.com",
            "encrypted_key_456",
            "gpt-3.5-turbo"
        ],
    )
    .unwrap();
    let (name, use_datastore_upload, created_at): (String, bool, String) = conn
        .query_row(
            "SELECT name, use_datastore_upload, created_at FROM ai_providers WHERE name = ?1",
            ["Test Provider"],
            |r| Ok((r.get(0)?, r.get(1)?, r.get(2)?)),
        )
        .unwrap();
    assert_eq!(name, "Test Provider");
    assert!(!use_datastore_upload);
    // Idiomatic emptiness check (clippy::len_zero) instead of `len() > 0`.
    assert!(!created_at.is_empty());
}
// Re-running the migrations against a schema that already contains both
// columns must succeed: the ALTER TABLE duplicate-column error is expected
// to be detected and skipped rather than propagated.
#[test]
fn test_idempotent_add_missing_columns() {
    let conn = Connection::open_in_memory().unwrap();
    // Schema as it looks after migrations 015/016 have already been applied.
    let schema = "CREATE TABLE IF NOT EXISTS ai_providers (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL UNIQUE,
            provider_type TEXT NOT NULL,
            api_url TEXT NOT NULL,
            encrypted_api_key TEXT NOT NULL,
            model TEXT NOT NULL,
            max_tokens INTEGER,
            temperature REAL,
            custom_endpoint_path TEXT,
            custom_auth_header TEXT,
            custom_auth_prefix TEXT,
            api_format TEXT,
            user_id TEXT,
            use_datastore_upload INTEGER DEFAULT 0,
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT NOT NULL DEFAULT (datetime('now'))
        );";
    conn.execute_batch(schema).unwrap();
    // A second application of the migrations must be a no-op, not an error.
    run_migrations(&conn).unwrap();
}
}