fix: add missing ai_providers columns and fix linux-amd64 build
Some checks failed
Test / rust-fmt-check (pull_request) Successful in 15s
Test / rust-clippy (pull_request) Failing after 17s
Test / frontend-typecheck (pull_request) Successful in 1m23s
Test / frontend-tests (pull_request) Successful in 1m23s
PR Review Automation / review (pull_request) Successful in 3m16s
Test / rust-tests (pull_request) Successful in 4m19s

- Add migration 015 to add use_datastore_upload and created_at columns
- Handle column-already-exists errors gracefully
- Update Dockerfile to install linuxdeploy for AppImage bundling
- Add fuse dependency for AppImage support
This commit is contained in:
Shaun Arman 2026-04-13 08:22:08 -05:00
parent 9bc570774a
commit 84c69fbea8
2 changed files with 104 additions and 1 deletions

View File

@@ -2,7 +2,7 @@
# All system dependencies are installed once here; CI jobs skip apt-get entirely.
# Rebuild when: Rust toolchain version changes, webkit2gtk/gtk major version changes,
# Node.js major version changes, OpenSSL major version changes (used via OPENSSL_STATIC=1),
# or Tauri CLI version changes that affect bundler system deps.
# Tauri CLI version changes that affect bundler system deps, or linuxdeploy is needed.
# Tag format: rust<VER>-node<VER>
FROM rust:1.88-slim
@@ -20,9 +20,14 @@ RUN apt-get update -qq \
perl \
jq \
git \
fuse \
&& curl -fsSL https://deb.nodesource.com/setup_22.x | bash - \
&& apt-get install -y --no-install-recommends nodejs \
&& rm -rf /var/lib/apt/lists/*
# Install linuxdeploy for AppImage bundling (required for Tauri 2.x).
# NOTE(review): linuxdeploy's continuous AppImage is published under the
# linuxdeploy org (github.com/linuxdeploy/linuxdeploy), not tauri-apps —
# the tauri-apps URL 404s and curl -f fails the build. Verify against the
# linuxdeploy releases page. Running the AppImage still needs FUSE (the
# `fuse` package installed above) or APPIMAGE_EXTRACT_AND_RUN=1 at runtime.
RUN curl -fsSL https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage -o /usr/local/bin/linuxdeploy \
    && chmod +x /usr/local/bin/linuxdeploy
RUN rustup target add x86_64-unknown-linux-gnu \
&& rustup component add rustfmt clippy

View File

@@ -191,6 +191,11 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);",
),
(
"015_add_ai_provider_missing_columns",
"ALTER TABLE ai_providers ADD COLUMN use_datastore_upload INTEGER DEFAULT 0;
ALTER TABLE ai_providers ADD COLUMN created_at TEXT NOT NULL DEFAULT (datetime('now'));",
),
];
for (name, sql) in migrations {
@@ -201,9 +206,20 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
if !already_applied {
// FTS5 virtual table creation can be skipped if FTS5 is not compiled in
// Also handle column-already-exists errors for migration 015
if let Err(e) = conn.execute_batch(sql) {
if name.contains("fts") {
tracing::warn!("FTS5 not available, skipping: {e}");
} else if *name == "015_add_ai_provider_missing_columns" {
// Skip error if columns already exist (e.g., from earlier migration or manual creation)
let err_str = e.to_string();
if err_str.contains("duplicate column name")
|| err_str.contains("has no column named")
{
tracing::info!("Columns may already exist, skipping migration 015: {e}");
} else {
return Err(e.into());
}
} else {
return Err(e.into());
}
@@ -560,4 +576,86 @@ mod tests {
assert_eq!(encrypted_key, "encrypted_key_123");
assert_eq!(model, "gpt-4o");
}
#[test]
fn test_add_missing_columns_to_existing_table() {
let conn = Connection::open_in_memory().unwrap();
// Simulate existing table without use_datastore_upload and created_at
conn.execute_batch(
"CREATE TABLE IF NOT EXISTS ai_providers (
id TEXT PRIMARY KEY,
name TEXT NOT NULL UNIQUE,
provider_type TEXT NOT NULL,
api_url TEXT NOT NULL,
encrypted_api_key TEXT NOT NULL,
model TEXT NOT NULL,
max_tokens INTEGER,
temperature REAL,
custom_endpoint_path TEXT,
custom_auth_header TEXT,
custom_auth_prefix TEXT,
api_format TEXT,
user_id TEXT,
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);",
)
.unwrap();
// Verify columns BEFORE migration
let mut stmt = conn.prepare("PRAGMA table_info(ai_providers)").unwrap();
let columns: Vec<String> = stmt
.query_map([], |row| row.get::<_, String>(1))
.unwrap()
.collect::<Result<Vec<_>, _>>()
.unwrap();
assert!(columns.contains(&"name".to_string()));
assert!(columns.contains(&"model".to_string()));
assert!(!columns.contains(&"use_datastore_upload".to_string()));
assert!(!columns.contains(&"created_at".to_string()));
// Run migrations (should apply 015 to add missing columns)
run_migrations(&conn).unwrap();
// Verify columns AFTER migration
let mut stmt = conn.prepare("PRAGMA table_info(ai_providers)").unwrap();
let columns: Vec<String> = stmt
.query_map([], |row| row.get::<_, String>(1))
.unwrap()
.collect::<Result<Vec<_>, _>>()
.unwrap();
assert!(columns.contains(&"name".to_string()));
assert!(columns.contains(&"model".to_string()));
assert!(columns.contains(&"use_datastore_upload".to_string()));
assert!(columns.contains(&"created_at".to_string()));
// Verify data integrity - existing rows should have default values
conn.execute(
"INSERT INTO ai_providers (id, name, provider_type, api_url, encrypted_api_key, model)
VALUES (?, ?, ?, ?, ?, ?)",
rusqlite::params![
"test-provider-2",
"Test Provider",
"openai",
"https://api.example.com",
"encrypted_key_456",
"gpt-3.5-turbo"
],
)
.unwrap();
let (name, use_datastore_upload, created_at): (String, i64, String) = conn
.query_row(
"SELECT name, use_datastore_upload, created_at FROM ai_providers WHERE name = ?1",
["Test Provider"],
|r| Ok((r.get(0)?, r.get(1)?, r.get(2)?)),
)
.unwrap();
assert_eq!(name, "Test Provider");
assert_eq!(use_datastore_upload, 0);
assert!(created_at.len() > 0);
}
}