From 107fee8853dbbceec7260e0141b5df17352d9dea Mon Sep 17 00:00:00 2001 From: Shaun Arman Date: Sun, 19 Apr 2026 18:02:38 -0500 Subject: [PATCH] feat: add timeline_events table, model, and CRUD commands - Add migration 017_create_timeline_events with indexes - Update TimelineEvent struct with issue_id, metadata, UTC string timestamps - Add TimelineEvent::new() constructor with UUIDv7 - Add timeline_events field to IssueDetail - Rewrite add_timeline_event to write to new table + audit_log (dual-write) - Add get_timeline_events command for ordered retrieval - Update get_issue to load timeline_events - Update delete_issue to clean up timeline_events - Register get_timeline_events in generate_handler - Add migration tests for table, indexes, and cascade delete - Fix flaky derive_aes_key test (env var race condition in parallel tests) --- src-tauri/src/commands/db.rs | 101 +++++++++++++++++++++++++---- src-tauri/src/db/migrations.rs | 90 +++++++++++++++++++++++++ src-tauri/src/db/models.rs | 18 ++++- src-tauri/src/docs/postmortem.rs | 1 + src-tauri/src/docs/rca.rs | 1 + src-tauri/src/integrations/auth.rs | 7 +- src-tauri/src/lib.rs | 1 + 7 files changed, 201 insertions(+), 18 deletions(-) diff --git a/src-tauri/src/commands/db.rs b/src-tauri/src/commands/db.rs index 4419b222..033feca0 100644 --- a/src-tauri/src/commands/db.rs +++ b/src-tauri/src/commands/db.rs @@ -2,7 +2,7 @@ use tauri::State; use crate::db::models::{ AiConversation, AiMessage, ImageAttachment, Issue, IssueDetail, IssueFilter, IssueSummary, - IssueUpdate, LogFile, ResolutionStep, + IssueUpdate, LogFile, ResolutionStep, TimelineEvent, }; use crate::state::AppState; @@ -171,12 +171,35 @@ pub async fn get_issue( .filter_map(|r| r.ok()) .collect(); + // Load timeline events + let mut te_stmt = db + .prepare( + "SELECT id, issue_id, event_type, description, metadata, created_at \ + FROM timeline_events WHERE issue_id = ?1 ORDER BY created_at ASC", + ) + .map_err(|e| e.to_string())?; + let timeline_events: Vec 
<TimelineEvent> = te_stmt
+        .query_map([&issue_id], |row| {
+            Ok(TimelineEvent {
+                id: row.get(0)?,
+                issue_id: row.get(1)?,
+                event_type: row.get(2)?,
+                description: row.get(3)?,
+                metadata: row.get(4)?,
+                created_at: row.get(5)?,
+            })
+        })
+        .map_err(|e| e.to_string())?
+        .filter_map(|r| r.ok())
+        .collect();
+
     Ok(IssueDetail {
         issue,
         log_files,
         image_attachments,
         resolution_steps,
         conversations,
+        timeline_events,
     })
 }
 
@@ -302,6 +325,11 @@ pub async fn delete_issue(issue_id: String, state: State<'_, AppState>) -> Resul
         [&issue_id],
     )
     .map_err(|e| e.to_string())?;
+    db.execute(
+        "DELETE FROM timeline_events WHERE issue_id = ?1",
+        [&issue_id],
+    )
+    .map_err(|e| e.to_string())?;
     db.execute("DELETE FROM issues WHERE id = ?1", [&issue_id])
         .map_err(|e| e.to_string())?;
 
@@ -510,22 +538,40 @@ pub async fn add_timeline_event(
     issue_id: String,
     event_type: String,
     description: String,
+    metadata: Option<String>,
     state: State<'_, AppState>,
-) -> Result<(), String> {
-    // Use audit_log for timeline tracking
-    let db = state.db.lock().map_err(|e| e.to_string())?;
-    let entry = crate::db::models::AuditEntry::new(
-        event_type,
-        "issue".to_string(),
+) -> Result<TimelineEvent, String> {
+    let event = TimelineEvent::new(
         issue_id.clone(),
-        serde_json::json!({ "description": description }).to_string(),
+        event_type.clone(),
+        description.clone(),
+        metadata.unwrap_or_else(|| "{}".to_string()),
     );
+
+    let db = state.db.lock().map_err(|e| e.to_string())?;
+
+    // Write to timeline_events table
+    db.execute(
+        "INSERT INTO timeline_events (id, issue_id, event_type, description, metadata, created_at) \
+         VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
+        rusqlite::params![
+            event.id,
+            event.issue_id,
+            event.event_type,
+            event.description,
+            event.metadata,
+            event.created_at,
+        ],
+    )
+    .map_err(|e| e.to_string())?;
+
+    // Dual-write to audit_log for security hash chain
     crate::audit::log::write_audit_event(
         &db,
-        &entry.action,
-        &entry.entity_type,
-        &entry.entity_id,
-        &entry.details,
+        &event_type,
+        "issue",
+        &issue_id,
+        &serde_json::json!({ "description": description }).to_string(),
     )
     .map_err(|_| "Failed to write security audit entry".to_string())?;
 
@@ -537,5 +583,34 @@
     )
     .map_err(|e| e.to_string())?;
 
-    Ok(())
+    Ok(event)
+}
+
+#[tauri::command]
+pub async fn get_timeline_events(
+    issue_id: String,
+    state: State<'_, AppState>,
+) -> Result<Vec<TimelineEvent>, String> {
+    let db = state.db.lock().map_err(|e| e.to_string())?;
+    let mut stmt = db
+        .prepare(
+            "SELECT id, issue_id, event_type, description, metadata, created_at \
+             FROM timeline_events WHERE issue_id = ?1 ORDER BY created_at ASC",
+        )
+        .map_err(|e| e.to_string())?;
+    let events = stmt
+        .query_map([&issue_id], |row| {
+            Ok(TimelineEvent {
+                id: row.get(0)?,
+                issue_id: row.get(1)?,
+                event_type: row.get(2)?,
+                description: row.get(3)?,
+                metadata: row.get(4)?,
+                created_at: row.get(5)?,
+            })
+        })
+        .map_err(|e| e.to_string())?
+        .filter_map(|r| r.ok())
+        .collect();
+    Ok(events)
+}
diff --git a/src-tauri/src/db/migrations.rs b/src-tauri/src/db/migrations.rs
index 12056f10..36bfbdc2 100644
--- a/src-tauri/src/db/migrations.rs
+++ b/src-tauri/src/db/migrations.rs
@@ -199,6 +199,20 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
             "016_add_created_at",
             "ALTER TABLE ai_providers ADD COLUMN created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%d %H:%M:%S', 'now'))",
         ),
+        (
+            "017_create_timeline_events",
+            "CREATE TABLE IF NOT EXISTS timeline_events (
+                id TEXT PRIMARY KEY,
+                issue_id TEXT NOT NULL,
+                event_type TEXT NOT NULL,
+                description TEXT NOT NULL DEFAULT '',
+                metadata TEXT NOT NULL DEFAULT '{}',
+                created_at TEXT NOT NULL,
+                FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE
+            );
+            CREATE INDEX idx_timeline_events_issue ON timeline_events(issue_id);
+            CREATE INDEX idx_timeline_events_time ON timeline_events(created_at);",
+        ),
     ];
 
     for (name, sql) in migrations {
@@ -698,4 +712,80 @@ mod tests {
         // Should not fail even though columns already exist
         run_migrations(&conn).unwrap();
     }
+
+    #[test]
+    fn test_timeline_events_table_exists() {
+        let conn = setup_test_db();
+        let count: i64 = conn
+            .query_row(
+                "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='timeline_events'",
+                [],
+                |r| r.get(0),
+            )
+            .unwrap();
+        assert_eq!(count, 1);
+
+        let mut stmt = conn.prepare("PRAGMA table_info(timeline_events)").unwrap();
+        let columns: Vec<String> = stmt
+            .query_map([], |row| row.get::<_, String>(1))
+            .unwrap()
+            .collect::<Result<Vec<_>, _>>()
+            .unwrap();
+
+        assert!(columns.contains(&"id".to_string()));
+        assert!(columns.contains(&"issue_id".to_string()));
+        assert!(columns.contains(&"event_type".to_string()));
+        assert!(columns.contains(&"description".to_string()));
+        assert!(columns.contains(&"metadata".to_string()));
+        assert!(columns.contains(&"created_at".to_string()));
+    }
+
+    #[test]
+    fn test_timeline_events_cascade_delete() {
+        let conn = setup_test_db();
+        conn.execute("PRAGMA foreign_keys = ON", []).unwrap();
+
+        let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string();
+        conn.execute(
+            "INSERT INTO issues (id, title, created_at, updated_at) VALUES (?1, ?2, ?3, ?4)",
+            rusqlite::params!["issue-1", "Test Issue", now, now],
+        )
+        .unwrap();
+
+        conn.execute(
+            "INSERT INTO timeline_events (id, issue_id, event_type, description, metadata, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
+            rusqlite::params!["te-1", "issue-1", "triage_started", "Started triage", "{}", "2025-01-15 10:00:00 UTC"],
+        )
+        .unwrap();
+
+        // Verify event exists
+        let count: i64 = conn
+            .query_row("SELECT COUNT(*) FROM timeline_events", [], |r| r.get(0))
+            .unwrap();
+        assert_eq!(count, 1);
+
+        // Delete issue — cascade should remove timeline event
+        conn.execute("DELETE FROM issues WHERE id = 'issue-1'", [])
+            .unwrap();
+
+        let count: i64 = conn
+            .query_row("SELECT COUNT(*) FROM timeline_events", [], |r| r.get(0))
+            .unwrap();
+        assert_eq!(count, 0);
+    }
+
+    #[test]
+    fn test_timeline_events_indexes() {
+        let conn =
setup_test_db();
+        let mut stmt = conn
+            .prepare("SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='timeline_events'")
+            .unwrap();
+        let indexes: Vec<String> = stmt
+            .query_map([], |row| row.get(0))
+            .unwrap()
+            .filter_map(|r| r.ok())
+            .collect();
+        assert!(indexes.contains(&"idx_timeline_events_issue".to_string()));
+        assert!(indexes.contains(&"idx_timeline_events_time".to_string()));
+    }
 }
diff --git a/src-tauri/src/db/models.rs b/src-tauri/src/db/models.rs
index 1524c587..d118719b 100644
--- a/src-tauri/src/db/models.rs
+++ b/src-tauri/src/db/models.rs
@@ -47,6 +47,7 @@ pub struct IssueDetail {
     pub image_attachments: Vec<ImageAttachment>,
     pub resolution_steps: Vec<ResolutionStep>,
     pub conversations: Vec<AiConversation>,
+    pub timeline_events: Vec<TimelineEvent>,
 }
 
 /// Lightweight row returned by list/search commands.
@@ -121,9 +122,24 @@ pub struct FiveWhyEntry {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct TimelineEvent {
     pub id: String,
+    pub issue_id: String,
     pub event_type: String,
     pub description: String,
-    pub created_at: i64,
+    pub metadata: String,
+    pub created_at: String,
+}
+
+impl TimelineEvent {
+    pub fn new(issue_id: String, event_type: String, description: String, metadata: String) -> Self {
+        TimelineEvent {
+            id: Uuid::now_v7().to_string(),
+            issue_id,
+            event_type,
+            description,
+            metadata,
+            created_at: chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(),
+        }
+    }
 }
 
 // ─── Log File ───────────────────────────────────────────────────────────────
diff --git a/src-tauri/src/docs/postmortem.rs b/src-tauri/src/docs/postmortem.rs
index abddaaf2..7009b55f 100644
--- a/src-tauri/src/docs/postmortem.rs
+++ b/src-tauri/src/docs/postmortem.rs
@@ -188,6 +188,7 @@ mod tests {
                 created_at: "2025-02-10 09:00:00".to_string(),
             }],
             conversations: vec![],
+            timeline_events: vec![],
         }
     }
 
diff --git a/src-tauri/src/docs/rca.rs b/src-tauri/src/docs/rca.rs
index f6d508bc..06643f10 100644
--- a/src-tauri/src/docs/rca.rs
+++ b/src-tauri/src/docs/rca.rs
@@ -194,6 +194,7 @@ mod tests {
                 },
             ],
conversations: vec![], + timeline_events: vec![], } } diff --git a/src-tauri/src/integrations/auth.rs b/src-tauri/src/integrations/auth.rs index c91048b4..4a2e9d6e 100644 --- a/src-tauri/src/integrations/auth.rs +++ b/src-tauri/src/integrations/auth.rs @@ -629,11 +629,10 @@ mod tests { #[test] fn test_derive_aes_key_is_stable_for_same_input() { - std::env::set_var("TFTSR_ENCRYPTION_KEY", "stable-test-key"); - let k1 = derive_aes_key().unwrap(); - let k2 = derive_aes_key().unwrap(); + // Use deterministic helper to avoid env var race conditions in parallel tests + let k1 = derive_aes_key_from_str("stable-test-key").unwrap(); + let k2 = derive_aes_key_from_str("stable-test-key").unwrap(); assert_eq!(k1, k2); - std::env::remove_var("TFTSR_ENCRYPTION_KEY"); } // Test helper functions that accept key directly (bypass env var) diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index cdf319ba..5ee2269e 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -69,6 +69,7 @@ pub fn run() { commands::db::add_five_why, commands::db::update_five_why, commands::db::add_timeline_event, + commands::db::get_timeline_events, // Analysis / PII commands::analysis::upload_log_file, commands::analysis::upload_log_file_by_content,