feat: add timeline_events table, model, and CRUD commands

- Add migration 017_create_timeline_events with indexes
- Update TimelineEvent struct with issue_id, metadata, UTC string timestamps
- Add TimelineEvent::new() constructor with UUIDv7
- Add timeline_events field to IssueDetail
- Rewrite add_timeline_event to write to new table + audit_log (dual-write)
- Add get_timeline_events command for ordered retrieval
- Update get_issue to load timeline_events
- Update delete_issue to clean up timeline_events
- Register get_timeline_events in generate_handler
- Add migration tests for table, indexes, and cascade delete
- Fix flaky derive_aes_key test (env var race condition in parallel tests)
This commit is contained in:
Shaun Arman 2026-04-19 18:02:38 -05:00
parent 6d105a70ad
commit 107fee8853
7 changed files with 201 additions and 18 deletions

View File

@ -2,7 +2,7 @@ use tauri::State;
use crate::db::models::{
AiConversation, AiMessage, ImageAttachment, Issue, IssueDetail, IssueFilter, IssueSummary,
IssueUpdate, LogFile, ResolutionStep,
IssueUpdate, LogFile, ResolutionStep, TimelineEvent,
};
use crate::state::AppState;
@ -171,12 +171,35 @@ pub async fn get_issue(
.filter_map(|r| r.ok())
.collect();
// Load timeline events
let mut te_stmt = db
.prepare(
"SELECT id, issue_id, event_type, description, metadata, created_at \
FROM timeline_events WHERE issue_id = ?1 ORDER BY created_at ASC",
)
.map_err(|e| e.to_string())?;
let timeline_events: Vec<TimelineEvent> = te_stmt
.query_map([&issue_id], |row| {
Ok(TimelineEvent {
id: row.get(0)?,
issue_id: row.get(1)?,
event_type: row.get(2)?,
description: row.get(3)?,
metadata: row.get(4)?,
created_at: row.get(5)?,
})
})
.map_err(|e| e.to_string())?
.filter_map(|r| r.ok())
.collect();
Ok(IssueDetail {
issue,
log_files,
image_attachments,
resolution_steps,
conversations,
timeline_events,
})
}
@ -302,6 +325,11 @@ pub async fn delete_issue(issue_id: String, state: State<'_, AppState>) -> Resul
[&issue_id],
)
.map_err(|e| e.to_string())?;
db.execute(
"DELETE FROM timeline_events WHERE issue_id = ?1",
[&issue_id],
)
.map_err(|e| e.to_string())?;
db.execute("DELETE FROM issues WHERE id = ?1", [&issue_id])
.map_err(|e| e.to_string())?;
@ -510,22 +538,40 @@ pub async fn add_timeline_event(
issue_id: String,
event_type: String,
description: String,
metadata: Option<String>,
state: State<'_, AppState>,
) -> Result<(), String> {
// Use audit_log for timeline tracking
let db = state.db.lock().map_err(|e| e.to_string())?;
let entry = crate::db::models::AuditEntry::new(
event_type,
"issue".to_string(),
) -> Result<TimelineEvent, String> {
let event = TimelineEvent::new(
issue_id.clone(),
serde_json::json!({ "description": description }).to_string(),
event_type.clone(),
description.clone(),
metadata.unwrap_or_else(|| "{}".to_string()),
);
let db = state.db.lock().map_err(|e| e.to_string())?;
// Write to timeline_events table
db.execute(
"INSERT INTO timeline_events (id, issue_id, event_type, description, metadata, created_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
rusqlite::params![
event.id,
event.issue_id,
event.event_type,
event.description,
event.metadata,
event.created_at,
],
)
.map_err(|e| e.to_string())?;
// Dual-write to audit_log for security hash chain
crate::audit::log::write_audit_event(
&db,
&entry.action,
&entry.entity_type,
&entry.entity_id,
&entry.details,
&event_type,
"issue",
&issue_id,
&serde_json::json!({ "description": description }).to_string(),
)
.map_err(|_| "Failed to write security audit entry".to_string())?;
@ -537,5 +583,34 @@ pub async fn add_timeline_event(
)
.map_err(|e| e.to_string())?;
Ok(())
Ok(event)
}
#[tauri::command]
pub async fn get_timeline_events(
    issue_id: String,
    state: State<'_, AppState>,
) -> Result<Vec<TimelineEvent>, String> {
    // Acquire the shared SQLite connection guarded by the app-state mutex.
    let db = state.db.lock().map_err(|e| e.to_string())?;
    let mut stmt = db
        .prepare(
            "SELECT id, issue_id, event_type, description, metadata, created_at \
             FROM timeline_events WHERE issue_id = ?1 ORDER BY created_at ASC",
        )
        .map_err(|e| e.to_string())?;
    // Decode every row into a TimelineEvent. Propagate the first row-decode
    // error instead of silently dropping bad rows (`filter_map(|r| r.ok())`
    // would hide data corruption from the caller, which already gets a Result).
    let events = stmt
        .query_map([&issue_id], |row| {
            Ok(TimelineEvent {
                id: row.get(0)?,
                issue_id: row.get(1)?,
                event_type: row.get(2)?,
                description: row.get(3)?,
                metadata: row.get(4)?,
                created_at: row.get(5)?,
            })
        })
        .map_err(|e| e.to_string())?
        .collect::<Result<Vec<_>, _>>()
        .map_err(|e| e.to_string())?;
    Ok(events)
}

View File

@ -199,6 +199,20 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
"016_add_created_at",
"ALTER TABLE ai_providers ADD COLUMN created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%d %H:%M:%S', 'now'))",
),
(
"017_create_timeline_events",
"CREATE TABLE IF NOT EXISTS timeline_events (
id TEXT PRIMARY KEY,
issue_id TEXT NOT NULL,
event_type TEXT NOT NULL,
description TEXT NOT NULL DEFAULT '',
metadata TEXT NOT NULL DEFAULT '{}',
created_at TEXT NOT NULL,
FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE
);
CREATE INDEX idx_timeline_events_issue ON timeline_events(issue_id);
CREATE INDEX idx_timeline_events_time ON timeline_events(created_at);",
),
];
for (name, sql) in migrations {
@ -698,4 +712,80 @@ mod tests {
// Should not fail even though columns already exist
run_migrations(&conn).unwrap();
}
#[test]
fn test_timeline_events_table_exists() {
    let conn = setup_test_db();

    // Migration 017 must have created the table.
    let table_count: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='timeline_events'",
            [],
            |r| r.get(0),
        )
        .unwrap();
    assert_eq!(table_count, 1);

    // PRAGMA table_info: column index 1 is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(timeline_events)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get::<_, String>(1))
        .unwrap()
        .collect::<Result<Vec<_>, _>>()
        .unwrap();

    // Every column of the new schema must be present.
    for expected in ["id", "issue_id", "event_type", "description", "metadata", "created_at"] {
        assert!(columns.contains(&expected.to_string()));
    }
}
#[test]
fn test_timeline_events_cascade_delete() {
    let conn = setup_test_db();
    // SQLite leaves foreign-key enforcement off by default; enable it so the
    // ON DELETE CASCADE clause actually fires on this connection.
    conn.execute("PRAGMA foreign_keys = ON", []).unwrap();

    let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string();
    conn.execute(
        "INSERT INTO issues (id, title, created_at, updated_at) VALUES (?1, ?2, ?3, ?4)",
        rusqlite::params!["issue-1", "Test Issue", now, now],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO timeline_events (id, issue_id, event_type, description, metadata, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
        rusqlite::params!["te-1", "issue-1", "triage_started", "Started triage", "{}", "2025-01-15 10:00:00 UTC"],
    )
    .unwrap();

    // Small helper so both count checks share one query.
    let event_count = |conn: &rusqlite::Connection| -> i64 {
        conn.query_row("SELECT COUNT(*) FROM timeline_events", [], |r| r.get(0))
            .unwrap()
    };
    assert_eq!(event_count(&conn), 1);

    // Deleting the parent issue must cascade to its timeline events.
    conn.execute("DELETE FROM issues WHERE id = 'issue-1'", [])
        .unwrap();
    assert_eq!(event_count(&conn), 0);
}
#[test]
fn test_timeline_events_indexes() {
    let conn = setup_test_db();

    // Gather every index declared on the timeline_events table.
    let mut stmt = conn
        .prepare("SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='timeline_events'")
        .unwrap();
    let index_names: Vec<String> = stmt
        .query_map([], |row| row.get(0))
        .unwrap()
        .filter_map(|r| r.ok())
        .collect();

    // Both query paths (by issue and by time) must be indexed.
    for name in ["idx_timeline_events_issue", "idx_timeline_events_time"] {
        assert!(index_names.contains(&name.to_string()));
    }
}
}

View File

@ -47,6 +47,7 @@ pub struct IssueDetail {
pub image_attachments: Vec<ImageAttachment>,
pub resolution_steps: Vec<ResolutionStep>,
pub conversations: Vec<AiConversation>,
pub timeline_events: Vec<TimelineEvent>,
}
/// Lightweight row returned by list/search commands.
@ -121,9 +122,24 @@ pub struct FiveWhyEntry {
// NOTE(review): this span appears to be diff residue with the +/- markers
// stripped — `pub created_at: i64,` reads as the removed line and
// `pub created_at: String,` as its replacement (the commit message says
// "UTC string timestamps"). A struct cannot declare `created_at` twice;
// confirm the final struct drops the i64 field.
/// A single event on an issue's timeline, persisted in the
/// `timeline_events` table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimelineEvent {
pub id: String,
pub issue_id: String,
pub event_type: String,
pub description: String,
pub created_at: i64,
// JSON-encoded extra data; defaults to "{}" at the call site.
pub metadata: String,
// UTC timestamp string (see TimelineEvent::new for the format).
pub created_at: String,
}
impl TimelineEvent {
    /// Builds a new timeline event with a freshly generated UUIDv7 id and a
    /// UTC creation timestamp formatted as "YYYY-MM-DD HH:MM:SS UTC".
    pub fn new(issue_id: String, event_type: String, description: String, metadata: String) -> Self {
        // Generate the generated fields first, then assemble the struct.
        let id = Uuid::now_v7().to_string();
        let created_at = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string();
        Self {
            id,
            issue_id,
            event_type,
            description,
            metadata,
            created_at,
        }
    }
}
// ─── Log File ───────────────────────────────────────────────────────────────

View File

@ -188,6 +188,7 @@ mod tests {
created_at: "2025-02-10 09:00:00".to_string(),
}],
conversations: vec![],
timeline_events: vec![],
}
}

View File

@ -194,6 +194,7 @@ mod tests {
},
],
conversations: vec![],
timeline_events: vec![],
}
}

View File

@ -629,11 +629,10 @@ mod tests {
// NOTE(review): this span appears to be diff residue with the +/- markers
// stripped — the env-var lines (set_var / derive_aes_key().unwrap() /
// remove_var) read as the removed version, and the derive_aes_key_from_str
// calls as the replacement (the commit message says the fix avoids an env-var
// race in parallel tests). Confirm the final test contains only the
// derive_aes_key_from_str form.
#[test]
fn test_derive_aes_key_is_stable_for_same_input() {
std::env::set_var("TFTSR_ENCRYPTION_KEY", "stable-test-key");
let k1 = derive_aes_key().unwrap();
let k2 = derive_aes_key().unwrap();
// Use deterministic helper to avoid env var race conditions in parallel tests
let k1 = derive_aes_key_from_str("stable-test-key").unwrap();
let k2 = derive_aes_key_from_str("stable-test-key").unwrap();
// Same input string must always derive the same key.
assert_eq!(k1, k2);
std::env::remove_var("TFTSR_ENCRYPTION_KEY");
}
// Test helper functions that accept key directly (bypass env var)

View File

@ -69,6 +69,7 @@ pub fn run() {
commands::db::add_five_why,
commands::db::update_five_why,
commands::db::add_timeline_event,
commands::db::get_timeline_events,
// Analysis / PII
commands::analysis::upload_log_file,
commands::analysis::upload_log_file_by_content,