# Compare commits

34 Commits

| SHA1 |
|---|
| c0d482ace7 |
| 5a12718566 |
| 4a0c7957ec |
| 12a76b4dd8 |
| 0e6fd09455 |
| b7f348bf34 |
| 7234704636 |
| 06b0c10b17 |
| ab231b6564 |
| 8b828fe4c3 |
| 27193c91e6 |
| cb542d7f22 |
| d066e71eeb |
| 257b2fb9c5 |
| d715ba0b25 |
| 8b0cbc3ce8 |
| 13c4969e31 |
| 79a623dbb2 |
| 107fee8853 |
| 6d105a70ad |
| ca56b583c5 |
| 8c35e91aef |
| 1055841b6f |
| f38ca7e2fc |
| a9956a16a4 |
| bc50a78db7 |
| e6d1965342 |
| 708e1e9c18 |
| 5b45c6c418 |
| 096068ed2b |
| 9248811076 |
| 007d0ee9d5 |
| 9e1a9b1d34 |
| cdb1dd1dad |
---

**CI: automated PR review workflow**

````diff
@@ -43,13 +43,13 @@ jobs:
           git diff origin/${{ github.base_ref }}..HEAD > /tmp/pr_diff.txt
           echo "diff_size=$(wc -l < /tmp/pr_diff.txt | tr -d ' ')" >> $GITHUB_OUTPUT

-      - name: Analyze with Ollama
+      - name: Analyze with LLM
         id: analyze
         if: steps.diff.outputs.diff_size != '0'
         shell: bash
         env:
-          OLLAMA_URL: https://ollama-ui.tftsr.com/ollama/v1
-          OLLAMA_API_KEY: ${{ secrets.OLLAMA_API_KEY }}
+          LITELLM_URL: http://172.0.0.29:11434/v1
+          LITELLM_API_KEY: ${{ secrets.OLLAMA_API_KEY }}
           PR_TITLE: ${{ github.event.pull_request.title }}
           PR_NUMBER: ${{ github.event.pull_request.number }}
         run: |
@@ -62,32 +62,32 @@ jobs:
             | grep -v -E '^[+-].*[A-Za-z0-9+/]{40,}={0,2}([^A-Za-z0-9+/=]|$)')
           PROMPT="Analyze the following code changes for correctness, security issues, and best practices. PR Title: ${PR_TITLE}\n\nDiff:\n${DIFF_CONTENT}\n\nProvide a review with: 1) Summary, 2) Bugs/errors, 3) Security issues, 4) Best practices. Give specific comments with suggested fixes."
           BODY=$(jq -cn \
-            --arg model "qwen3-coder-next:latest" \
+            --arg model "qwen2.5-72b" \
             --arg content "$PROMPT" \
             '{model: $model, messages: [{role: "user", content: $content}], stream: false}')
-          echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] PR #${PR_NUMBER} - Calling Ollama API (${#BODY} bytes)..."
-          HTTP_CODE=$(curl -s --max-time 120 --connect-timeout 30 \
-            --retry 3 --retry-delay 5 --retry-connrefused --retry-max-time 120 \
-            -o /tmp/ollama_response.json -w "%{http_code}" \
-            -X POST "$OLLAMA_URL/chat/completions" \
-            -H "Authorization: Bearer $OLLAMA_API_KEY" \
+          echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] PR #${PR_NUMBER} - Calling liteLLM API (${#BODY} bytes)..."
+          HTTP_CODE=$(curl -s --max-time 300 --connect-timeout 30 \
+            --retry 3 --retry-delay 10 --retry-connrefused --retry-max-time 300 \
+            -o /tmp/llm_response.json -w "%{http_code}" \
+            -X POST "$LITELLM_URL/chat/completions" \
+            -H "Authorization: Bearer $LITELLM_API_KEY" \
             -H "Content-Type: application/json" \
             -d "$BODY")
           echo "HTTP status: $HTTP_CODE"
-          echo "Response file size: $(wc -c < /tmp/ollama_response.json) bytes"
+          echo "Response file size: $(wc -c < /tmp/llm_response.json) bytes"
           if [ "$HTTP_CODE" != "200" ]; then
-            echo "ERROR: Ollama returned HTTP $HTTP_CODE"
-            cat /tmp/ollama_response.json
+            echo "ERROR: liteLLM returned HTTP $HTTP_CODE"
+            cat /tmp/llm_response.json
             exit 1
           fi
-          if ! jq empty /tmp/ollama_response.json 2>/dev/null; then
-            echo "ERROR: Invalid JSON response from Ollama"
-            cat /tmp/ollama_response.json
+          if ! jq empty /tmp/llm_response.json 2>/dev/null; then
+            echo "ERROR: Invalid JSON response from liteLLM"
+            cat /tmp/llm_response.json
             exit 1
           fi
-          REVIEW=$(jq -r '.choices[0].message.content // empty' /tmp/ollama_response.json)
+          REVIEW=$(jq -r '.choices[0].message.content // empty' /tmp/llm_response.json)
           if [ -z "$REVIEW" ]; then
-            echo "ERROR: No content in Ollama response"
+            echo "ERROR: No content in liteLLM response"
             exit 1
           fi
           echo "Review length: ${#REVIEW} chars"
@@ -109,11 +109,11 @@ jobs:
           if [ -f "/tmp/pr_review.txt" ] && [ -s "/tmp/pr_review.txt" ]; then
             REVIEW_BODY=$(head -c 65536 /tmp/pr_review.txt)
             BODY=$(jq -n \
-              --arg body "🤖 Automated PR Review:\n\n${REVIEW_BODY}\n\n---\n*this is an automated review from Ollama*" \
+              --arg body "Automated PR Review (qwen2.5-72b via liteLLM):\n\n${REVIEW_BODY}\n\n---\n*automated code review*" \
               '{body: $body, event: "COMMENT"}')
           else
             BODY=$(jq -n \
-              '{body: "⚠️ Automated PR Review could not be completed — Ollama analysis failed or produced no output.", event: "COMMENT"}')
+              '{body: "Automated PR Review could not be completed - LLM analysis failed or produced no output.", event: "COMMENT"}')
           fi
           HTTP_CODE=$(curl -s --max-time 30 --connect-timeout 10 \
             -o /tmp/review_post_response.json -w "%{http_code}" \
@@ -131,4 +131,4 @@ jobs:
       - name: Cleanup
         if: always()
         shell: bash
-        run: rm -f /tmp/pr_diff.txt /tmp/ollama_response.json /tmp/pr_review.txt /tmp/review_post_response.json
+        run: rm -f /tmp/pr_diff.txt /tmp/llm_response.json /tmp/pr_review.txt /tmp/review_post_response.json
````
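For reference, the payload the step above assembles with `jq` is a standard OpenAI-compatible chat-completions request. A minimal Rust sketch of the same call, assuming the `reqwest` (with its `json` feature), `serde_json`, and `tokio` crates; none of these are part of the workflow itself:

```rust
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Same env vars the workflow exports; empty strings if unset.
    let base = std::env::var("LITELLM_URL").unwrap_or_default();
    let key = std::env::var("LITELLM_API_KEY").unwrap_or_default();

    // Mirrors the jq-built body: {model, messages, stream}.
    let body = json!({
        "model": "qwen2.5-72b",
        "messages": [{ "role": "user", "content": "Review this diff..." }],
        "stream": false
    });

    let resp = reqwest::Client::new()
        .post(format!("{base}/chat/completions"))
        .bearer_auth(key)
        .json(&body)
        .send()
        .await?;

    // Same extraction as `jq -r '.choices[0].message.content'`.
    let v: serde_json::Value = resp.json().await?;
    println!("{}", v["choices"][0]["message"]["content"]);
    Ok(())
}
```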
---

**CI: Test workflow**

````diff
@@ -1,6 +1,9 @@
 name: Test

 on:
+  push:
+    branches:
+      - master
   pull_request:

 jobs:
@@ -37,6 +40,11 @@ jobs:
           key: ${{ runner.os }}-cargo-linux-amd64-${{ hashFiles('**/Cargo.lock') }}
           restore-keys: |
             ${{ runner.os }}-cargo-linux-amd64-
+      - name: Install dependencies
+        run: npm install --legacy-peer-deps
+      - name: Update version from Git
+        run: node scripts/update-version.mjs
+      - run: cargo generate-lockfile --manifest-path src-tauri/Cargo.toml
       - run: cargo fmt --manifest-path src-tauri/Cargo.toml --check

   rust-clippy:
@@ -72,7 +80,7 @@ jobs:
           key: ${{ runner.os }}-cargo-linux-amd64-${{ hashFiles('**/Cargo.lock') }}
           restore-keys: |
             ${{ runner.os }}-cargo-linux-amd64-
-      - run: cargo clippy --locked --manifest-path src-tauri/Cargo.toml -- -D warnings
+      - run: cargo clippy --manifest-path src-tauri/Cargo.toml -- -D warnings

   rust-tests:
     runs-on: ubuntu-latest
@@ -107,7 +115,7 @@ jobs:
           key: ${{ runner.os }}-cargo-linux-amd64-${{ hashFiles('**/Cargo.lock') }}
           restore-keys: |
             ${{ runner.os }}-cargo-linux-amd64-
-      - run: cargo test --locked --manifest-path src-tauri/Cargo.toml -- --test-threads=1
+      - run: cargo test --manifest-path src-tauri/Cargo.toml -- --test-threads=1

   frontend-typecheck:
     runs-on: ubuntu-latest
````
---

**CHANGELOG.md** (66 lines changed)

````diff
@@ -6,6 +6,72 @@ CI, chore, and build changes are excluded.

 ## [Unreleased]

+### Bug Fixes
+
+- Harden timeline event input validation and atomic writes
+
+### Documentation
+
+- Update wiki for timeline events and incident response methodology
+
+### Features
+
+- Add timeline_events table, model, and CRUD commands
+- Populate RCA and postmortem docs with real timeline data
+- Wire incident response methodology into AI and record triage events
+
+## [0.2.65] — 2026-04-15
+
+### Bug Fixes
+
+- Add --locked to cargo commands and improve version update script
+- Remove invalid --locked flag from cargo commands and fix format string
+- **integrations**: Security and correctness improvements
+- Correct WIQL syntax and escape_wiql implementation
+
+### Features
+
+- Implement dynamic versioning from Git tags
+- **integrations**: Implement query expansion for semantic search
+
+### Security
+
+- Fix query expansion issues from PR review
+- Address all issues from automated PR review
+
+## [0.2.63] — 2026-04-13
+
+### Bug Fixes
+
+- Add Windows nsis target and update CHANGELOG to v0.2.61
+
+## [0.2.61] — 2026-04-13
+
+### Bug Fixes
+
+- Remove AppImage from upload artifact patterns
+
+## [0.2.59] — 2026-04-13
+
+### Bug Fixes
+
+- Remove AppImage bundling to fix linux-amd64 build
+
+## [0.2.57] — 2026-04-13
+
+### Bug Fixes
+
+- Add fuse dependency for AppImage support
+
+### Refactoring
+
+- Remove custom linuxdeploy install per CI; CI uses tauri-downloaded version
+- Revert to original Dockerfile without manual linuxdeploy installation
+
+## [0.2.56] — 2026-04-13
+
+### Bug Fixes
+
+- Add missing ai_providers columns and fix linux-amd64 build
+- Address AI review findings
+- Address critical AI review issues
+
+## [0.2.55] — 2026-04-13
+
+### Bug Fixes
+
+- **ci**: Use Gitea file API to push CHANGELOG.md — eliminates non-fast-forward rejection
+- **ci**: Harden CHANGELOG.md API push step per review
+
+## [0.2.54] — 2026-04-13
+
 ### Bug Fixes

 - **ci**: Correct git-cliff archive path in tar extraction
````
---

**Architecture documentation**

````diff
@@ -50,7 +50,7 @@ All command handlers receive `State<'_, AppState>` as a Tauri-injected parameter
 | `commands/integrations.rs` | Confluence / ServiceNow / ADO — v0.2 stubs |
 | `ai/provider.rs` | `Provider` trait + `create_provider()` factory |
 | `pii/detector.rs` | Multi-pattern PII scanner with overlap resolution |
-| `db/migrations.rs` | Versioned schema (12 migrations in `_migrations` table) |
+| `db/migrations.rs` | Versioned schema (17 migrations in `_migrations` table) |
 | `db/models.rs` | All DB types — see `IssueDetail` note below |
 | `docs/rca.rs` + `docs/postmortem.rs` | Markdown template builders |
 | `audit/log.rs` | `write_audit_event()` — called before every external send |
@@ -176,6 +176,55 @@ pub struct IssueDetail {

 Use `detail.issue.title`, **not** `detail.title`.

+## Incident Response Methodology
+
+The application integrates a comprehensive incident response framework via system prompt injection. The `INCIDENT_RESPONSE_FRAMEWORK` constant in `src/lib/domainPrompts.ts` is appended to all 17 domain-specific system prompts (Linux, Windows, Network, Kubernetes, Databases, Virtualization, Hardware, Observability, and others).
+
+**5-Phase Framework:**
+
+1. **Detection & Evidence Gathering** — Initial issue assessment, log collection, PII redaction
+2. **Diagnosis & Hypothesis Testing** — AI-assisted analysis, pattern matching against known incidents
+3. **Root Cause Analysis with 5-Whys** — Iterative questioning to identify underlying cause (steps 1–5)
+4. **Resolution & Prevention** — Remediation planning and implementation
+5. **Post-Incident Review** — Timeline-based blameless post-mortem and lessons learned
+
+**System Prompt Injection:**
+
+The `chat_message` command accepts an optional `system_prompt` parameter. If provided, it prepends domain expertise before the conversation history. If omitted, the framework selects the appropriate domain prompt based on the issue category. This allows:
+
+- **Specialized expertise**: Different frameworks for Linux vs. Kubernetes vs. Network incidents
+- **Flexible override**: Users can inject custom system prompts for cross-domain problems
+- **Consistent methodology**: All 17 domain prompts follow the same 5-phase incident response structure
+
+**Timeline Event Recording:**
+
+Timeline events are recorded non-blockingly at key triage moments:
+
+```
+Issue Creation → triage_started
+        ↓
+Log Upload → log_uploaded (metadata: file_name, file_size)
+        ↓
+Why-Level Progression → why_level_advanced (metadata: from_level → to_level)
+        ↓
+Root Cause Identified → root_cause_identified (metadata: root_cause, confidence)
+        ↓
+RCA Generated → rca_generated (metadata: doc_id, section_count)
+        ↓
+Postmortem Generated → postmortem_generated (metadata: doc_id, timeline_events_count)
+        ↓
+Document Exported → document_exported (metadata: format, file_path)
+```
+
+**Document Generation:**
+
+RCA and Postmortem generators now use real timeline event data instead of placeholders:
+
+- **RCA**: Incorporates timeline to show detection-to-root-cause progression
+- **Postmortem**: Uses full timeline to demonstrate the complete incident lifecycle and response effectiveness
+
+Timeline events are stored in the `timeline_events` table (indexed by issue_id and created_at for fast retrieval) and dual-written to `audit_log` for security/compliance purposes.
+
 ## Application Startup Sequence

 ```
````
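The injection order described above is easiest to see in isolation. A minimal sketch of the assembly step, using an illustrative `Message` type with the same role/content fields as the handler shown later in this diff (the real struct also carries tool-call fields):

```rust
#[derive(Debug, Clone)]
struct Message {
    role: String,
    content: String,
}

fn assemble(system_prompt: Option<String>, history: Vec<Message>) -> Vec<Message> {
    let mut messages = Vec::new();
    // Optional domain system prompt goes first, if non-empty...
    if let Some(prompt) = system_prompt {
        if !prompt.is_empty() {
            messages.push(Message { role: "system".into(), content: prompt });
        }
    }
    // ...followed by the stored conversation history.
    messages.extend(history);
    messages
}

fn main() {
    let history = vec![Message { role: "user".into(), content: "Service is down".into() }];
    let msgs = assemble(Some("You are a Linux incident responder.".into()), history);
    assert_eq!(msgs[0].role, "system");
}
```

Chat-style APIs conventionally expect the system message to lead the message list, which is why the history is extended after it rather than before.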
---

**Database schema documentation**

````diff
@@ -2,7 +2,7 @@

 ## Overview

-TFTSR uses **SQLite** via `rusqlite` with the `bundled-sqlcipher` feature for AES-256 encryption in production. 12 versioned migrations are tracked in the `_migrations` table.
+TFTSR uses **SQLite** via `rusqlite` with the `bundled-sqlcipher` feature for AES-256 encryption in production. 17 versioned migrations are tracked in the `_migrations` table.

 **DB file location:** `{app_data_dir}/tftsr.db`

@@ -38,7 +38,7 @@ pub fn init_db(data_dir: &Path) -> anyhow::Result<Connection> {

 ---

-## Schema (11 Migrations)
+## Schema (17 Migrations)

 ### 001 — issues

@@ -245,6 +245,51 @@ CREATE TABLE image_attachments (
 - Basic auth (ServiceNow): Store encrypted password
 - One credential per service (enforced by UNIQUE constraint)

+### 017 — timeline_events (Incident Response Timeline)
+
+```sql
+CREATE TABLE timeline_events (
+    id TEXT PRIMARY KEY,
+    issue_id TEXT NOT NULL REFERENCES issues(id) ON DELETE CASCADE,
+    event_type TEXT NOT NULL,
+    description TEXT NOT NULL,
+    metadata TEXT, -- JSON object with event-specific data
+    created_at TEXT NOT NULL
+);
+
+CREATE INDEX idx_timeline_events_issue ON timeline_events(issue_id);
+CREATE INDEX idx_timeline_events_time ON timeline_events(created_at);
+```
+
+**Event Types:**
+- `triage_started` — Incident response begins, initial issue properties recorded
+- `log_uploaded` — Log file uploaded and analyzed
+- `why_level_advanced` — 5-Whys entry completed, progression to next level
+- `root_cause_identified` — Root cause determined from analysis
+- `rca_generated` — Root Cause Analysis document created
+- `postmortem_generated` — Post-mortem document created
+- `document_exported` — Document exported to file (MD or PDF)
+
+**Metadata Structure (JSON):**
+```json
+{
+  "triage_started": {"severity": "high", "category": "network"},
+  "log_uploaded": {"file_name": "app.log", "file_size": 2048576},
+  "why_level_advanced": {"from_level": 2, "to_level": 3, "question": "Why did the service timeout?"},
+  "root_cause_identified": {"root_cause": "DNS resolution failure", "confidence": 0.95},
+  "rca_generated": {"doc_id": "doc_abc123", "section_count": 7},
+  "postmortem_generated": {"doc_id": "doc_def456", "timeline_events_count": 12},
+  "document_exported": {"format": "pdf", "file_path": "/home/user/docs/rca.pdf"}
+}
+```
+
+**Design Notes:**
+- Timeline events are **queryable** (indexed by issue_id and created_at) for document generation
+- Dual-write: Events recorded to both `timeline_events` and `audit_log` — timeline for chronological reporting, audit_log for security/compliance
+- `created_at`: TEXT UTC timestamp (`YYYY-MM-DD HH:MM:SS`)
+- Non-blocking writes: Timeline events recorded asynchronously at key triage moments
+- Cascade delete from issues ensures cleanup
+
 ---

 ## Key Design Notes
@@ -289,4 +334,13 @@ pub struct AuditEntry {
     pub user_id: String,
     pub details: Option<String>,
 }
+
+pub struct TimelineEvent {
+    pub id: String,
+    pub issue_id: String,
+    pub event_type: String,
+    pub description: String,
+    pub metadata: Option<String>, // JSON
+    pub created_at: String,
+}
 ```
````
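To make the metadata contract concrete, here is a self-contained sketch of writing one `log_uploaded` event against the schema above, using the same `rusqlite`, `serde_json`, `uuid`, and `chrono` crates the diff relies on (table creation is inlined purely for the example):

```rust
use rusqlite::{params, Connection};
use serde_json::json;
use uuid::Uuid; // requires the "v7" feature for Uuid::now_v7()

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE timeline_events (
             id TEXT PRIMARY KEY,
             issue_id TEXT NOT NULL,
             event_type TEXT NOT NULL,
             description TEXT NOT NULL,
             metadata TEXT,
             created_at TEXT NOT NULL
         );",
    )?;

    // Event-specific metadata is serialized JSON, matching the table above.
    let metadata = json!({ "file_name": "app.log", "file_size": 2_048_576 }).to_string();

    conn.execute(
        "INSERT INTO timeline_events (id, issue_id, event_type, description, metadata, created_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
        params![
            Uuid::now_v7().to_string(),
            "issue-1",
            "log_uploaded",
            "Log file uploaded and analyzed",
            metadata,
            chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string(),
        ],
    )?;
    Ok(())
}
```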
---

**Command API documentation**

````diff
@@ -62,11 +62,27 @@ updateFiveWhyCmd(entryId: string, answer: string) → void
 ```
 Sets or updates the answer for an existing 5-Whys entry.

+### `get_timeline_events`
+```typescript
+getTimelineEventsCmd(issueId: string) → TimelineEvent[]
+```
+Retrieves all timeline events for an issue, ordered by created_at ascending.
+```typescript
+interface TimelineEvent {
+  id: string;
+  issue_id: string;
+  event_type: string; // One of: triage_started, log_uploaded, why_level_advanced, etc.
+  description: string;
+  metadata?: Record<string, any>; // Event-specific JSON data
+  created_at: string; // UTC timestamp
+}
+```
+
 ### `add_timeline_event`
 ```typescript
-addTimelineEventCmd(issueId: string, eventType: string, description: string) → TimelineEvent
+addTimelineEventCmd(issueId: string, eventType: string, description: string, metadata?: Record<string, any>) → TimelineEvent
 ```
-Records a timestamped event in the issue timeline.
+Records a timestamped event in the issue timeline. Dual-writes to both `timeline_events` (for document generation) and `audit_log` (for security audit trail).

 ---

@@ -137,9 +153,9 @@ Sends selected (redacted) log files to the AI provider with an analysis prompt.

 ### `chat_message`
 ```typescript
-chatMessageCmd(issueId: string, message: string, providerConfig: ProviderConfig) → ChatResponse
+chatMessageCmd(issueId: string, message: string, providerConfig: ProviderConfig, systemPrompt?: string) → ChatResponse
 ```
-Sends a message in the ongoing triage conversation. Domain system prompt is injected automatically on first message. AI response is parsed for why-level indicators (1–5).
+Sends a message in the ongoing triage conversation. Optional `systemPrompt` parameter allows prepending domain expertise before conversation history. If not provided, the domain-specific system prompt for the issue category is injected automatically on first message. AI response is parsed for why-level indicators (1–5).

 ### `list_providers`
 ```typescript
@@ -155,13 +171,13 @@ Returns the list of supported providers with their available models and configur
 ```typescript
 generateRcaCmd(issueId: string) → Document
 ```
-Builds an RCA Markdown document from the issue data, 5-Whys answers, and timeline.
+Builds an RCA Markdown document from the issue data, 5-Whys answers, and timeline events. Uses real incident response timeline (log uploads, why-level progression, root cause identification) instead of placeholders.

 ### `generate_postmortem`
 ```typescript
 generatePostmortemCmd(issueId: string) → Document
 ```
-Builds a blameless post-mortem Markdown document.
+Builds a blameless post-mortem Markdown document. Incorporates timeline events to show the full incident lifecycle: detection, diagnosis, resolution, and post-incident review phases.

 ### `update_document`
 ```typescript
````
---

**package.json**

````diff
@@ -1,11 +1,12 @@
 {
   "name": "tftsr",
   "private": true,
-  "version": "0.2.50",
+  "version": "0.2.62",
   "type": "module",
   "scripts": {
     "dev": "vite",
     "build": "tsc && vite build",
+    "version:update": "node scripts/update-version.mjs",
     "preview": "vite preview",
     "tauri": "tauri",
     "test": "vitest",
````
---

**scripts/update-version.mjs** (new file, 111 lines)

```javascript
#!/usr/bin/env node

import { execSync } from 'child_process';
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
import { resolve, dirname } from 'path';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const projectRoot = resolve(__dirname, '..');

/**
 * Validate version is semver-compliant (X.Y.Z)
 */
function isValidSemver(version) {
  return /^[0-9]+\.[0-9]+\.[0-9]+$/.test(version);
}

function validateGitRepo(root) {
  if (!existsSync(resolve(root, '.git'))) {
    throw new Error(`Not a Git repository: ${root}`);
  }
}

function getVersionFromGit() {
  validateGitRepo(projectRoot);
  try {
    const output = execSync('git describe --tags --abbrev=0', {
      encoding: 'utf-8',
      cwd: projectRoot,
      shell: false
    });
    let version = output.trim();

    // Remove v prefix
    version = version.replace(/^v/, '');

    // Validate it's a valid semver
    if (!isValidSemver(version)) {
      const pkgJsonVersion = getFallbackVersion();
      console.warn(`Invalid version format "${version}" from git describe, using package.json fallback: ${pkgJsonVersion}`);
      return pkgJsonVersion;
    }

    return version;
  } catch (e) {
    const pkgJsonVersion = getFallbackVersion();
    console.warn(`Failed to get version from Git tags, using package.json fallback: ${pkgJsonVersion}`);
    return pkgJsonVersion;
  }
}

function getFallbackVersion() {
  const pkgPath = resolve(projectRoot, 'package.json');
  if (!existsSync(pkgPath)) {
    return '0.2.50';
  }
  try {
    const content = readFileSync(pkgPath, 'utf-8');
    const json = JSON.parse(content);
    return json.version || '0.2.50';
  } catch {
    return '0.2.50';
  }
}

function updatePackageJson(version) {
  const fullPath = resolve(projectRoot, 'package.json');
  if (!existsSync(fullPath)) {
    throw new Error(`File not found: ${fullPath}`);
  }

  const content = readFileSync(fullPath, 'utf-8');
  const json = JSON.parse(content);
  json.version = version;

  // Write with 2-space indentation
  writeFileSync(fullPath, JSON.stringify(json, null, 2) + '\n', 'utf-8');
  console.log(`✓ Updated package.json to ${version}`);
}

function updateTOML(path, version) {
  const fullPath = resolve(projectRoot, path);
  if (!existsSync(fullPath)) {
    throw new Error(`File not found: ${fullPath}`);
  }

  const content = readFileSync(fullPath, 'utf-8');
  const lines = content.split('\n');
  const output = [];

  for (const line of lines) {
    if (line.match(/^\s*version\s*=\s*"/)) {
      output.push(`version = "${version}"`);
    } else {
      output.push(line);
    }
  }

  writeFileSync(fullPath, output.join('\n') + '\n', 'utf-8');
  console.log(`✓ Updated ${path} to ${version}`);
}

const version = getVersionFromGit();
console.log(`Setting version to: ${version}`);

updatePackageJson(version);
updateTOML('src-tauri/Cargo.toml', version);
updateTOML('src-tauri/tauri.conf.json', version);

console.log(`✓ All version fields updated to ${version}`);
```
---

**src-tauri/Cargo.lock** (generated, 3 lines changed)

````diff
@@ -6139,7 +6139,7 @@ dependencies = [

 [[package]]
 name = "trcaa"
-version = "0.2.50"
+version = "0.2.62"
 dependencies = [
  "aes-gcm",
  "aho-corasick",
@@ -6174,6 +6174,7 @@ dependencies = [
  "tokio-test",
  "tracing",
  "tracing-subscriber",
+ "url",
  "urlencoding",
  "uuid",
  "warp",
````
---

**src-tauri/Cargo.toml**

````diff
@@ -1,6 +1,6 @@
 [package]
 name = "trcaa"
-version = "0.2.50"
+version = "0.2.62"
 edition = "2021"

 [lib]
@@ -44,6 +44,7 @@ lazy_static = "1.4"
 warp = "0.3"
 urlencoding = "2"
 infer = "0.15"
+url = "2.5.8"

 [dev-dependencies]
 tokio-test = "0.4"
@@ -52,3 +53,7 @@ mockito = "1.2"
 [profile.release]
 opt-level = "s"
 strip = true
+
+
+
+
````
---

**src-tauri/build.rs**

````diff
@@ -1,3 +1,30 @@
 fn main() {
+    let version = get_version_from_git();
+
+    println!("cargo:rustc-env=APP_VERSION={version}");
+    println!("cargo:rerun-if-changed=.git/refs/heads/master");
+    println!("cargo:rerun-if-changed=.git/refs/tags");
+
     tauri_build::build()
 }
+
+fn get_version_from_git() -> String {
+    if let Ok(output) = std::process::Command::new("git")
+        .arg("describe")
+        .arg("--tags")
+        .arg("--abbrev=0")
+        .output()
+    {
+        if output.status.success() {
+            let version = String::from_utf8_lossy(&output.stdout)
+                .trim()
+                .trim_start_matches('v')
+                .to_string();
+            if !version.is_empty() {
+                return version;
+            }
+        }
+    }
+
+    "0.2.50".to_string()
+}
````
---

**Command handler: `chat_message`**

````diff
@@ -165,6 +165,7 @@ pub async fn chat_message(
     issue_id: String,
     message: String,
     provider_config: ProviderConfig,
+    system_prompt: Option<String>,
     app_handle: tauri::AppHandle,
     state: State<'_, AppState>,
 ) -> Result<ChatResponse, String> {
@@ -232,7 +233,21 @@ pub async fn chat_message(
     // Search integration sources for relevant context
     let integration_context = search_integration_sources(&message, &app_handle, &state).await;

-    let mut messages = history;
+    let mut messages = Vec::new();
+
+    // Inject domain system prompt if provided
+    if let Some(ref prompt) = system_prompt {
+        if !prompt.is_empty() {
+            messages.push(Message {
+                role: "system".into(),
+                content: prompt.clone(),
+                tool_call_id: None,
+                tool_calls: None,
+            });
+        }
+    }
+
+    messages.extend(history);

     // If we found integration content, add it to the conversation context
     if !integration_context.is_empty() {
````
---

**Command handlers: issues / timeline events**

````diff
@@ -2,7 +2,7 @@ use tauri::State;

 use crate::db::models::{
     AiConversation, AiMessage, ImageAttachment, Issue, IssueDetail, IssueFilter, IssueSummary,
-    IssueUpdate, LogFile, ResolutionStep,
+    IssueUpdate, LogFile, ResolutionStep, TimelineEvent,
 };
 use crate::state::AppState;

@@ -171,12 +171,35 @@ pub async fn get_issue(
         .filter_map(|r| r.ok())
         .collect();

+    // Load timeline events
+    let mut te_stmt = db
+        .prepare(
+            "SELECT id, issue_id, event_type, description, metadata, created_at \
+             FROM timeline_events WHERE issue_id = ?1 ORDER BY created_at ASC",
+        )
+        .map_err(|e| e.to_string())?;
+    let timeline_events: Vec<TimelineEvent> = te_stmt
+        .query_map([&issue_id], |row| {
+            Ok(TimelineEvent {
+                id: row.get(0)?,
+                issue_id: row.get(1)?,
+                event_type: row.get(2)?,
+                description: row.get(3)?,
+                metadata: row.get(4)?,
+                created_at: row.get(5)?,
+            })
+        })
+        .map_err(|e| e.to_string())?
+        .filter_map(|r| r.ok())
+        .collect();
+
     Ok(IssueDetail {
         issue,
         log_files,
         image_attachments,
         resolution_steps,
         conversations,
+        timeline_events,
     })
 }

@@ -302,6 +325,11 @@ pub async fn delete_issue(issue_id: String, state: State<'_, AppState>) -> Resul
         [&issue_id],
     )
     .map_err(|e| e.to_string())?;
+    db.execute(
+        "DELETE FROM timeline_events WHERE issue_id = ?1",
+        [&issue_id],
+    )
+    .map_err(|e| e.to_string())?;
     db.execute("DELETE FROM issues WHERE id = ?1", [&issue_id])
         .map_err(|e| e.to_string())?;

@@ -505,37 +533,105 @@ pub async fn update_five_why(
     Ok(())
 }

+const VALID_EVENT_TYPES: &[&str] = &[
+    "triage_started",
+    "log_uploaded",
+    "why_level_advanced",
+    "root_cause_identified",
+    "rca_generated",
+    "postmortem_generated",
+    "document_exported",
+];
+
 #[tauri::command]
 pub async fn add_timeline_event(
     issue_id: String,
     event_type: String,
     description: String,
+    metadata: Option<String>,
     state: State<'_, AppState>,
-) -> Result<(), String> {
-    // Use audit_log for timeline tracking
-    let db = state.db.lock().map_err(|e| e.to_string())?;
-    let entry = crate::db::models::AuditEntry::new(
-        event_type,
-        "issue".to_string(),
+) -> Result<TimelineEvent, String> {
+    if !VALID_EVENT_TYPES.contains(&event_type.as_str()) {
+        return Err(format!("Invalid event_type: {event_type}"));
+    }
+
+    let meta = metadata.unwrap_or_else(|| "{}".to_string());
+    if meta.len() > 10240 {
+        return Err("metadata exceeds maximum size of 10KB".to_string());
+    }
+    serde_json::from_str::<serde_json::Value>(&meta)
+        .map_err(|_| "metadata must be valid JSON".to_string())?;
+
+    let event = TimelineEvent::new(
         issue_id.clone(),
-        serde_json::json!({ "description": description }).to_string(),
+        event_type.clone(),
+        description.clone(),
+        meta,
     );
+
+    let mut db = state.db.lock().map_err(|e| e.to_string())?;
+    let tx = db.transaction().map_err(|e| e.to_string())?;
+
+    tx.execute(
+        "INSERT INTO timeline_events (id, issue_id, event_type, description, metadata, created_at) \
+         VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
+        rusqlite::params![
+            event.id,
+            event.issue_id,
+            event.event_type,
+            event.description,
+            event.metadata,
+            event.created_at,
+        ],
+    )
+    .map_err(|e| e.to_string())?;
+
     crate::audit::log::write_audit_event(
-        &db,
-        &entry.action,
-        &entry.entity_type,
-        &entry.entity_id,
-        &entry.details,
+        &tx,
+        &event_type,
+        "issue",
+        &issue_id,
+        &serde_json::json!({ "description": description, "metadata": event.metadata }).to_string(),
     )
     .map_err(|_| "Failed to write security audit entry".to_string())?;

-    // Update issue timestamp
     let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string();
-    db.execute(
+    tx.execute(
         "UPDATE issues SET updated_at = ?1 WHERE id = ?2",
         rusqlite::params![now, issue_id],
     )
     .map_err(|e| e.to_string())?;

-    Ok(())
+    tx.commit().map_err(|e| e.to_string())?;
+
+    Ok(event)
+}
+
+#[tauri::command]
+pub async fn get_timeline_events(
+    issue_id: String,
+    state: State<'_, AppState>,
+) -> Result<Vec<TimelineEvent>, String> {
+    let db = state.db.lock().map_err(|e| e.to_string())?;
+    let mut stmt = db
+        .prepare(
+            "SELECT id, issue_id, event_type, description, metadata, created_at \
+             FROM timeline_events WHERE issue_id = ?1 ORDER BY created_at ASC",
+        )
+        .map_err(|e| e.to_string())?;
+    let events = stmt
+        .query_map([&issue_id], |row| {
+            Ok(TimelineEvent {
+                id: row.get(0)?,
+                issue_id: row.get(1)?,
+                event_type: row.get(2)?,
+                description: row.get(3)?,
+                metadata: row.get(4)?,
+                created_at: row.get(5)?,
+            })
+        })
+        .map_err(|e| e.to_string())?
+        .filter_map(|r| r.ok())
+        .collect();
+    Ok(events)
 }
````
---

**Command handlers: AI providers**

````diff
@@ -4,6 +4,7 @@ use crate::ollama::{
     OllamaStatus,
 };
 use crate::state::{AppSettings, AppState, ProviderConfig};
+use std::env;

 // --- Ollama commands ---

@@ -275,3 +276,11 @@ pub async fn delete_ai_provider(

     Ok(())
 }
+
+/// Get the application version from build-time environment
+#[tauri::command]
+pub async fn get_app_version() -> Result<String, String> {
+    env::var("APP_VERSION")
+        .or_else(|_| env::var("CARGO_PKG_VERSION"))
+        .map_err(|e| format!("Failed to get version: {e}"))
+}
````
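One caveat on `get_app_version`: `cargo:rustc-env` (set in `build.rs` above) makes `APP_VERSION` visible to the compiler, not to the running process, and `CARGO_PKG_VERSION` only appears in the runtime environment when a parent tool such as `cargo run` happens to export it. A compile-time read sidesteps both issues; a minimal sketch:

```rust
/// Version baked in at compile time by build.rs, with the crate version
/// (always defined by Cargo during compilation) as the fallback.
fn app_version() -> &'static str {
    option_env!("APP_VERSION").unwrap_or(env!("CARGO_PKG_VERSION"))
}

fn main() {
    println!("version: {}", app_version());
}
```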
---

**db/migrations.rs**

````diff
@@ -199,6 +199,20 @@ pub fn run_migrations(conn: &Connection) -> anyhow::Result<()> {
             "016_add_created_at",
             "ALTER TABLE ai_providers ADD COLUMN created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%d %H:%M:%S', 'now'))",
         ),
+        (
+            "017_create_timeline_events",
+            "CREATE TABLE IF NOT EXISTS timeline_events (
+                id TEXT PRIMARY KEY,
+                issue_id TEXT NOT NULL,
+                event_type TEXT NOT NULL,
+                description TEXT NOT NULL DEFAULT '',
+                metadata TEXT NOT NULL DEFAULT '{}',
+                created_at TEXT NOT NULL,
+                FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE
+            );
+            CREATE INDEX idx_timeline_events_issue ON timeline_events(issue_id);
+            CREATE INDEX idx_timeline_events_time ON timeline_events(created_at);",
+        ),
     ];

     for (name, sql) in migrations {
@@ -698,4 +712,82 @@ mod tests {
         // Should not fail even though columns already exist
         run_migrations(&conn).unwrap();
     }
+
+    #[test]
+    fn test_timeline_events_table_exists() {
+        let conn = setup_test_db();
+        let count: i64 = conn
+            .query_row(
+                "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='timeline_events'",
+                [],
+                |r| r.get(0),
+            )
+            .unwrap();
+        assert_eq!(count, 1);
+
+        let mut stmt = conn.prepare("PRAGMA table_info(timeline_events)").unwrap();
+        let columns: Vec<String> = stmt
+            .query_map([], |row| row.get::<_, String>(1))
+            .unwrap()
+            .collect::<Result<Vec<_>, _>>()
+            .unwrap();
+
+        assert!(columns.contains(&"id".to_string()));
+        assert!(columns.contains(&"issue_id".to_string()));
+        assert!(columns.contains(&"event_type".to_string()));
+        assert!(columns.contains(&"description".to_string()));
+        assert!(columns.contains(&"metadata".to_string()));
+        assert!(columns.contains(&"created_at".to_string()));
+    }
+
+    #[test]
+    fn test_timeline_events_cascade_delete() {
+        let conn = setup_test_db();
+        conn.execute("PRAGMA foreign_keys = ON", []).unwrap();
+
+        let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string();
+        conn.execute(
+            "INSERT INTO issues (id, title, created_at, updated_at) VALUES (?1, ?2, ?3, ?4)",
+            rusqlite::params!["issue-1", "Test Issue", now, now],
+        )
+        .unwrap();
+
+        conn.execute(
+            "INSERT INTO timeline_events (id, issue_id, event_type, description, metadata, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
+            rusqlite::params!["te-1", "issue-1", "triage_started", "Started triage", "{}", "2025-01-15 10:00:00 UTC"],
+        )
+        .unwrap();
+
+        // Verify event exists
+        let count: i64 = conn
+            .query_row("SELECT COUNT(*) FROM timeline_events", [], |r| r.get(0))
+            .unwrap();
+        assert_eq!(count, 1);
+
+        // Delete issue — cascade should remove timeline event
+        conn.execute("DELETE FROM issues WHERE id = 'issue-1'", [])
+            .unwrap();
+
+        let count: i64 = conn
+            .query_row("SELECT COUNT(*) FROM timeline_events", [], |r| r.get(0))
+            .unwrap();
+        assert_eq!(count, 0);
+    }
+
+    #[test]
+    fn test_timeline_events_indexes() {
+        let conn = setup_test_db();
+        let mut stmt = conn
+            .prepare(
+                "SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='timeline_events'",
+            )
+            .unwrap();
+        let indexes: Vec<String> = stmt
+            .query_map([], |row| row.get(0))
+            .unwrap()
+            .filter_map(|r| r.ok())
+            .collect();
+        assert!(indexes.contains(&"idx_timeline_events_issue".to_string()));
+        assert!(indexes.contains(&"idx_timeline_events_time".to_string()));
+    }
 }
````
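The 017 migration bundles a CREATE TABLE and two CREATE INDEX statements in one string, which only works if the runner executes multi-statement SQL (rusqlite's single-statement `execute` rejects trailing statements). A standalone sketch using `execute_batch`, mirroring the checks in the tests above; the assumption here is that the project's runner does something equivalent:

```rust
use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    // execute_batch runs every statement in the string, in order.
    conn.execute_batch(
        "CREATE TABLE issues (id TEXT PRIMARY KEY, title TEXT, created_at TEXT, updated_at TEXT);
         CREATE TABLE IF NOT EXISTS timeline_events (
             id TEXT PRIMARY KEY,
             issue_id TEXT NOT NULL,
             event_type TEXT NOT NULL,
             description TEXT NOT NULL DEFAULT '',
             metadata TEXT NOT NULL DEFAULT '{}',
             created_at TEXT NOT NULL,
             FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE
         );
         CREATE INDEX idx_timeline_events_issue ON timeline_events(issue_id);
         CREATE INDEX idx_timeline_events_time ON timeline_events(created_at);",
    )?;

    // Same verification the migration tests perform.
    let n: i64 = conn.query_row(
        "SELECT COUNT(*) FROM sqlite_master WHERE name LIKE 'idx_timeline_events%'",
        [],
        |r| r.get(0),
    )?;
    assert_eq!(n, 2); // both indexes created
    Ok(())
}
```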
---

**db/models.rs**

````diff
@@ -47,6 +47,7 @@ pub struct IssueDetail {
     pub image_attachments: Vec<ImageAttachment>,
     pub resolution_steps: Vec<ResolutionStep>,
     pub conversations: Vec<AiConversation>,
+    pub timeline_events: Vec<TimelineEvent>,
 }

 /// Lightweight row returned by list/search commands.
@@ -121,9 +122,31 @@ pub struct FiveWhyEntry {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct TimelineEvent {
     pub id: String,
+    pub issue_id: String,
     pub event_type: String,
     pub description: String,
-    pub created_at: i64,
+    pub metadata: String,
+    pub created_at: String,
+}
+
+impl TimelineEvent {
+    pub fn new(
+        issue_id: String,
+        event_type: String,
+        description: String,
+        metadata: String,
+    ) -> Self {
+        TimelineEvent {
+            id: Uuid::now_v7().to_string(),
+            issue_id,
+            event_type,
+            description,
+            metadata,
+            created_at: chrono::Utc::now()
+                .format("%Y-%m-%d %H:%M:%S UTC")
+                .to_string(),
+        }
+    }
 }

 // ─── Log File ───────────────────────────────────────────────────────────────
````
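`TimelineEvent::new` draws ids from `Uuid::now_v7()`, which embeds a millisecond Unix timestamp in the most significant bits, so later ids sort after earlier ones; that is what keeps rows keyed by `id TEXT PRIMARY KEY` roughly chronological even before the `created_at` index is consulted. A small sketch, assuming the `uuid` crate with its `v7` feature:

```rust
use std::{thread, time::Duration};
use uuid::Uuid;

fn main() {
    let a = Uuid::now_v7();
    thread::sleep(Duration::from_millis(2)); // cross a millisecond boundary
    let b = Uuid::now_v7();
    // The timestamp prefix makes string comparison follow creation order
    // across millisecond boundaries.
    assert!(a.to_string() < b.to_string());
}
```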
---

**docs/postmortem.rs**

````diff
@@ -1,4 +1,5 @@
 use crate::db::models::IssueDetail;
+use crate::docs::rca::{calculate_duration, format_event_type};

 pub fn generate_postmortem_markdown(detail: &IssueDetail) -> String {
     let issue = &detail.issue;
@@ -51,7 +52,16 @@ pub fn generate_postmortem_markdown(detail: &IssueDetail) -> String {

     // Impact
     md.push_str("## Impact\n\n");
-    md.push_str("- **Duration:** _[How long did the incident last?]_\n");
+    if detail.timeline_events.len() >= 2 {
+        let first = &detail.timeline_events[0].created_at;
+        let last = &detail.timeline_events[detail.timeline_events.len() - 1].created_at;
+        md.push_str(&format!(
+            "- **Duration:** {}\n",
+            calculate_duration(first, last)
+        ));
+    } else {
+        md.push_str("- **Duration:** _[How long did the incident last?]_\n");
+    }
     md.push_str("- **Users Affected:** _[Number/percentage of affected users]_\n");
     md.push_str("- **Revenue Impact:** _[Financial impact, if applicable]_\n");
     md.push_str("- **SLA Impact:** _[Were any SLAs breached?]_\n\n");
@@ -67,7 +77,19 @@ pub fn generate_postmortem_markdown(detail: &IssueDetail) -> String {
     if let Some(ref resolved) = issue.resolved_at {
         md.push_str(&format!("| {resolved} | Issue resolved |\n"));
     }
-    md.push_str("| _HH:MM_ | _[Add additional timeline events]_ |\n\n");
+    if detail.timeline_events.is_empty() {
+        md.push_str("| _HH:MM_ | _[Add additional timeline events]_ |\n");
+    } else {
+        for event in &detail.timeline_events {
+            md.push_str(&format!(
+                "| {} | {} - {} |\n",
+                event.created_at,
+                format_event_type(&event.event_type),
+                event.description
+            ));
+        }
+    }
+    md.push('\n');

     // Root Cause Analysis
     md.push_str("## Root Cause Analysis\n\n");
@@ -114,6 +136,19 @@ pub fn generate_postmortem_markdown(detail: &IssueDetail) -> String {

     // What Went Well
     md.push_str("## What Went Well\n\n");
+    if !detail.resolution_steps.is_empty() {
+        md.push_str(&format!(
+            "- Systematic 5-whys analysis conducted ({} steps completed)\n",
+            detail.resolution_steps.len()
+        ));
+    }
+    if detail
+        .timeline_events
+        .iter()
+        .any(|e| e.event_type == "root_cause_identified")
+    {
+        md.push_str("- Root cause was identified during triage\n");
+    }
     md.push_str("- _[e.g., Quick detection through existing alerts]_\n");
     md.push_str("- _[e.g., Effective cross-team collaboration]_\n");
     md.push_str("- _[e.g., Smooth communication with stakeholders]_\n\n");
@@ -158,7 +193,7 @@ pub fn generate_postmortem_markdown(detail: &IssueDetail) -> String {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::db::models::{Issue, IssueDetail, ResolutionStep};
+    use crate::db::models::{Issue, IssueDetail, ResolutionStep, TimelineEvent};

     fn make_test_detail() -> IssueDetail {
         IssueDetail {
@@ -188,6 +223,7 @@ mod tests {
                 created_at: "2025-02-10 09:00:00".to_string(),
             }],
             conversations: vec![],
+            timeline_events: vec![],
         }
     }
@@ -246,4 +282,76 @@ mod tests {
         assert!(md.contains("| Priority | Action | Owner | Due Date | Status |"));
         assert!(md.contains("| P0 |"));
     }
+
+    #[test]
+    fn test_postmortem_timeline_with_real_events() {
+        let mut detail = make_test_detail();
+        detail.timeline_events = vec![
+            TimelineEvent {
+                id: "te-1".to_string(),
+                issue_id: "pm-456".to_string(),
+                event_type: "triage_started".to_string(),
+                description: "Triage initiated".to_string(),
+                metadata: "{}".to_string(),
+                created_at: "2025-02-10 08:05:00 UTC".to_string(),
+            },
+            TimelineEvent {
+                id: "te-2".to_string(),
+                issue_id: "pm-456".to_string(),
+                event_type: "root_cause_identified".to_string(),
+                description: "Certificate expiry confirmed".to_string(),
+                metadata: "{}".to_string(),
+                created_at: "2025-02-10 10:30:00 UTC".to_string(),
+            },
+        ];
+        let md = generate_postmortem_markdown(&detail);
+        assert!(md.contains("## Timeline"));
+        assert!(md.contains("| 2025-02-10 08:05:00 UTC | Triage Started - Triage initiated |"));
+        assert!(md.contains(
+            "| 2025-02-10 10:30:00 UTC | Root Cause Identified - Certificate expiry confirmed |"
+        ));
+        assert!(!md.contains("_[Add additional timeline events]_"));
+    }
+
+    #[test]
+    fn test_postmortem_impact_with_duration() {
+        let mut detail = make_test_detail();
+        detail.timeline_events = vec![
+            TimelineEvent {
+                id: "te-1".to_string(),
+                issue_id: "pm-456".to_string(),
+                event_type: "triage_started".to_string(),
+                description: "Triage initiated".to_string(),
+                metadata: "{}".to_string(),
+                created_at: "2025-02-10 08:00:00 UTC".to_string(),
+            },
+            TimelineEvent {
+                id: "te-2".to_string(),
+                issue_id: "pm-456".to_string(),
+                event_type: "root_cause_identified".to_string(),
+                description: "Found it".to_string(),
+                metadata: "{}".to_string(),
+                created_at: "2025-02-10 10:30:00 UTC".to_string(),
+            },
+        ];
+        let md = generate_postmortem_markdown(&detail);
+        assert!(md.contains("**Duration:** 2h 30m"));
+        assert!(!md.contains("_[How long did the incident last?]_"));
+    }
+
+    #[test]
+    fn test_postmortem_what_went_well_with_steps() {
+        let mut detail = make_test_detail();
+        detail.timeline_events = vec![TimelineEvent {
+            id: "te-1".to_string(),
+            issue_id: "pm-456".to_string(),
+            event_type: "root_cause_identified".to_string(),
+            description: "Root cause found".to_string(),
+            metadata: "{}".to_string(),
+            created_at: "2025-02-10 10:00:00 UTC".to_string(),
+        }];
+        let md = generate_postmortem_markdown(&detail);
+        assert!(md.contains("Systematic 5-whys analysis conducted (1 steps completed)"));
+        assert!(md.contains("Root cause was identified during triage"));
+    }
 }
````
@ -1,5 +1,48 @@
|
|||||||
use crate::db::models::IssueDetail;
|
use crate::db::models::IssueDetail;
|
||||||
|
|
||||||
|
pub fn format_event_type(event_type: &str) -> &str {
|
||||||
|
match event_type {
|
||||||
|
"triage_started" => "Triage Started",
|
||||||
|
"log_uploaded" => "Log File Uploaded",
|
||||||
|
"why_level_advanced" => "Why Level Advanced",
|
||||||
|
"root_cause_identified" => "Root Cause Identified",
|
||||||
|
"rca_generated" => "RCA Document Generated",
|
||||||
|
"postmortem_generated" => "Post-Mortem Generated",
|
||||||
|
"document_exported" => "Document Exported",
|
||||||
|
other => other,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn calculate_duration(start: &str, end: &str) -> String {
|
||||||
|
let fmt = "%Y-%m-%d %H:%M:%S UTC";
|
||||||
|
let start_dt = match chrono::NaiveDateTime::parse_from_str(start, fmt) {
|
||||||
|
Ok(dt) => dt,
|
||||||
|
Err(_) => return "N/A".to_string(),
|
||||||
|
};
|
||||||
|
let end_dt = match chrono::NaiveDateTime::parse_from_str(end, fmt) {
|
||||||
|
Ok(dt) => dt,
|
||||||
|
Err(_) => return "N/A".to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let duration = end_dt.signed_duration_since(start_dt);
|
||||||
|
let total_minutes = duration.num_minutes();
|
||||||
|
if total_minutes < 0 {
|
||||||
|
return "N/A".to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
let days = total_minutes / (24 * 60);
|
||||||
|
let hours = (total_minutes % (24 * 60)) / 60;
|
||||||
|
let minutes = total_minutes % 60;
|
||||||
|
|
||||||
|
if days > 0 {
|
||||||
|
format!("{days}d {hours}h")
|
||||||
|
} else if hours > 0 {
|
||||||
|
format!("{hours}h {minutes}m")
|
||||||
|
} else {
|
||||||
|
format!("{minutes}m")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn generate_rca_markdown(detail: &IssueDetail) -> String {
|
pub fn generate_rca_markdown(detail: &IssueDetail) -> String {
|
||||||
let issue = &detail.issue;
|
let issue = &detail.issue;
|
||||||
|
|
||||||
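A quick sanity check of how the two helpers behave, using values consistent with the unit tests further down (note the day-level branch intentionally drops the minute remainder):

// Illustrative calls; timestamps follow the "%Y-%m-%d %H:%M:%S UTC" format expected above.
assert_eq!(format_event_type("log_uploaded"), "Log File Uploaded");
assert_eq!(
    calculate_duration("2025-01-15 10:00:00 UTC", "2025-01-16 11:30:00 UTC"),
    "1d 1h" // 25h 30m: the days branch reports only days and hours
);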
@@ -57,6 +100,52 @@ pub fn generate_rca_markdown(detail: &IssueDetail) -> String {
        md.push_str("\n\n");
    }

    // Incident Timeline
    md.push_str("## Incident Timeline\n\n");
    if detail.timeline_events.is_empty() {
        md.push_str("_No timeline events recorded._\n\n");
    } else {
        md.push_str("| Time (UTC) | Event | Description |\n");
        md.push_str("|------------|-------|-------------|\n");
        for event in &detail.timeline_events {
            md.push_str(&format!(
                "| {} | {} | {} |\n",
                event.created_at,
                format_event_type(&event.event_type),
                event.description
            ));
        }
        md.push('\n');
    }

    // Incident Metrics
    md.push_str("## Incident Metrics\n\n");
    md.push_str(&format!(
        "- **Total Events:** {}\n",
        detail.timeline_events.len()
    ));
    if detail.timeline_events.len() >= 2 {
        let first = &detail.timeline_events[0].created_at;
        let last = &detail.timeline_events[detail.timeline_events.len() - 1].created_at;
        md.push_str(&format!(
            "- **Incident Duration:** {}\n",
            calculate_duration(first, last)
        ));
    } else {
        md.push_str("- **Incident Duration:** N/A\n");
    }
    let root_cause_event = detail
        .timeline_events
        .iter()
        .find(|e| e.event_type == "root_cause_identified");
    if let (Some(first), Some(rc)) = (detail.timeline_events.first(), root_cause_event) {
        md.push_str(&format!(
            "- **Time to Root Cause:** {}\n",
            calculate_duration(&first.created_at, &rc.created_at)
        ));
    }
    md.push('\n');

    // 5 Whys Analysis
    md.push_str("## 5 Whys Analysis\n\n");
    if detail.resolution_steps.is_empty() {
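For orientation, the two new sections render roughly like this for a two-event timeline (sample data drawn from the tests below):

## Incident Timeline

| Time (UTC) | Event | Description |
|------------|-------|-------------|
| 2025-01-15 10:00:00 UTC | Triage Started | Triage started |
| 2025-01-15 12:15:00 UTC | Root Cause Identified | Root cause found |

## Incident Metrics

- **Total Events:** 2
- **Incident Duration:** 2h 15m
- **Time to Root Cause:** 2h 15m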
@@ -143,7 +232,7 @@ pub fn generate_rca_markdown(detail: &IssueDetail) -> String {
#[cfg(test)]
mod tests {
    use super::*;
    use crate::db::models::{Issue, IssueDetail, LogFile, ResolutionStep, TimelineEvent};

    fn make_test_detail() -> IssueDetail {
        IssueDetail {
@@ -194,6 +283,7 @@ mod tests {
                },
            ],
            conversations: vec![],
            timeline_events: vec![],
        }
    }
@@ -247,4 +337,135 @@ mod tests {
        let md = generate_rca_markdown(&detail);
        assert!(md.contains("Unassigned"));
    }

    #[test]
    fn test_rca_timeline_section_with_events() {
        let mut detail = make_test_detail();
        detail.timeline_events = vec![
            TimelineEvent {
                id: "te-1".to_string(),
                issue_id: "test-123".to_string(),
                event_type: "triage_started".to_string(),
                description: "Triage initiated by oncall".to_string(),
                metadata: "{}".to_string(),
                created_at: "2025-01-15 10:00:00 UTC".to_string(),
            },
            TimelineEvent {
                id: "te-2".to_string(),
                issue_id: "test-123".to_string(),
                event_type: "log_uploaded".to_string(),
                description: "app.log uploaded".to_string(),
                metadata: "{}".to_string(),
                created_at: "2025-01-15 10:30:00 UTC".to_string(),
            },
            TimelineEvent {
                id: "te-3".to_string(),
                issue_id: "test-123".to_string(),
                event_type: "root_cause_identified".to_string(),
                description: "Connection pool leak found".to_string(),
                metadata: "{}".to_string(),
                created_at: "2025-01-15 12:15:00 UTC".to_string(),
            },
        ];
        let md = generate_rca_markdown(&detail);
        assert!(md.contains("## Incident Timeline"));
        assert!(md.contains("| Time (UTC) | Event | Description |"));
        assert!(md
            .contains("| 2025-01-15 10:00:00 UTC | Triage Started | Triage initiated by oncall |"));
        assert!(md.contains("| 2025-01-15 10:30:00 UTC | Log File Uploaded | app.log uploaded |"));
        assert!(md.contains(
            "| 2025-01-15 12:15:00 UTC | Root Cause Identified | Connection pool leak found |"
        ));
    }

    #[test]
    fn test_rca_timeline_section_empty() {
        let detail = make_test_detail();
        let md = generate_rca_markdown(&detail);
        assert!(md.contains("## Incident Timeline"));
        assert!(md.contains("_No timeline events recorded._"));
    }

    #[test]
    fn test_rca_metrics_section() {
        let mut detail = make_test_detail();
        detail.timeline_events = vec![
            TimelineEvent {
                id: "te-1".to_string(),
                issue_id: "test-123".to_string(),
                event_type: "triage_started".to_string(),
                description: "Triage started".to_string(),
                metadata: "{}".to_string(),
                created_at: "2025-01-15 10:00:00 UTC".to_string(),
            },
            TimelineEvent {
                id: "te-2".to_string(),
                issue_id: "test-123".to_string(),
                event_type: "root_cause_identified".to_string(),
                description: "Root cause found".to_string(),
                metadata: "{}".to_string(),
                created_at: "2025-01-15 12:15:00 UTC".to_string(),
            },
        ];
        let md = generate_rca_markdown(&detail);
        assert!(md.contains("## Incident Metrics"));
        assert!(md.contains("**Total Events:** 2"));
        assert!(md.contains("**Incident Duration:** 2h 15m"));
        assert!(md.contains("**Time to Root Cause:** 2h 15m"));
    }

    #[test]
    fn test_calculate_duration_hours_minutes() {
        assert_eq!(
            calculate_duration("2025-01-15 10:00:00 UTC", "2025-01-15 12:15:00 UTC"),
            "2h 15m"
        );
    }

    #[test]
    fn test_calculate_duration_days() {
        assert_eq!(
            calculate_duration("2025-01-15 10:00:00 UTC", "2025-01-18 11:00:00 UTC"),
            "3d 1h"
        );
    }

    #[test]
    fn test_calculate_duration_minutes_only() {
        assert_eq!(
            calculate_duration("2025-01-15 10:00:00 UTC", "2025-01-15 10:45:00 UTC"),
            "45m"
        );
    }

    #[test]
    fn test_calculate_duration_invalid() {
        assert_eq!(calculate_duration("bad-date", "also-bad"), "N/A");
    }

    #[test]
    fn test_format_event_type_known() {
        assert_eq!(format_event_type("triage_started"), "Triage Started");
        assert_eq!(format_event_type("log_uploaded"), "Log File Uploaded");
        assert_eq!(
            format_event_type("why_level_advanced"),
            "Why Level Advanced"
        );
        assert_eq!(
            format_event_type("root_cause_identified"),
            "Root Cause Identified"
        );
        assert_eq!(format_event_type("rca_generated"), "RCA Document Generated");
        assert_eq!(
            format_event_type("postmortem_generated"),
            "Post-Mortem Generated"
        );
        assert_eq!(format_event_type("document_exported"), "Document Exported");
    }

    #[test]
    fn test_format_event_type_unknown() {
        assert_eq!(format_event_type("custom_event"), "custom_event");
        assert_eq!(format_event_type(""), "");
    }
}
@@ -629,11 +629,10 @@ mod tests {

    #[test]
    fn test_derive_aes_key_is_stable_for_same_input() {
        // Use deterministic helper to avoid env var race conditions in parallel tests
        let k1 = derive_aes_key_from_str("stable-test-key").unwrap();
        let k2 = derive_aes_key_from_str("stable-test-key").unwrap();
        assert_eq!(k1, k2);
    }

    // Test helper functions that accept key directly (bypass env var)
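Context for this change: Rust runs tests in parallel threads by default, and std::env::set_var mutates process-global state, so two tests touching TFTSR_ENCRYPTION_KEY could race. The replacement helper takes the key directly. Its implementation is not part of this hunk; a minimal sketch of the shape, assuming a SHA-256 based derivation (the hash choice is an assumption for illustration only):

// Hypothetical sketch only - the real derive_aes_key_from_str lives elsewhere in this module.
fn derive_aes_key_from_str_sketch(key: &str) -> Result<[u8; 32], String> {
    use sha2::{Digest, Sha256};
    let mut out = [0u8; 32];
    out.copy_from_slice(&Sha256::digest(key.as_bytes()));
    Ok(out)
}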
@@ -1,4 +1,40 @@
use super::confluence_search::SearchResult;
use crate::integrations::query_expansion::expand_query;

const MAX_EXPANDED_QUERIES: usize = 3;

fn escape_wiql(s: &str) -> String {
    s.replace('\'', "''")
        .replace('"', "\\\"")
        .replace('\\', "\\\\")
        .replace('(', "\\(")
        .replace(')', "\\)")
        .replace(';', "\\;")
        .replace('=', "\\=")
}

/// Basic HTML tag stripping to prevent XSS in excerpts
fn strip_html_tags(html: &str) -> String {
    let mut result = String::new();
    let mut in_tag = false;

    for ch in html.chars() {
        match ch {
            '<' => in_tag = true,
            '>' => in_tag = false,
            _ if !in_tag => result.push(ch),
            _ => {}
        }
    }

    // Clean up whitespace
    result
        .split_whitespace()
        .collect::<Vec<_>>()
        .join(" ")
        .trim()
        .to_string()
}

/// Search Azure DevOps Wiki for content matching the query
pub async fn search_wiki(
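One subtlety in escape_wiql: the backslash replacement runs after the double-quote replacement, so the backslash inserted for a quote is itself doubled and a double quote ends up as backslash-backslash-quote (the unit test later in this file asserts exactly this). Worth keeping in mind if the escape set is ever reordered:

// The '"' -> '\"' step runs first; the later '\' -> '\\' step doubles that inserted backslash.
assert_eq!(escape_wiql("a\"b"), "a\\\\\"b"); // i.e. the literal text a\\"b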
@@ -10,90 +46,94 @@ pub async fn search_wiki(
    let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
    let client = reqwest::Client::new();

    let expanded_queries = expand_query(query);

    let mut all_results = Vec::new();

    for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
        // Use Azure DevOps Search API
        let search_url = format!(
            "{}/_apis/search/wikisearchresults?api-version=7.0",
            org_url.trim_end_matches('/')
        );

        let search_body = serde_json::json!({
            "searchText": expanded_query,
            "$top": 5,
            "filters": {
                "ProjectFilters": [project]
            }
        });

        tracing::info!("Searching Azure DevOps Wiki with query: {}", expanded_query);

        let resp = client
            .post(&search_url)
            .header("Cookie", &cookie_header)
            .header("Accept", "application/json")
            .header("Content-Type", "application/json")
            .json(&search_body)
            .send()
            .await
            .map_err(|e| format!("Azure DevOps wiki search failed: {e}"))?;

        if !resp.status().is_success() {
            let status = resp.status();
            let text = resp.text().await.unwrap_or_default();
            tracing::warn!("Azure DevOps wiki search failed with status {status}: {text}");
            continue;
        }

        let json: serde_json::Value = resp
            .json()
            .await
            .map_err(|e| format!("Failed to parse ADO wiki search response: {e}"))?;

        if let Some(results_array) = json["results"].as_array() {
            for item in results_array.iter().take(MAX_EXPANDED_QUERIES) {
                let title = item["fileName"].as_str().unwrap_or("Untitled").to_string();

                let path = item["path"].as_str().unwrap_or("");
                let url = format!(
                    "{}/_wiki/wikis/{}/{}",
                    org_url.trim_end_matches('/'),
                    project,
                    path
                );

                let excerpt = strip_html_tags(item["content"].as_str().unwrap_or(""))
                    .chars()
                    .take(300)
                    .collect::<String>();

                // Fetch full wiki page content
                let content = if let Some(wiki_id) = item["wiki"]["id"].as_str() {
                    if let Some(page_path) = item["path"].as_str() {
                        fetch_wiki_page(org_url, wiki_id, page_path, &cookie_header)
                            .await
                            .ok()
                    } else {
                        None
                    }
                } else {
                    None
                };

                all_results.push(SearchResult {
                    title,
                    url,
                    excerpt,
                    content,
                    source: "Azure DevOps".to_string(),
                });
            }
        }
    }

    all_results.sort_by(|a, b| a.url.cmp(&b.url));
    all_results.dedup_by(|a, b| a.url == b.url);

    Ok(all_results)
}

/// Fetch full wiki page content
@@ -151,55 +191,68 @@ pub async fn search_work_items(
    let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
    let client = reqwest::Client::new();

    let expanded_queries = expand_query(query);

    let mut all_results = Vec::new();

    for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
        // Use WIQL (Work Item Query Language)
        let wiql_url = format!(
            "{}/_apis/wit/wiql?api-version=7.0",
            org_url.trim_end_matches('/')
        );

        let safe_query = escape_wiql(expanded_query);
        let wiql_query = format!(
            "SELECT [System.Id], [System.Title], [System.Description], [System.State] FROM WorkItems WHERE [System.TeamProject] = '{project}' AND ([System.Title] ~ '{safe_query}' OR [System.Description] ~ '{safe_query}') ORDER BY [System.ChangedDate] DESC"
        );

        let wiql_body = serde_json::json!({
            "query": wiql_query
        });

        tracing::info!(
            "Searching Azure DevOps work items with query: {}",
            expanded_query
        );

        let resp = client
            .post(&wiql_url)
            .header("Cookie", &cookie_header)
            .header("Accept", "application/json")
            .header("Content-Type", "application/json")
            .json(&wiql_body)
            .send()
            .await
            .map_err(|e| format!("ADO work item search failed: {e}"))?;

        if !resp.status().is_success() {
            continue; // Don't fail if work item search fails
        }

        let json: serde_json::Value = resp
            .json()
            .await
            .map_err(|_| "Failed to parse work item response".to_string())?;

        if let Some(work_items) = json["workItems"].as_array() {
            // Fetch details for top 3 work items
            for item in work_items.iter().take(MAX_EXPANDED_QUERIES) {
                if let Some(id) = item["id"].as_i64() {
                    if let Ok(work_item) =
                        fetch_work_item_details(org_url, id, &cookie_header).await
                    {
                        all_results.push(work_item);
                    }
                }
            }
        }
    }

    all_results.sort_by(|a, b| a.url.cmp(&b.url));
    all_results.dedup_by(|a, b| a.url == b.url);

    Ok(all_results)
}

/// Fetch work item details
@@ -263,3 +316,53 @@ async fn fetch_work_item_details(
        source: "Azure DevOps".to_string(),
    })
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_escape_wiql_escapes_single_quotes() {
        assert_eq!(escape_wiql("test'single"), "test''single");
    }

    #[test]
    fn test_escape_wiql_escapes_double_quotes() {
        assert_eq!(escape_wiql("test\"double"), "test\\\\\"double");
    }

    #[test]
    fn test_escape_wiql_escapes_backslash() {
        assert_eq!(escape_wiql("test\\backslash"), r#"test\\backslash"#);
    }

    #[test]
    fn test_escape_wiql_escapes_parens() {
        assert_eq!(escape_wiql("test(paren"), r#"test\(paren"#);
        assert_eq!(escape_wiql("test)paren"), r#"test\)paren"#);
    }

    #[test]
    fn test_escape_wiql_escapes_semicolon() {
        assert_eq!(escape_wiql("test;semi"), r#"test\;semi"#);
    }

    #[test]
    fn test_escape_wiql_escapes_equals() {
        assert_eq!(escape_wiql("test=equal"), r#"test\=equal"#);
    }

    #[test]
    fn test_escape_wiql_no_special_chars() {
        assert_eq!(escape_wiql("simple query"), "simple query");
    }

    #[test]
    fn test_strip_html_tags() {
        let html = "<p>Hello <strong>world</strong>!</p>";
        assert_eq!(strip_html_tags(html), "Hello world!");

        let html2 = "<div><h1>Title</h1><p>Content</p></div>";
        assert_eq!(strip_html_tags(html2), "TitleContent");
    }
}
@@ -1,4 +1,9 @@
use serde::{Deserialize, Serialize};
use url::Url;

use super::query_expansion::expand_query;

const MAX_EXPANDED_QUERIES: usize = 3;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResult {
@@ -6,10 +11,36 @@ pub struct SearchResult {
    pub url: String,
    pub excerpt: String,
    pub content: Option<String>,
    pub source: String,
}

fn canonicalize_url(url: &str) -> String {
    Url::parse(url)
        .ok()
        .map(|u| {
            let mut u = u.clone();
            u.set_fragment(None);
            u.set_query(None);
            u.to_string()
        })
        .unwrap_or_else(|| url.to_string())
}

fn escape_cql(s: &str) -> String {
    s.replace('"', "\\\"")
        .replace(')', "\\)")
        .replace('(', "\\(")
        .replace('~', "\\~")
        .replace('&', "\\&")
        .replace('|', "\\|")
        .replace('+', "\\+")
        .replace('-', "\\-")
}

/// Search Confluence for content matching the query
///
/// This function expands the user query with related terms, synonyms, and variations
/// to improve search coverage across Confluence spaces.
pub async fn search_confluence(
    base_url: &str,
    query: &str,
@@ -18,86 +49,89 @@ pub async fn search_confluence(
    let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
    let client = reqwest::Client::new();

    let expanded_queries = expand_query(query);

    let mut all_results = Vec::new();

    for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
        let safe_query = escape_cql(expanded_query);
        let search_url = format!(
            "{}/rest/api/search?cql=text~\"{}\"&limit=5",
            base_url.trim_end_matches('/'),
            urlencoding::encode(&safe_query)
        );

        tracing::info!(
            "Searching Confluence with expanded query: {}",
            expanded_query
        );

        let resp = client
            .get(&search_url)
            .header("Cookie", &cookie_header)
            .header("Accept", "application/json")
            .send()
            .await
            .map_err(|e| format!("Confluence search request failed: {e}"))?;

        if !resp.status().is_success() {
            let status = resp.status();
            let text = resp.text().await.unwrap_or_default();
            tracing::warn!("Confluence search failed with status {status}: {text}");
            continue;
        }

        let json: serde_json::Value = resp
            .json()
            .await
            .map_err(|e| format!("Failed to parse Confluence search response: {e}"))?;

        if let Some(results_array) = json["results"].as_array() {
            for item in results_array.iter().take(MAX_EXPANDED_QUERIES) {
                let title = item["title"].as_str().unwrap_or("Untitled").to_string();

                let id = item["content"]["id"].as_str();
                let space_key = item["content"]["space"]["key"].as_str();

                let url = if let (Some(id_str), Some(space)) = (id, space_key) {
                    format!(
                        "{}/display/{}/{}",
                        base_url.trim_end_matches('/'),
                        space,
                        id_str
                    )
                } else {
                    base_url.to_string()
                };

                let excerpt = strip_html_tags(item["excerpt"].as_str().unwrap_or(""))
                    .chars()
                    .take(300)
                    .collect::<String>();

                let content = if let Some(content_id) = id {
                    fetch_page_content(base_url, content_id, &cookie_header)
                        .await
                        .ok()
                } else {
                    None
                };

                all_results.push(SearchResult {
                    title,
                    url,
                    excerpt,
                    content,
                    source: "Confluence".to_string(),
                });
            }
        }
    }

    all_results.sort_by(|a, b| canonicalize_url(&a.url).cmp(&canonicalize_url(&b.url)));
    all_results.dedup_by(|a, b| canonicalize_url(&a.url) == canonicalize_url(&b.url));

    Ok(all_results)
}

/// Fetch full content of a Confluence page
@@ -185,4 +219,43 @@ mod tests {
        let html2 = "<div><h1>Title</h1><p>Content</p></div>";
        assert_eq!(strip_html_tags(html2), "TitleContent");
    }

    #[test]
    fn test_escape_cql_escapes_special_chars() {
        assert_eq!(escape_cql("test\"quote"), r#"test\"quote"#);
        assert_eq!(escape_cql("test(paren"), r#"test\(paren"#);
        assert_eq!(escape_cql("test)paren"), r#"test\)paren"#);
        assert_eq!(escape_cql("test~tilde"), r#"test\~tilde"#);
        assert_eq!(escape_cql("test&and"), r#"test\&and"#);
        assert_eq!(escape_cql("test|or"), r#"test\|or"#);
        assert_eq!(escape_cql("test+plus"), r#"test\+plus"#);
        assert_eq!(escape_cql("test-minus"), r#"test\-minus"#);
    }

    #[test]
    fn test_escape_cql_no_special_chars() {
        assert_eq!(escape_cql("simple query"), "simple query");
    }

    #[test]
    fn test_canonicalize_url_removes_fragment() {
        assert_eq!(
            canonicalize_url("https://example.com/page#section"),
            "https://example.com/page"
        );
    }

    #[test]
    fn test_canonicalize_url_removes_query() {
        assert_eq!(
            canonicalize_url("https://example.com/page?param=value"),
            "https://example.com/page"
        );
    }

    #[test]
    fn test_canonicalize_url_handles_malformed() {
        // Malformed URLs fall back to original
        assert_eq!(canonicalize_url("not a url"), "not a url");
    }
}
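The escaping tested above is what keeps user input inside the text~"..." CQL string literal; without it, an embedded quote could terminate the literal and inject extra CQL. A small illustration of the neutralized form:

// The embedded quote is escaped, so the "OR" stays inside the string literal.
assert_eq!(escape_cql(r#"x" OR type=page"#), r#"x\" OR type=page"#);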
@@ -4,6 +4,7 @@ pub mod azuredevops_search;
pub mod callback_server;
pub mod confluence;
pub mod confluence_search;
pub mod query_expansion;
pub mod servicenow;
pub mod servicenow_search;
pub mod webview_auth;
src-tauri/src/integrations/query_expansion.rs (new file, 290 lines)
@@ -0,0 +1,290 @@
//! Query expansion module for integration search
//!
//! This module provides functionality to expand user queries with related terms,
//! synonyms, and variations to improve search results across integrations like
//! Confluence, ServiceNow, and Azure DevOps.

use std::collections::HashSet;

/// Product name synonyms for common product variations.
/// Maps common abbreviations/variants to their full names for search expansion.
fn get_product_synonyms(query: &str) -> Vec<String> {
    let mut synonyms = Vec::new();

    // VESTA NXT related synonyms
    if query.to_lowercase().contains("vesta") || query.to_lowercase().contains("vnxt") {
        synonyms.extend(vec![
            "VESTA NXT".to_string(),
            "Vesta NXT".to_string(),
            "VNXT".to_string(),
            "vnxt".to_string(),
            "Vesta".to_string(),
            "vesta".to_string(),
            "VNX".to_string(),
            "vnx".to_string(),
        ]);
    }

    // Version number patterns (e.g., 1.0.12, 1.1.9)
    if query.contains('.') {
        // Extract version-like patterns and add variations
        let version_parts: Vec<&str> = query.split('.').collect();
        if version_parts.len() >= 2 {
            // Add a variation without dots
            let version_no_dots = version_parts.join("");
            synonyms.push(version_no_dots);

            // Add partial versions
            if version_parts.len() >= 2 {
                synonyms.push(version_parts[0..2].join("."));
            }
            if version_parts.len() >= 3 {
                synonyms.push(version_parts[0..3].join("."));
            }
        }
    }

    // Common upgrade-related terms
    if query.to_lowercase().contains("upgrade") || query.to_lowercase().contains("update") {
        synonyms.extend(vec![
            "upgrade".to_string(),
            "update".to_string(),
            "migration".to_string(),
            "patch".to_string(),
            "version".to_string(),
            "install".to_string(),
            "installation".to_string(),
        ]);
    }

    // Remove duplicates and empty strings
    synonyms.sort();
    synonyms.dedup();
    synonyms.retain(|s| !s.is_empty());

    synonyms
}

/// Expand a search query with related terms for better search coverage
///
/// This function takes a user query and expands it with:
/// - Product name synonyms (e.g., "VNXT" -> "VESTA NXT", "Vesta NXT")
/// - Version number variations
/// - Related terms based on query content
///
/// # Arguments
/// * `query` - The original user query
///
/// # Returns
/// A sorted, deduplicated vector of query strings to search, containing the
/// original query plus expanded variations (because of the sort, the original
/// query is not necessarily first). Returns an empty vector only if the input
/// is empty or whitespace-only.
pub fn expand_query(query: &str) -> Vec<String> {
    if query.trim().is_empty() {
        return Vec::new();
    }

    let mut expanded = vec![query.to_string()];

    // Get product synonyms
    let product_synonyms = get_product_synonyms(query);
    expanded.extend(product_synonyms);

    // Extract keywords from the query for additional expansion
    let keywords = extract_keywords(query);

    // Add keyword variations
    for keyword in keywords.iter().take(5) {
        if !expanded.contains(keyword) {
            expanded.push(keyword.clone());
        }
    }

    // Add common related terms based on query content
    let query_lower = query.to_lowercase();

    if query_lower.contains("confluence") || query_lower.contains("documentation") {
        expanded.push("docs".to_string());
        expanded.push("manual".to_string());
        expanded.push("guide".to_string());
    }

    if query_lower.contains("deploy") || query_lower.contains("deployment") {
        expanded.push("deploy".to_string());
        expanded.push("deployment".to_string());
        expanded.push("release".to_string());
        expanded.push("build".to_string());
    }

    if query_lower.contains("kubernetes") || query_lower.contains("k8s") {
        expanded.push("kubernetes".to_string());
        expanded.push("k8s".to_string());
        expanded.push("pod".to_string());
        expanded.push("container".to_string());
    }

    // Remove duplicates and empty strings
    expanded.sort();
    expanded.dedup();
    expanded.retain(|s| !s.is_empty());

    expanded
}

/// Extract important keywords from a search query
///
/// This function removes stop words and extracts meaningful terms
/// for search expansion.
///
/// # Arguments
/// * `query` - The original user query
///
/// # Returns
/// A vector of extracted keywords
fn extract_keywords(query: &str) -> Vec<String> {
    let stop_words: HashSet<&str> = [
        "how", "do", "i", "the", "a", "an", "is", "are", "was", "were", "be", "been", "being",
        "have", "has", "had", "having", "does", "did", "doing", "will", "would", "should",
        "could", "can", "may", "might", "must", "to", "from", "in", "on", "at", "by", "for",
        "with", "about", "as", "of", "or", "and", "but", "not", "what", "when", "where", "which",
        "who", "this", "that", "these", "those", "if", "then", "else", "while", "until",
        "against", "between", "into", "through", "during", "before", "after", "above", "below",
        "up", "down", "out", "off", "over", "under", "again", "further", "once", "here",
        "there", "why", "all", "any", "both", "each", "few", "more", "most", "other",
        "some", "such", "no", "nor", "only", "own", "same", "so", "than", "too", "very",
        "just", "now",
    ]
    .into_iter()
    .collect();

    let mut keywords = Vec::new();
    let mut remaining = query.to_string();

    while !remaining.is_empty() {
        // Skip leading whitespace
        if remaining.starts_with(char::is_whitespace) {
            remaining = remaining.trim_start().to_string();
            continue;
        }

        // Try to extract a version number (e.g., 1.0.12, 1.1.9)
        if remaining.starts_with(|c: char| c.is_ascii_digit()) {
            let mut end_pos = 0;
            let mut dot_count = 0;

            for (i, c) in remaining.chars().enumerate() {
                if c.is_ascii_digit() {
                    end_pos = i + 1;
                } else if c == '.' {
                    end_pos = i + 1;
                    dot_count += 1;
                } else {
                    break;
                }
            }

            // Only extract if we have at least 2 dots (e.g., 1.0.12)
            if dot_count >= 2 && end_pos > 0 {
                let version = remaining[..end_pos].to_string();
                keywords.push(version.clone());
                remaining = remaining[end_pos..].to_string();
                continue;
            }
        }

        // Find a word boundary - split on whitespace or non-alphanumeric
        let mut split_pos = remaining.len();
        for (i, c) in remaining.chars().enumerate() {
            if c.is_whitespace() || !c.is_alphanumeric() {
                split_pos = i;
                break;
            }
        }

        // If split_pos is 0, the string starts with a non-alphanumeric character;
        // skip it and continue
        if split_pos == 0 {
            remaining = remaining[1..].to_string();
            continue;
        }

        let word = remaining[..split_pos].to_lowercase();
        remaining = remaining[split_pos..].to_string();

        // Skip empty words, single chars, and stop words
        if word.is_empty() || word.len() < 2 || stop_words.contains(word.as_str()) {
            continue;
        }

        // Add numeric words with 3+ digits
        if word.chars().all(|c| c.is_ascii_digit()) && word.len() >= 3 {
            keywords.push(word.clone());
            continue;
        }

        // Add words with at least one alphabetic character
        if word.chars().any(|c| c.is_alphabetic()) {
            keywords.push(word.clone());
        }
    }

    keywords.sort();
    keywords.dedup();

    keywords
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_expand_query_with_product_synonyms() {
        let query = "upgrade vesta nxt to 1.1.9";
        let expanded = expand_query(query);

        // Should contain the original query
        assert!(expanded.contains(&query.to_string()));

        // Should contain product synonyms
        assert!(expanded
            .iter()
            .any(|s| s.contains("VNXT") || s.contains("vnxt")));
    }

    #[test]
    fn test_expand_query_with_version_numbers() {
        let query = "version 1.0.12";
        let expanded = expand_query(query);

        // Should contain the original query
        assert!(expanded.contains(&query.to_string()));
    }

    #[test]
    fn test_extract_keywords() {
        let query = "How do I upgrade VESTA NXT from 1.0.12 to 1.1.9?";
        let keywords = extract_keywords(query);

        assert!(keywords.contains(&"upgrade".to_string()));
        assert!(keywords.contains(&"vesta".to_string()));
        assert!(keywords.contains(&"nxt".to_string()));
        assert!(keywords.contains(&"1.0.12".to_string()));
        assert!(keywords.contains(&"1.1.9".to_string()));
    }

    #[test]
    fn test_product_synonyms() {
        let synonyms = get_product_synonyms("vesta nxt upgrade");

        // Should contain the VNXT synonym
        assert!(synonyms
            .iter()
            .any(|s| s.contains("VNXT") || s.contains("vnxt")));
    }

    #[test]
    fn test_empty_query() {
        let expanded = expand_query("");
        assert!(expanded.is_empty() || expanded.contains(&"".to_string()));
    }
}
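As a quick illustration of what expand_query produces on a realistic query (the output is sorted and deduplicated, so ordering differs from insertion order):

// Illustrative usage; the exact variant set depends on the synonym tables above.
let queries = expand_query("upgrade vesta nxt to 1.1.9");
assert!(queries.contains(&"upgrade vesta nxt to 1.1.9".to_string())); // original kept
// Also present: product variants such as "VNXT" and "Vesta NXT", upgrade-related
// terms such as "migration", and the extracted version keyword "1.1.9".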
@@ -1,4 +1,7 @@
use super::confluence_search::SearchResult;
use crate::integrations::query_expansion::expand_query;

const MAX_EXPANDED_QUERIES: usize = 3;

/// Search ServiceNow Knowledge Base for content matching the query
pub async fn search_servicenow(
@@ -9,82 +12,88 @@ pub async fn search_servicenow(
    let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
    let client = reqwest::Client::new();

    let expanded_queries = expand_query(query);

    let mut all_results = Vec::new();

    for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
        // Search Knowledge Base articles
        let search_url = format!(
            "{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=5",
            instance_url.trim_end_matches('/'),
            urlencoding::encode(expanded_query),
            urlencoding::encode(expanded_query)
        );

        tracing::info!("Searching ServiceNow with query: {}", expanded_query);

        let resp = client
            .get(&search_url)
            .header("Cookie", &cookie_header)
            .header("Accept", "application/json")
            .send()
            .await
            .map_err(|e| format!("ServiceNow search request failed: {e}"))?;

        if !resp.status().is_success() {
            let status = resp.status();
            let text = resp.text().await.unwrap_or_default();
            tracing::warn!("ServiceNow search failed with status {status}: {text}");
            continue;
        }

        let json: serde_json::Value = resp
            .json()
            .await
            .map_err(|e| format!("Failed to parse ServiceNow search response: {e}"))?;

        if let Some(result_array) = json["result"].as_array() {
            for item in result_array.iter().take(MAX_EXPANDED_QUERIES) {
                // Take top 3 results
                let title = item["short_description"]
                    .as_str()
                    .unwrap_or("Untitled")
                    .to_string();

                let sys_id = item["sys_id"].as_str().unwrap_or("").to_string();

                let url = format!(
                    "{}/kb_view.do?sysparm_article={}",
                    instance_url.trim_end_matches('/'),
                    sys_id
                );

                let excerpt = item["text"]
                    .as_str()
                    .unwrap_or("")
                    .chars()
                    .take(300)
                    .collect::<String>();

                // Get full article content
                let content = item["text"].as_str().map(|text| {
                    if text.len() > 3000 {
                        format!("{}...", &text[..3000])
                    } else {
                        text.to_string()
                    }
                });

                all_results.push(SearchResult {
                    title,
                    url,
                    excerpt,
                    content,
                    source: "ServiceNow".to_string(),
                });
            }
        }
    }

    all_results.sort_by(|a, b| a.url.cmp(&b.url));
    all_results.dedup_by(|a, b| a.url == b.url);

    Ok(all_results)
}

/// Search ServiceNow Incidents for related issues
@@ -96,68 +105,78 @@ pub async fn search_incidents(
    let cookie_header = crate::integrations::webview_auth::cookies_to_header(cookies);
    let client = reqwest::Client::new();

    let expanded_queries = expand_query(query);

    let mut all_results = Vec::new();

    for expanded_query in expanded_queries.iter().take(MAX_EXPANDED_QUERIES) {
        // Search incidents
        let search_url = format!(
            "{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true",
            instance_url.trim_end_matches('/'),
            urlencoding::encode(expanded_query),
            urlencoding::encode(expanded_query)
        );

        tracing::info!(
            "Searching ServiceNow incidents with query: {}",
            expanded_query
        );

        let resp = client
            .get(&search_url)
            .header("Cookie", &cookie_header)
            .header("Accept", "application/json")
            .send()
            .await
            .map_err(|e| format!("ServiceNow incident search failed: {e}"))?;

        if !resp.status().is_success() {
            continue; // Don't fail if incident search fails
        }

        let json: serde_json::Value = resp
            .json()
            .await
            .map_err(|_| "Failed to parse incident response".to_string())?;

        if let Some(result_array) = json["result"].as_array() {
            for item in result_array.iter() {
                let number = item["number"].as_str().unwrap_or("Unknown");
                let title = format!(
                    "Incident {}: {}",
                    number,
                    item["short_description"].as_str().unwrap_or("No title")
                );

                let sys_id = item["sys_id"].as_str().unwrap_or("");
                let url = format!(
                    "{}/incident.do?sys_id={}",
                    instance_url.trim_end_matches('/'),
                    sys_id
                );

                let description = item["description"].as_str().unwrap_or("").to_string();
                let resolution = item["close_notes"].as_str().unwrap_or("").to_string();
                let content = format!("Description: {description}\nResolution: {resolution}");
                let excerpt = content.chars().take(200).collect::<String>();

                all_results.push(SearchResult {
                    title,
                    url,
                    excerpt,
                    content: Some(content),
                    source: "ServiceNow".to_string(),
                });
            }
        }
    }

    all_results.sort_by(|a, b| a.url.cmp(&b.url));
    all_results.dedup_by(|a, b| a.url == b.url);

    Ok(all_results)
}
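A note on the sort-then-dedup pattern that now closes each search function: Vec::dedup_by removes only consecutive duplicates, which is why the results are sorted by URL first. A standalone illustration:

// dedup_by collapses adjacent equal elements, so sort before deduplicating.
let mut urls = vec!["b", "a", "b"];
urls.sort();
urls.dedup_by(|a, b| a == b);
assert_eq!(urls, vec!["a", "b"]);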
@@ -6,6 +6,7 @@ use serde_json::Value;
use tauri::WebviewWindow;

use super::confluence_search::SearchResult;
use crate::integrations::query_expansion::expand_query;

/// Execute an HTTP request from within the webview context
/// This automatically includes all cookies (including HttpOnly) from the authenticated session
@ -123,106 +124,113 @@ pub async fn search_confluence_webview<R: tauri::Runtime>(
|
|||||||
base_url: &str,
|
base_url: &str,
|
||||||
query: &str,
|
query: &str,
|
||||||
) -> Result<Vec<SearchResult>, String> {
|
) -> Result<Vec<SearchResult>, String> {
|
||||||
// Extract keywords from the query for better search
|
let expanded_queries = expand_query(query);
|
||||||
// Remove common words and extract important terms
|
|
||||||
let keywords = extract_keywords(query);
|
|
||||||
|
|
||||||
// Build CQL query with OR logic for keywords
|
let mut all_results = Vec::new();
|
||||||
let cql = if keywords.len() > 1 {
|
|
||||||
// Multiple keywords - search for any of them
|
|
||||||
let keyword_conditions: Vec<String> =
|
|
||||||
keywords.iter().map(|k| format!("text ~ \"{k}\"")).collect();
|
|
||||||
keyword_conditions.join(" OR ")
|
|
||||||
} else if !keywords.is_empty() {
|
|
||||||
// Single keyword
|
|
||||||
let keyword = &keywords[0];
|
|
||||||
format!("text ~ \"{keyword}\"")
|
|
||||||
} else {
|
|
||||||
// Fallback to original query
|
|
||||||
format!("text ~ \"{query}\"")
|
|
||||||
};
|
|
||||||
|
|
||||||
let search_url = format!(
|
for expanded_query in expanded_queries.iter().take(3) {
|
||||||
"{}/rest/api/search?cql={}&limit=10",
|
// Extract keywords from the query for better search
|
||||||
base_url.trim_end_matches('/'),
|
// Remove common words and extract important terms
|
||||||
urlencoding::encode(&cql)
|
let keywords = extract_keywords(expanded_query);
|
||||||
);
|
|
||||||
|
|
||||||
tracing::info!("Executing Confluence search via webview with CQL: {}", cql);
|
// Build CQL query with OR logic for keywords
|
||||||
|
let cql = if keywords.len() > 1 {
|
||||||
|
// Multiple keywords - search for any of them
|
||||||
|
let keyword_conditions: Vec<String> =
|
||||||
|
keywords.iter().map(|k| format!("text ~ \"{k}\"")).collect();
|
||||||
|
keyword_conditions.join(" OR ")
|
||||||
|
} else if !keywords.is_empty() {
|
||||||
|
// Single keyword
|
||||||
|
let keyword = &keywords[0];
|
||||||
|
format!("text ~ \"{keyword}\"")
|
||||||
|
} else {
|
||||||
|
// Fallback to expanded query
|
||||||
|
format!("text ~ \"{expanded_query}\"")
|
||||||
|
};
|
||||||
|
|
||||||
let response = fetch_from_webview(webview_window, &search_url, "GET", None).await?;
|
let search_url = format!(
|
||||||
|
"{}/rest/api/search?cql={}&limit=10",
|
||||||
|
base_url.trim_end_matches('/'),
|
||||||
|
urlencoding::encode(&cql)
|
||||||
|
);
|
||||||
|
|
||||||
let mut results = Vec::new();
|
tracing::info!("Executing Confluence search via webview with CQL: {}", cql);
|
||||||
|
|
||||||
if let Some(results_array) = response.get("results").and_then(|v| v.as_array()) {
|
let response = fetch_from_webview(webview_window, &search_url, "GET", None).await?;
|
||||||
for item in results_array.iter().take(5) {
|
|
||||||
let title = item["title"].as_str().unwrap_or("Untitled").to_string();
|
|
||||||
let content_id = item["content"]["id"].as_str();
|
|
||||||
let space_key = item["content"]["space"]["key"].as_str();
|
|
||||||
|
|
||||||
let url = if let (Some(id), Some(space)) = (content_id, space_key) {
|
if let Some(results_array) = response.get("results").and_then(|v| v.as_array()) {
|
||||||
format!(
|
for item in results_array.iter().take(5) {
|
||||||
"{}/display/{}/{}",
|
let title = item["title"].as_str().unwrap_or("Untitled").to_string();
|
||||||
base_url.trim_end_matches('/'),
|
let content_id = item["content"]["id"].as_str();
|
||||||
space,
|
let space_key = item["content"]["space"]["key"].as_str();
|
||||||
id
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
base_url.to_string()
|
|
||||||
};
|
|
||||||
|
|
||||||
let excerpt = item["excerpt"]
|
let url = if let (Some(id), Some(space)) = (content_id, space_key) {
|
||||||
.as_str()
|
format!(
|
||||||
.unwrap_or("")
|
"{}/display/{}/{}",
|
||||||
.replace("<span class=\"highlight\">", "")
|
base_url.trim_end_matches('/'),
|
||||||
.replace("</span>", "");
|
space,
|
||||||
|
id
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
base_url.to_string()
|
||||||
|
};
|
||||||
|
|
||||||
// Fetch full page content
|
let excerpt = item["excerpt"]
|
||||||
let content = if let Some(id) = content_id {
|
.as_str()
|
||||||
let content_url = format!(
|
.unwrap_or("")
|
||||||
"{}/rest/api/content/{id}?expand=body.storage",
|
.replace("<span class=\"highlight\">", "")
|
||||||
base_url.trim_end_matches('/')
|
.replace("</span>", "");
|
||||||
);
|
|
||||||
if let Ok(content_resp) =
|
// Fetch full page content
|
||||||
fetch_from_webview(webview_window, &content_url, "GET", None).await
|
let content = if let Some(id) = content_id {
|
||||||
{
|
let content_url = format!(
|
||||||
if let Some(body) = content_resp
|
"{}/rest/api/content/{id}?expand=body.storage",
|
||||||
.get("body")
|
base_url.trim_end_matches('/')
|
||||||
.and_then(|b| b.get("storage"))
|
);
|
||||||
.and_then(|s| s.get("value"))
|
if let Ok(content_resp) =
|
||||||
.and_then(|v| v.as_str())
|
fetch_from_webview(webview_window, &content_url, "GET", None).await
|
||||||
{
|
{
|
||||||
let text = strip_html_simple(body);
|
if let Some(body) = content_resp
|
||||||
Some(if text.len() > 3000 {
|
.get("body")
|
||||||
format!("{}...", &text[..3000])
|
.and_then(|b| b.get("storage"))
|
||||||
|
.and_then(|s| s.get("value"))
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
{
|
||||||
|
let text = strip_html_simple(body);
|
||||||
|
Some(if text.len() > 3000 {
|
||||||
|
format!("{}...", &text[..3000])
|
||||||
|
} else {
|
||||||
|
text
|
||||||
|
})
|
||||||
} else {
|
} else {
|
||||||
text
|
None
|
||||||
})
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
};
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
results.push(SearchResult {
|
all_results.push(SearchResult {
|
||||||
title,
|
title,
|
||||||
url,
|
url,
|
||||||
excerpt: excerpt.chars().take(300).collect(),
|
excerpt: excerpt.chars().take(300).collect(),
|
||||||
content,
|
content,
|
||||||
source: "Confluence".to_string(),
|
source: "Confluence".to_string(),
|
||||||
});
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
all_results.sort_by(|a, b| a.url.cmp(&b.url));
|
||||||
|
all_results.dedup_by(|a, b| a.url == b.url);
|
||||||
|
|
||||||
tracing::info!(
|
tracing::info!(
|
||||||
"Confluence webview search returned {} results",
|
"Confluence webview search returned {} results",
|
||||||
results.len()
|
all_results.len()
|
||||||
);
|
);
|
||||||
Ok(results)
|
Ok(all_results)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Extract keywords from a search query
|
/// Extract keywords from a search query
|
||||||
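Reviewer note: the new sort/dedup tail is correct but order-sensitive, because Vec::dedup_by only collapses consecutive elements, so the sort_by on URL must run first. A minimal standalone sketch of that behavior:

    fn main() {
        let mut urls = vec!["b", "a", "b"];
        urls.dedup_by(|x, y| x == y); // no-op: the two "b"s are not adjacent
        assert_eq!(urls, ["b", "a", "b"]);
        urls.sort();
        urls.dedup_by(|x, y| x == y); // duplicates are now adjacent and collapse
        assert_eq!(urls, ["a", "b"]);
    }

One side effect worth flagging: sorting by URL discards the relevance ordering returned by the Confluence search API.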
@@ -296,92 +304,99 @@ pub async fn search_servicenow_webview<R: tauri::Runtime>(
     instance_url: &str,
     query: &str,
 ) -> Result<Vec<SearchResult>, String> {
-    let mut results = Vec::new();
-
-    // Search knowledge base
-    let kb_url = format!(
-        "{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=3",
-        instance_url.trim_end_matches('/'),
-        urlencoding::encode(query),
-        urlencoding::encode(query)
-    );
-
-    tracing::info!("Executing ServiceNow KB search via webview");
-
-    if let Ok(kb_response) = fetch_from_webview(webview_window, &kb_url, "GET", None).await {
-        if let Some(kb_array) = kb_response.get("result").and_then(|v| v.as_array()) {
-            for item in kb_array {
-                let title = item["short_description"]
-                    .as_str()
-                    .unwrap_or("Untitled")
-                    .to_string();
-                let sys_id = item["sys_id"].as_str().unwrap_or("");
-                let url = format!(
-                    "{}/kb_view.do?sysparm_article={sys_id}",
-                    instance_url.trim_end_matches('/')
-                );
-                let text = item["text"].as_str().unwrap_or("");
-                let excerpt = text.chars().take(300).collect();
-                let content = Some(if text.len() > 3000 {
-                    format!("{}...", &text[..3000])
-                } else {
-                    text.to_string()
-                });
-
-                results.push(SearchResult {
-                    title,
-                    url,
-                    excerpt,
-                    content,
-                    source: "ServiceNow".to_string(),
-                });
-            }
-        }
-    }
-
-    // Search incidents
-    let inc_url = format!(
-        "{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true",
-        instance_url.trim_end_matches('/'),
-        urlencoding::encode(query),
-        urlencoding::encode(query)
-    );
-
-    if let Ok(inc_response) = fetch_from_webview(webview_window, &inc_url, "GET", None).await {
-        if let Some(inc_array) = inc_response.get("result").and_then(|v| v.as_array()) {
-            for item in inc_array {
-                let number = item["number"].as_str().unwrap_or("Unknown");
-                let title = format!(
-                    "Incident {}: {}",
-                    number,
-                    item["short_description"].as_str().unwrap_or("No title")
-                );
-                let sys_id = item["sys_id"].as_str().unwrap_or("");
-                let url = format!(
-                    "{}/incident.do?sys_id={sys_id}",
-                    instance_url.trim_end_matches('/')
-                );
-                let description = item["description"].as_str().unwrap_or("");
-                let resolution = item["close_notes"].as_str().unwrap_or("");
-                let content = format!("Description: {description}\nResolution: {resolution}");
-                let excerpt = content.chars().take(200).collect();
-
-                results.push(SearchResult {
-                    title,
-                    url,
-                    excerpt,
-                    content: Some(content),
-                    source: "ServiceNow".to_string(),
-                });
-            }
-        }
-    }
+    let expanded_queries = expand_query(query);
+
+    let mut all_results = Vec::new();
+
+    for expanded_query in expanded_queries.iter().take(3) {
+        // Search knowledge base
+        let kb_url = format!(
+            "{}/api/now/table/kb_knowledge?sysparm_query=textLIKE{}^ORshort_descriptionLIKE{}&sysparm_limit=3",
+            instance_url.trim_end_matches('/'),
+            urlencoding::encode(expanded_query),
+            urlencoding::encode(expanded_query)
+        );
+
+        tracing::info!("Executing ServiceNow KB search via webview with expanded query");
+
+        if let Ok(kb_response) = fetch_from_webview(webview_window, &kb_url, "GET", None).await {
+            if let Some(kb_array) = kb_response.get("result").and_then(|v| v.as_array()) {
+                for item in kb_array {
+                    let title = item["short_description"]
+                        .as_str()
+                        .unwrap_or("Untitled")
+                        .to_string();
+                    let sys_id = item["sys_id"].as_str().unwrap_or("");
+                    let url = format!(
+                        "{}/kb_view.do?sysparm_article={sys_id}",
+                        instance_url.trim_end_matches('/')
+                    );
+                    let text = item["text"].as_str().unwrap_or("");
+                    let excerpt = text.chars().take(300).collect();
+                    let content = Some(if text.len() > 3000 {
+                        format!("{}...", &text[..3000])
+                    } else {
+                        text.to_string()
+                    });
+
+                    all_results.push(SearchResult {
+                        title,
+                        url,
+                        excerpt,
+                        content,
+                        source: "ServiceNow".to_string(),
+                    });
+                }
+            }
+        }
+
+        // Search incidents
+        let inc_url = format!(
+            "{}/api/now/table/incident?sysparm_query=short_descriptionLIKE{}^ORdescriptionLIKE{}&sysparm_limit=3&sysparm_display_value=true",
+            instance_url.trim_end_matches('/'),
+            urlencoding::encode(expanded_query),
+            urlencoding::encode(expanded_query)
+        );
+
+        if let Ok(inc_response) = fetch_from_webview(webview_window, &inc_url, "GET", None).await {
+            if let Some(inc_array) = inc_response.get("result").and_then(|v| v.as_array()) {
+                for item in inc_array {
+                    let number = item["number"].as_str().unwrap_or("Unknown");
+                    let title = format!(
+                        "Incident {}: {}",
+                        number,
+                        item["short_description"].as_str().unwrap_or("No title")
+                    );
+                    let sys_id = item["sys_id"].as_str().unwrap_or("");
+                    let url = format!(
+                        "{}/incident.do?sys_id={sys_id}",
+                        instance_url.trim_end_matches('/')
+                    );
+                    let description = item["description"].as_str().unwrap_or("");
+                    let resolution = item["close_notes"].as_str().unwrap_or("");
+                    let content = format!("Description: {description}\nResolution: {resolution}");
+                    let excerpt = content.chars().take(200).collect();
+
+                    all_results.push(SearchResult {
+                        title,
+                        url,
+                        excerpt,
+                        content: Some(content),
+                        source: "ServiceNow".to_string(),
+                    });
+                }
+            }
+        }
+    }
+
+    all_results.sort_by(|a, b| a.url.cmp(&b.url));
+    all_results.dedup_by(|a, b| a.url == b.url);
 
     tracing::info!(
         "ServiceNow webview search returned {} results",
-        results.len()
+        all_results.len()
     );
-    Ok(results)
+    Ok(all_results)
 }
 
 /// Search Azure DevOps wiki using webview fetch
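Reviewer note: expand_query is called in all four search functions but is not part of this diff. The call sites only assume it returns a Vec<String> (consumed with .iter().take(3)), presumably with the original query included. A hypothetical sketch of that contract, not the actual implementation:

    // Hypothetical sketch only; the real expand_query is defined elsewhere.
    fn expand_query(query: &str) -> Vec<String> {
        let mut variants = vec![query.to_string()];
        // ASSUMPTION: variants are simple synonym rewrites of the query.
        for (from, to) in [("error", "failure"), ("unable to", "cannot")] {
            if query.contains(from) {
                variants.push(query.replace(from, to));
            }
        }
        variants
    }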
@@ -391,82 +406,89 @@ pub async fn search_azuredevops_wiki_webview<R: tauri::Runtime>(
     project: &str,
     query: &str,
 ) -> Result<Vec<SearchResult>, String> {
-    // Extract keywords for better search
-    let keywords = extract_keywords(query);
-
-    let search_text = if !keywords.is_empty() {
-        keywords.join(" ")
-    } else {
-        query.to_string()
-    };
-
-    // Azure DevOps wiki search API
-    let search_url = format!(
-        "{}/{}/_apis/wiki/wikis?api-version=7.0",
-        org_url.trim_end_matches('/'),
-        urlencoding::encode(project)
-    );
-
-    tracing::info!(
-        "Executing Azure DevOps wiki search via webview for: {}",
-        search_text
-    );
-
-    // First, get list of wikis
-    let wikis_response = fetch_from_webview(webview_window, &search_url, "GET", None).await?;
-
-    let mut results = Vec::new();
-
-    if let Some(wikis_array) = wikis_response.get("value").and_then(|v| v.as_array()) {
-        // Search each wiki
-        for wiki in wikis_array.iter().take(3) {
-            let wiki_id = wiki["id"].as_str().unwrap_or("");
-
-            if wiki_id.is_empty() {
-                continue;
-            }
-
-            // Search wiki pages
-            let pages_url = format!(
-                "{}/{}/_apis/wiki/wikis/{}/pages?recursionLevel=Full&includeContent=true&api-version=7.0",
-                org_url.trim_end_matches('/'),
-                urlencoding::encode(project),
-                urlencoding::encode(wiki_id)
-            );
-
-            if let Ok(pages_response) =
-                fetch_from_webview(webview_window, &pages_url, "GET", None).await
-            {
-                // Try to get "page" field, or use the response itself if it's the page object
-                if let Some(page) = pages_response.get("page") {
-                    search_page_recursive(
-                        page,
-                        &search_text,
-                        org_url,
-                        project,
-                        wiki_id,
-                        &mut results,
-                    );
-                } else {
-                    // Response might be the page object itself
-                    search_page_recursive(
-                        &pages_response,
-                        &search_text,
-                        org_url,
-                        project,
-                        wiki_id,
-                        &mut results,
-                    );
-                }
-            }
-        }
-    }
+    let expanded_queries = expand_query(query);
+
+    let mut all_results = Vec::new();
+
+    for expanded_query in expanded_queries.iter().take(3) {
+        // Extract keywords for better search
+        let keywords = extract_keywords(expanded_query);
+
+        let search_text = if !keywords.is_empty() {
+            keywords.join(" ")
+        } else {
+            expanded_query.clone()
+        };
+
+        // Azure DevOps wiki search API
+        let search_url = format!(
+            "{}/{}/_apis/wiki/wikis?api-version=7.0",
+            org_url.trim_end_matches('/'),
+            urlencoding::encode(project)
+        );
+
+        tracing::info!(
+            "Executing Azure DevOps wiki search via webview for: {}",
+            search_text
+        );
+
+        // First, get list of wikis
+        let wikis_response = fetch_from_webview(webview_window, &search_url, "GET", None).await?;
+
+        if let Some(wikis_array) = wikis_response.get("value").and_then(|v| v.as_array()) {
+            // Search each wiki
+            for wiki in wikis_array.iter().take(3) {
+                let wiki_id = wiki["id"].as_str().unwrap_or("");
+
+                if wiki_id.is_empty() {
+                    continue;
+                }
+
+                // Search wiki pages
+                let pages_url = format!(
+                    "{}/{}/_apis/wiki/wikis/{}/pages?recursionLevel=Full&includeContent=true&api-version=7.0",
+                    org_url.trim_end_matches('/'),
+                    urlencoding::encode(project),
+                    urlencoding::encode(wiki_id)
+                );
+
+                if let Ok(pages_response) =
+                    fetch_from_webview(webview_window, &pages_url, "GET", None).await
+                {
+                    // Try to get "page" field, or use the response itself if it's the page object
+                    if let Some(page) = pages_response.get("page") {
+                        search_page_recursive(
+                            page,
+                            &search_text,
+                            org_url,
+                            project,
+                            wiki_id,
+                            &mut all_results,
+                        );
+                    } else {
+                        // Response might be the page object itself
+                        search_page_recursive(
+                            &pages_response,
+                            &search_text,
+                            org_url,
+                            project,
+                            wiki_id,
+                            &mut all_results,
+                        );
+                    }
+                }
+            }
+        }
+    }
+
+    all_results.sort_by(|a, b| a.url.cmp(&b.url));
+    all_results.dedup_by(|a, b| a.url == b.url);
 
     tracing::info!(
        "Azure DevOps wiki webview search returned {} results",
-        results.len()
+        all_results.len()
     );
-    Ok(results)
+    Ok(all_results)
 }
 
 /// Recursively search through wiki pages for matching content
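Reviewer note: the same tail (sort by URL, dedup by URL) now appears in all four search functions. A small shared helper would keep the four copies from drifting; a sketch only, using the SearchResult fields visible in this diff:

    // Possible follow-up refactor, not part of this change.
    struct SearchResult {
        title: String,
        url: String,
        excerpt: String,
        content: Option<String>,
        source: String,
    }

    fn dedup_by_url(mut results: Vec<SearchResult>) -> Vec<SearchResult> {
        // Sort first so dedup_by sees duplicate URLs as adjacent elements.
        results.sort_by(|a, b| a.url.cmp(&b.url));
        results.dedup_by(|a, b| a.url == b.url);
        results
    }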
@@ -544,115 +566,124 @@ pub async fn search_azuredevops_workitems_webview<R: tauri::Runtime>(
     project: &str,
     query: &str,
 ) -> Result<Vec<SearchResult>, String> {
-    // Extract keywords
-    let keywords = extract_keywords(query);
-
-    // Check if query contains a work item ID (pure number)
-    let work_item_id: Option<i64> = keywords
-        .iter()
-        .filter(|k| k.chars().all(|c| c.is_numeric()))
-        .filter_map(|k| k.parse::<i64>().ok())
-        .next();
-
-    // Build WIQL query
-    let wiql_query = if let Some(id) = work_item_id {
-        // Search by specific ID
-        format!(
-            "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \
-            FROM WorkItems WHERE [System.Id] = {id}"
-        )
-    } else {
-        // Search by text in title/description
-        let search_terms = if !keywords.is_empty() {
-            keywords.join(" ")
-        } else {
-            query.to_string()
-        };
-
-        // Use CONTAINS for text search (case-insensitive)
-        format!(
-            "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \
-            FROM WorkItems WHERE [System.TeamProject] = '{project}' \
-            AND ([System.Title] CONTAINS '{search_terms}' OR [System.Description] CONTAINS '{search_terms}') \
-            ORDER BY [System.ChangedDate] DESC"
-        )
-    };
-
-    let wiql_url = format!(
-        "{}/{}/_apis/wit/wiql?api-version=7.0",
-        org_url.trim_end_matches('/'),
-        urlencoding::encode(project)
-    );
-
-    let body = serde_json::json!({
-        "query": wiql_query
-    })
-    .to_string();
-
-    tracing::info!("Executing Azure DevOps work item search via webview");
-    tracing::debug!("WIQL query: {}", wiql_query);
-    tracing::debug!("Request URL: {}", wiql_url);
-
-    let wiql_response = fetch_from_webview(webview_window, &wiql_url, "POST", Some(&body)).await?;
-
-    let mut results = Vec::new();
-
-    if let Some(work_items) = wiql_response.get("workItems").and_then(|v| v.as_array()) {
-        // Fetch details for first 5 work items
-        for item in work_items.iter().take(5) {
-            if let Some(id) = item.get("id").and_then(|i| i.as_i64()) {
-                let details_url = format!(
-                    "{}/_apis/wit/workitems/{}?api-version=7.0",
-                    org_url.trim_end_matches('/'),
-                    id
-                );
-
-                if let Ok(details) =
-                    fetch_from_webview(webview_window, &details_url, "GET", None).await
-                {
-                    if let Some(fields) = details.get("fields") {
-                        let title = fields
-                            .get("System.Title")
-                            .and_then(|t| t.as_str())
-                            .unwrap_or("Untitled");
-                        let work_item_type = fields
-                            .get("System.WorkItemType")
-                            .and_then(|t| t.as_str())
-                            .unwrap_or("Item");
-                        let description = fields
-                            .get("System.Description")
-                            .and_then(|d| d.as_str())
-                            .unwrap_or("");
-
-                        let clean_description = strip_html_simple(description);
-                        let excerpt = clean_description.chars().take(200).collect();
-
-                        let url = format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/'));
-
-                        let full_content = if clean_description.len() > 3000 {
-                            format!("{}...", &clean_description[..3000])
-                        } else {
-                            clean_description.clone()
-                        };
-
-                        results.push(SearchResult {
-                            title: format!("{work_item_type} #{id}: {title}"),
-                            url,
-                            excerpt,
-                            content: Some(full_content),
-                            source: "Azure DevOps".to_string(),
-                        });
-                    }
-                }
-            }
-        }
-    }
+    let expanded_queries = expand_query(query);
+
+    let mut all_results = Vec::new();
+
+    for expanded_query in expanded_queries.iter().take(3) {
+        // Extract keywords
+        let keywords = extract_keywords(expanded_query);
+
+        // Check if query contains a work item ID (pure number)
+        let work_item_id: Option<i64> = keywords
+            .iter()
+            .filter(|k| k.chars().all(|c| c.is_numeric()))
+            .filter_map(|k| k.parse::<i64>().ok())
+            .next();
+
+        // Build WIQL query
+        let wiql_query = if let Some(id) = work_item_id {
+            // Search by specific ID
+            format!(
+                "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \
+                FROM WorkItems WHERE [System.Id] = {id}"
+            )
+        } else {
+            // Search by text in title/description
+            let search_terms = if !keywords.is_empty() {
+                keywords.join(" ")
+            } else {
+                expanded_query.clone()
+            };
+
+            // Use CONTAINS for text search (case-insensitive)
+            format!(
+                "SELECT [System.Id], [System.Title], [System.Description], [System.WorkItemType] \
+                FROM WorkItems WHERE [System.TeamProject] = '{project}' \
+                AND ([System.Title] CONTAINS '{search_terms}' OR [System.Description] CONTAINS '{search_terms}') \
+                ORDER BY [System.ChangedDate] DESC"
+            )
+        };
+
+        let wiql_url = format!(
+            "{}/{}/_apis/wit/wiql?api-version=7.0",
+            org_url.trim_end_matches('/'),
+            urlencoding::encode(project)
+        );
+
+        let body = serde_json::json!({
+            "query": wiql_query
+        })
+        .to_string();
+
+        tracing::info!("Executing Azure DevOps work item search via webview");
+        tracing::debug!("WIQL query: {}", wiql_query);
+        tracing::debug!("Request URL: {}", wiql_url);
+
+        let wiql_response =
+            fetch_from_webview(webview_window, &wiql_url, "POST", Some(&body)).await?;
+
+        if let Some(work_items) = wiql_response.get("workItems").and_then(|v| v.as_array()) {
+            // Fetch details for first 5 work items
+            for item in work_items.iter().take(5) {
+                if let Some(id) = item.get("id").and_then(|i| i.as_i64()) {
+                    let details_url = format!(
+                        "{}/_apis/wit/workitems/{}?api-version=7.0",
+                        org_url.trim_end_matches('/'),
+                        id
+                    );
+
+                    if let Ok(details) =
+                        fetch_from_webview(webview_window, &details_url, "GET", None).await
+                    {
+                        if let Some(fields) = details.get("fields") {
+                            let title = fields
+                                .get("System.Title")
+                                .and_then(|t| t.as_str())
+                                .unwrap_or("Untitled");
+                            let work_item_type = fields
+                                .get("System.WorkItemType")
+                                .and_then(|t| t.as_str())
+                                .unwrap_or("Item");
+                            let description = fields
+                                .get("System.Description")
+                                .and_then(|d| d.as_str())
+                                .unwrap_or("");
+
+                            let clean_description = strip_html_simple(description);
+                            let excerpt = clean_description.chars().take(200).collect();
+
+                            let url =
+                                format!("{}/_workitems/edit/{id}", org_url.trim_end_matches('/'));
+
+                            let full_content = if clean_description.len() > 3000 {
+                                format!("{}...", &clean_description[..3000])
+                            } else {
+                                clean_description.clone()
+                            };
+
+                            all_results.push(SearchResult {
+                                title: format!("{work_item_type} #{id}: {title}"),
+                                url,
+                                excerpt,
+                                content: Some(full_content),
+                                source: "Azure DevOps".to_string(),
+                            });
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    all_results.sort_by(|a, b| a.url.cmp(&b.url));
+    all_results.dedup_by(|a, b| a.url == b.url);
 
     tracing::info!(
         "Azure DevOps work items webview search returned {} results",
-        results.len()
+        all_results.len()
     );
-    Ok(results)
+    Ok(all_results)
 }
 
 /// Add a comment to an Azure DevOps work item
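Reviewer note: the WIQL branch still interpolates search_terms (and project) directly into single-quoted WIQL string literals, so an apostrophe in the user's query produces a malformed query. WIQL appears to follow the SQL convention of doubling an embedded quote (worth verifying before relying on it); a sketch of the fix:

    // Sketch only; confirm WIQL's escaping rules before adopting.
    fn escape_wiql_literal(s: &str) -> String {
        s.replace('\'', "''")
    }

    // e.g. in the CONTAINS branch:
    // let search_terms = escape_wiql_literal(&search_terms);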
@@ -69,6 +69,7 @@ pub fn run() {
             commands::db::add_five_why,
             commands::db::update_five_why,
             commands::db::add_timeline_event,
+            commands::db::get_timeline_events,
             // Analysis / PII
             commands::analysis::upload_log_file,
             commands::analysis::upload_log_file_by_content,
@@ -120,6 +121,7 @@ pub fn run() {
             commands::system::get_settings,
             commands::system::update_settings,
             commands::system::get_audit_log,
+            commands::system::get_app_version,
         ])
         .run(tauri::generate_context!())
         .expect("Error running Troubleshooting and RCA Assistant application");
@@ -6,7 +6,7 @@
     "frontendDist": "../dist",
     "devUrl": "http://localhost:1420",
     "beforeDevCommand": "npm run dev",
-    "beforeBuildCommand": "npm run build"
+    "beforeBuildCommand": "npm run version:update && npm run build"
   },
   "app": {
     "security": {
@@ -41,4 +41,7 @@
     "shortDescription": "Troubleshooting and RCA Assistant",
     "longDescription": "Structured AI-backed assistant for IT troubleshooting, 5-whys root cause analysis, and post-mortem documentation with offline Ollama support."
   }
 }
@@ -1,5 +1,4 @@
 import React, { useState, useEffect } from "react";
-import { getVersion } from "@tauri-apps/api/app";
 import { Routes, Route, NavLink, useLocation } from "react-router-dom";
 import {
   Home,
@@ -15,7 +14,7 @@ import {
   Moon,
 } from "lucide-react";
 import { useSettingsStore } from "@/stores/settingsStore";
-import { loadAiProvidersCmd, testProviderConnectionCmd } from "@/lib/tauriCommands";
+import { getAppVersionCmd, loadAiProvidersCmd, testProviderConnectionCmd } from "@/lib/tauriCommands";
 
 import Dashboard from "@/pages/Dashboard";
 import NewIssue from "@/pages/NewIssue";
@@ -50,7 +49,7 @@ export default function App() {
   void useLocation();
 
   useEffect(() => {
-    getVersion().then(setAppVersion).catch(() => {});
+    getAppVersionCmd().then(setAppVersion).catch(() => {});
   }, []);
 
   // Load providers and auto-test active provider on startup
@@ -331,6 +331,58 @@ When analyzing identity and access issues, focus on these key areas:
 Always ask about the Keycloak version, realm configuration (external IdP vs local users vs LDAP), SSSD version and configured domains, and whether this is a first-time setup or a regression.`,
 };
 
+export const INCIDENT_RESPONSE_FRAMEWORK = `
+
+---
+
+## INCIDENT RESPONSE METHODOLOGY
+
+Follow this structured framework for every triage conversation. Each phase must be completed with evidence before advancing.
+
+### Phase 1: Detection & Evidence Gathering
+- **Do NOT propose fixes** until the problem is fully understood
+- Gather: error messages, timestamps, affected systems, scope of impact, recent changes
+- Ask: "What changed? When did it start? Who/what is affected? What has been tried?"
+- Record all evidence with UTC timestamps
+- Establish a clear problem statement before proceeding
+
+### Phase 2: Diagnosis & Hypothesis Testing
+- Apply the scientific method: form hypotheses, test them with evidence
+- **The 3-Fix Rule**: If you cannot confidently identify the root cause after 3 hypotheses, STOP and reassess your assumptions — you may be looking at the wrong system or the wrong layer
+- Check the most common causes first (Occam's Razor): DNS, certificates, disk space, permissions, recent deployments
+- Differentiate between symptoms and causes — treat causes, not symptoms
+- Use binary search to narrow scope: which component, which layer, which change
+
+### Phase 3: Root Cause Analysis with 5-Whys
+- Each "Why" must be backed by evidence, not speculation
+- If you cannot provide evidence for a "Why", state what investigation is needed to confirm
+- Look for systemic issues, not just proximate causes
+- The root cause should explain ALL observed symptoms, not just some
+- Common root cause categories: configuration drift, capacity exhaustion, dependency failure, race condition, human error in process
+
+### Phase 4: Resolution & Prevention
+- **Immediate fix**: What stops the bleeding right now? (rollback, restart, failover)
+- **Permanent fix**: What prevents recurrence? (code fix, config change, automation)
+- **Runbook update**: Document the fix for future oncall engineers
+- Verify the fix resolves ALL symptoms, not just the primary one
+- Monitor for regression after applying the fix
+
+### Phase 5: Post-Incident Review
+- Calculate incident metrics: MTTD (detect), MTTA (acknowledge), MTTR (resolve)
+- Conduct blameless post-mortem focused on systems and processes
+- Identify action items with owners and due dates
+- Categories: monitoring gaps, process improvements, technical debt, training needs
+- Ask: "What would have prevented this? What would have detected it faster? What would have resolved it faster?"
+
+### Communication Practices
+- State your current phase explicitly (e.g., "We are in Phase 2: Diagnosis")
+- Summarize findings at each phase transition
+- Flag assumptions clearly: "ASSUMPTION: ..." vs "CONFIRMED: ..."
+- When advancing the Why level, explicitly state the evidence chain
+`;
+
 export function getDomainPrompt(domainId: string): string {
-  return domainPrompts[domainId] ?? "";
+  const domainSpecific = domainPrompts[domainId] ?? "";
+  if (!domainSpecific) return "";
+  return domainSpecific + INCIDENT_RESPONSE_FRAMEWORK;
 }
@@ -74,9 +74,11 @@ export interface FiveWhyEntry {
 
 export interface TimelineEvent {
   id: string;
+  issue_id: string;
   event_type: string;
   description: string;
-  created_at: number;
+  metadata: string;
+  created_at: string;
 }
 
 export interface AiConversation {
@@ -104,6 +106,7 @@ export interface IssueDetail {
   image_attachments: ImageAttachment[];
   resolution_steps: ResolutionStep[];
   conversations: AiConversation[];
+  timeline_events: TimelineEvent[];
 }
 
 export interface IssueSummary {
@@ -268,8 +271,8 @@ export interface TriageMessage {
 export const analyzeLogsCmd = (issueId: string, logFileIds: string[], providerConfig: ProviderConfig) =>
   invoke<AnalysisResult>("analyze_logs", { issueId, logFileIds, providerConfig });
 
-export const chatMessageCmd = (issueId: string, message: string, providerConfig: ProviderConfig) =>
-  invoke<ChatResponse>("chat_message", { issueId, message, providerConfig });
+export const chatMessageCmd = (issueId: string, message: string, providerConfig: ProviderConfig, systemPrompt?: string) =>
+  invoke<ChatResponse>("chat_message", { issueId, message, providerConfig, systemPrompt: systemPrompt ?? null });
 
 export const listProvidersCmd = () => invoke<ProviderInfo[]>("list_providers");
 
@@ -361,8 +364,11 @@ export const addFiveWhyCmd = (
 export const updateFiveWhyCmd = (entryId: string, answer: string) =>
   invoke<void>("update_five_why", { entryId, answer });
 
-export const addTimelineEventCmd = (issueId: string, eventType: string, description: string) =>
-  invoke<TimelineEvent>("add_timeline_event", { issueId, eventType, description });
+export const addTimelineEventCmd = (issueId: string, eventType: string, description: string, metadata?: string) =>
+  invoke<TimelineEvent>("add_timeline_event", { issueId, eventType, description, metadata: metadata ?? null });
+
+export const getTimelineEventsCmd = (issueId: string) =>
+  invoke<TimelineEvent[]>("get_timeline_events", { issueId });
 
 // ─── Document commands ────────────────────────────────────────────────────────
 
@@ -486,3 +492,8 @@ export const loadAiProvidersCmd = () =>
 
 export const deleteAiProviderCmd = (name: string) =>
   invoke<void>("delete_ai_provider", { name });
+
+// ─── System / Version ─────────────────────────────────────────────────────────
+
+export const getAppVersionCmd = () =>
+  invoke<string>("get_app_version");
@@ -5,7 +5,7 @@ import { DocEditor } from "@/components/DocEditor";
 import { useSettingsStore } from "@/stores/settingsStore";
 import {
   generatePostmortemCmd,
+  addTimelineEventCmd,
   updateDocumentCmd,
   exportDocumentCmd,
   type Document_,
@@ -28,6 +28,7 @@ export default function Postmortem() {
       const generated = await generatePostmortemCmd(id);
       setDoc(generated);
       setContent(generated.content_md);
+      addTimelineEventCmd(id, "postmortem_generated", "Post-mortem document generated").catch(() => {});
     } catch (err) {
       setError(String(err));
     } finally {
@@ -54,6 +55,7 @@ export default function Postmortem() {
     try {
       const path = await exportDocumentCmd(doc.id, doc.title, content, format, "");
       setError(`Document exported to: ${path}`);
+      addTimelineEventCmd(id!, "document_exported", `Post-mortem exported as ${format}`).catch(() => {});
       setTimeout(() => setError(null), 5000);
     } catch (err) {
       setError(`Export failed: ${String(err)}`);
@@ -8,6 +8,7 @@ import {
   generateRcaCmd,
   updateDocumentCmd,
   exportDocumentCmd,
+  addTimelineEventCmd,
   type Document_,
 } from "@/lib/tauriCommands";
 
@@ -29,6 +30,7 @@ export default function RCA() {
       const generated = await generateRcaCmd(id);
       setDoc(generated);
       setContent(generated.content_md);
+      addTimelineEventCmd(id, "rca_generated", "RCA document generated").catch(() => {});
     } catch (err) {
       setError(String(err));
     } finally {
@@ -55,6 +57,7 @@ export default function RCA() {
     try {
       const path = await exportDocumentCmd(doc.id, doc.title, content, format, "");
       setError(`Document exported to: ${path}`);
+      addTimelineEventCmd(id!, "document_exported", `RCA exported as ${format}`).catch(() => {});
       setTimeout(() => setError(null), 5000);
     } catch (err) {
       setError(`Export failed: ${String(err)}`);
@@ -15,6 +15,7 @@ import {
   updateIssueCmd,
   addFiveWhyCmd,
 } from "@/lib/tauriCommands";
+import { getDomainPrompt } from "@/lib/domainPrompts";
 import type { TriageMessage } from "@/lib/tauriCommands";
 
 const CLOSE_PATTERNS = [
@@ -167,7 +168,8 @@ export default function Triage() {
     setPendingFiles([]);
 
     try {
-      const response = await chatMessageCmd(id, aiMessage, provider);
+      const systemPrompt = currentIssue ? getDomainPrompt(currentIssue.category) : undefined;
+      const response = await chatMessageCmd(id, aiMessage, provider, systemPrompt);
       const assistantMsg: TriageMessage = {
         id: `asst-${Date.now()}`,
         issue_id: id,
@@ -42,11 +42,8 @@ describe("Audit Log", () => {
   it("displays audit entries", async () => {
     render(<Security />);
 
-    // Wait for audit log to load
-    await screen.findByText("Audit Log");
-
-    // Check that the table has rows (header + data rows)
-    const table = screen.getByRole("table");
+    // Wait for table to appear after async audit data loads
+    const table = await screen.findByRole("table");
     expect(table).toBeInTheDocument();
 
     const rows = screen.getAllByRole("row");
@@ -56,9 +53,7 @@ describe("Audit Log", () => {
   it("provides way to view transmitted data details", async () => {
     render(<Security />);
 
-    await screen.findByText("Audit Log");
-
-    // Should have View/Hide buttons for expanding details
+    // Wait for async data to load and render the table
     const viewButtons = await screen.findAllByRole("button", { name: /View/i });
     expect(viewButtons.length).toBeGreaterThan(0);
   });
@@ -66,14 +61,13 @@ describe("Audit Log", () => {
   it("details column or button exists for viewing data", async () => {
     render(<Security />);
 
-    await screen.findByText("Audit Log");
+    // Wait for async data to load and render the table
+    await screen.findByRole("table");
 
-    // The audit log should have a Details column header
     const detailsHeader = screen.getByText("Details");
     expect(detailsHeader).toBeInTheDocument();
 
-    // Should have view buttons
-    const viewButtons = screen.getAllByRole("button", { name: /View/i });
+    const viewButtons = await screen.findAllByRole("button", { name: /View/i });
     expect(viewButtons.length).toBe(2); // One for each mock entry
   });
 });
tests/unit/domainPrompts.test.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
+import { describe, it, expect } from "vitest";
+import { getDomainPrompt, DOMAINS, INCIDENT_RESPONSE_FRAMEWORK } from "@/lib/domainPrompts";
+
+describe("Domain Prompts with Incident Response Framework", () => {
+  it("exports INCIDENT_RESPONSE_FRAMEWORK constant", () => {
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toBeDefined();
+    expect(typeof INCIDENT_RESPONSE_FRAMEWORK).toBe("string");
+    expect(INCIDENT_RESPONSE_FRAMEWORK.length).toBeGreaterThan(100);
+  });
+
+  it("framework contains all 5 phases", () => {
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toContain("Phase 1: Detection & Evidence Gathering");
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toContain("Phase 2: Diagnosis & Hypothesis Testing");
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toContain("Phase 3: Root Cause Analysis with 5-Whys");
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toContain("Phase 4: Resolution & Prevention");
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toContain("Phase 5: Post-Incident Review");
+  });
+
+  it("framework contains the 3-Fix Rule", () => {
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toContain("3-Fix Rule");
+  });
+
+  it("framework contains communication practices", () => {
+    expect(INCIDENT_RESPONSE_FRAMEWORK).toContain("Communication Practices");
+  });
+
+  it("all defined domains include incident response methodology", () => {
+    for (const domain of DOMAINS) {
+      const prompt = getDomainPrompt(domain.id);
+      if (prompt) {
+        expect(prompt).toContain("INCIDENT RESPONSE METHODOLOGY");
+        expect(prompt).toContain("Phase 1:");
+        expect(prompt).toContain("Phase 5:");
+      }
+    }
+  });
+
+  it("returns empty string for unknown domain", () => {
+    expect(getDomainPrompt("nonexistent_domain")).toBe("");
+    expect(getDomainPrompt("")).toBe("");
+  });
+
+  it("preserves existing Linux domain content", () => {
+    const prompt = getDomainPrompt("linux");
+    expect(prompt).toContain("senior Linux systems engineer");
+    expect(prompt).toContain("RHEL");
+    expect(prompt).toContain("INCIDENT RESPONSE METHODOLOGY");
+  });
+
+  it("preserves existing Kubernetes domain content", () => {
+    const prompt = getDomainPrompt("kubernetes");
+    expect(prompt).toContain("Kubernetes platform engineer");
+    expect(prompt).toContain("k3s");
+    expect(prompt).toContain("INCIDENT RESPONSE METHODOLOGY");
+  });
+
+  it("preserves existing Network domain content", () => {
+    const prompt = getDomainPrompt("network");
+    expect(prompt).toContain("network engineer");
+    expect(prompt).toContain("Fortigate");
+    expect(prompt).toContain("INCIDENT RESPONSE METHODOLOGY");
+  });
+});
@@ -35,6 +35,7 @@ const mockIssueDetail = {
     },
   ],
   conversations: [],
+  timeline_events: [],
 };
 
 describe("Resolution Page", () => {
tests/unit/timelineEvents.test.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
+import { describe, it, expect, vi, beforeEach } from "vitest";
+import { invoke } from "@tauri-apps/api/core";
+
+const mockInvoke = vi.mocked(invoke);
+
+describe("Timeline Event Commands", () => {
+  beforeEach(() => {
+    mockInvoke.mockReset();
+  });
+
+  it("addTimelineEventCmd calls invoke with correct params", async () => {
+    const mockEvent = {
+      id: "te-1",
+      issue_id: "issue-1",
+      event_type: "triage_started",
+      description: "Started",
+      metadata: "{}",
+      created_at: "2025-01-15 10:00:00 UTC",
+    };
+    mockInvoke.mockResolvedValueOnce(mockEvent as never);
+
+    const { addTimelineEventCmd } = await import("@/lib/tauriCommands");
+    const result = await addTimelineEventCmd("issue-1", "triage_started", "Started");
+    expect(mockInvoke).toHaveBeenCalledWith("add_timeline_event", {
+      issueId: "issue-1",
+      eventType: "triage_started",
+      description: "Started",
+      metadata: null,
+    });
+    expect(result).toEqual(mockEvent);
+  });
+
+  it("addTimelineEventCmd passes metadata when provided", async () => {
+    mockInvoke.mockResolvedValueOnce({} as never);
+
+    const { addTimelineEventCmd } = await import("@/lib/tauriCommands");
+    await addTimelineEventCmd("issue-1", "log_uploaded", "File uploaded", '{"file":"app.log"}');
+    expect(mockInvoke).toHaveBeenCalledWith("add_timeline_event", {
+      issueId: "issue-1",
+      eventType: "log_uploaded",
+      description: "File uploaded",
+      metadata: '{"file":"app.log"}',
+    });
+  });
+
+  it("getTimelineEventsCmd calls invoke with correct params", async () => {
+    mockInvoke.mockResolvedValueOnce([] as never);
+
+    const { getTimelineEventsCmd } = await import("@/lib/tauriCommands");
+    const result = await getTimelineEventsCmd("issue-1");
+    expect(mockInvoke).toHaveBeenCalledWith("get_timeline_events", { issueId: "issue-1" });
+    expect(result).toEqual([]);
+  });
+});