Implements Phases 1-8 of the TFTSR implementation plan.

Rust backend (Tauri 2.x, src-tauri/):
- Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama
- PII detection engine: 11 regex patterns with overlap resolution
- SQLCipher AES-256 encrypted database with 10 versioned migrations
- 28 Tauri IPC commands for triage, analysis, document, and system ops
- Ollama: hardware probe, model recommendations, pull/delete with events
- RCA and blameless post-mortem Markdown document generators
- PDF export via printpdf
- Audit log: SHA-256 hash of every external data send
- Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2)

Frontend (React 18 + TypeScript + Vite, src/):
- 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History, plus Settings
- 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives
- 3 Zustand stores: session, settings (persisted), history
- Type-safe tauriCommands.ts matching Rust backend types exactly
- 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs)

DevOps:
- .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push
- .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload

Verified:
- cargo check: zero errors
- tsc --noEmit: zero errors
- vitest run: 13/13 unit tests passing

Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
/**
 * @import {
 *   Create,
 *   FullNormalizedExtension,
 *   InitialConstruct,
 *   ParseContext,
 *   ParseOptions
 * } from 'micromark-util-types'
 */

import { combineExtensions } from 'micromark-util-combine-extensions';
import { content } from './initialize/content.js';
import { document } from './initialize/document.js';
import { flow } from './initialize/flow.js';
import { string, text } from './initialize/text.js';
import * as defaultConstructs from './constructs.js';
import { createTokenizer } from './create-tokenizer.js';

/**
 * @param {ParseOptions | null | undefined} [options]
 *   Configuration (optional).
 * @returns {ParseContext}
 *   Parser.
 */
export function parse(options) {
  const settings = options || {};
  // Merge the default constructs with any user-provided syntax extensions.
  const constructs = /** @type {FullNormalizedExtension} */ (
    combineExtensions([defaultConstructs, ...(settings.extensions || [])])
  );

  /** @type {ParseContext} */
  const parser = {
    constructs,
    // One tokenizer factory per content type.
    content: create(content),
    // Identifiers of link definitions found so far.
    defined: [],
    document: create(document),
    flow: create(flow),
    // Map of line numbers to whether they are lazy (inside containers).
    lazy: {},
    string: create(string),
    text: create(text)
  };
  return parser;

  /**
   * @param {InitialConstruct} initial
   *   Construct to start with.
   * @returns {Create}
   *   Create a tokenizer.
   */
  function create(initial) {
    return creator;

    /** @type {Create} */
    function creator(from) {
      // Each call yields a fresh tokenizer bound to this parser, starting
      // at the given initial construct (optionally from a point `from`).
      return createTokenizer(parser, initial, from);
    }
  }
}
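
// Usage sketch (not part of this module): how the parser returned by
// `parse` is typically driven. The `document` factory creates a tokenizer,
// `write` feeds it preprocessed chunks, and the resulting events are
// resolved for a compiler. The `./preprocess.js` and `./postprocess.js`
// import paths are assumptions based on this package's sibling modules.
//
//   import { preprocess } from './preprocess.js';
//   import { postprocess } from './postprocess.js';
//
//   const parser = parse({ extensions: [] });
//   const chunks = preprocess()('# hello *world*', undefined, true);
//   const events = postprocess(parser.document().write(chunks));
//   // `events` is a flat list of enter/exit token events.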