/**
 * Create a tokenizer.
 * Tokenizers deal with one type of data (e.g., containers, flow, text).
 * The parser is the object dealing with it all.
 * `initialize` works like other constructs, except that only its `tokenize`
 * function is used, in which case it doesn’t receive an `ok` or `nok`.
 * `from` can be given to set the point before the first character, although
 * when further lines are indented, they must be set with `defineSkip`.
 *
 * @param {ParseContext} parser
 *   Parser.
 * @param {InitialConstruct} initialize
 *   Construct.
 * @param {Omit<Point, '_bufferIndex' | '_index'> | undefined} [from]
 *   Point (optional).
 * @returns {TokenizeContext}
 *   Context.
 */
export function createTokenizer(parser: ParseContext, initialize: InitialConstruct, from?: Omit<Point, "_bufferIndex" | "_index"> | undefined): TokenizeContext;
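// Usage sketch (illustrative only, not part of the published types): inside
// micromark, `parse()` wraps this factory so that each content type gets its
// own tokenizer, roughly:
//
//   const context = createTokenizer(parser, initialize, {line: 1, column: 1, offset: 0})
//   const events = context.write([...chunks, codes.eof])
//
// `parser`, `initialize`, and `chunks` are assumed to come from micromark's
// own parse and preprocess steps; `write` returns the accumulated events once
// it receives the EOF code.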
/**
 * Restore the state.
 */
export type Restore = () => undefined;
/**
 * Info.
 */
export type Info = {
  /**
   * Restore.
   */
  restore: Restore;
  /**
   * From.
   */
  from: number;
};
/**
 * Handle a successful run.
 */
export type ReturnHandle = (construct: Construct, info: Info) => undefined;
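// Informal note (describes micromark internals as an assumption, not something
// this declaration file guarantees): the tokenize context's `attempt` and
// `check` helpers capture an `Info` snapshot before running a construct:
// `from` records the current event index and `restore` rewinds events and
// positions. On `nok` the snapshot is restored; on `ok` the `ReturnHandle`
// decides what happens next (e.g. keep the events for `attempt`, restore them
// for `check`).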
import type { ParseContext } from 'micromark-util-types';
import type { InitialConstruct } from 'micromark-util-types';
import type { Point } from 'micromark-util-types';
import type { TokenizeContext } from 'micromark-util-types';
import type { Construct } from 'micromark-util-types';
//# sourceMappingURL=create-tokenizer.d.ts.map