Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
105 lines
3.0 KiB
JavaScript
import {eat, finishToken, lookaheadTypeAndKeyword, match, nextTokenStart} from "../tokenizer/index";
|
|
|
|
import {formatTokenType, TokenType as tt} from "../tokenizer/types";
|
|
import {charCodes} from "../util/charcodes";
|
|
import {input, state} from "./base";
|
|
|
|
// ## Parser utilities
|
|
|
|
// Tests whether parsed token is a contextual keyword.
|
|
export function isContextual(contextualKeyword) {
  // The tokenizer records the contextual keyword of the current token on
  // `state`; this is a pure comparison with no token consumption.
  const current = state.contextualKeyword;
  return current === contextualKeyword;
}
|
|
|
|
// Tests whether the NEXT token (not the current one) is the given contextual
// keyword, without consuming anything.
export function isLookaheadContextual(contextualKeyword) {
  const lookahead = lookaheadTypeAndKeyword();
  if (lookahead.type !== tt.name) {
    return false;
  }
  return lookahead.contextualKeyword === contextualKeyword;
}
|
|
|
|
// Consumes contextual keyword if possible.
|
|
// Consumes the current token if it is the given contextual keyword; returns
// whether it was consumed.
export function eatContextual(contextualKeyword) {
  if (state.contextualKeyword !== contextualKeyword) {
    return false;
  }
  return eat(tt.name);
}
|
|
|
|
// Asserts that following token is given contextual keyword.
|
|
// Asserts that the current token is the given contextual keyword, consuming
// it; otherwise transitions the parser into the error state via `unexpected`.
export function expectContextual(contextualKeyword) {
  const consumed = eatContextual(contextualKeyword);
  if (!consumed) {
    unexpected();
  }
}
|
|
|
|
// Test whether a semicolon can be inserted at the current position.
|
|
// Tests whether ASI (automatic semicolon insertion) is permitted at the
// current position: end of file, a closing brace, or a preceding line break.
export function canInsertSemicolon() {
  if (match(tt.eof)) {
    return true;
  }
  if (match(tt.braceR)) {
    return true;
  }
  return hasPrecedingLineBreak();
}
|
|
|
|
// Scans the source text between the end of the previous token and the start
// of the current one, reporting whether any line terminator appears there.
export function hasPrecedingLineBreak() {
  const prevToken = state.tokens[state.tokens.length - 1];
  // If there is no previous token, scan from the beginning of the input.
  const scanFrom = prevToken ? prevToken.end : 0;
  for (let pos = scanFrom; pos < state.start; pos++) {
    switch (input.charCodeAt(pos)) {
      case charCodes.lineFeed:
      case charCodes.carriageReturn:
      case 0x2028: // line separator
      case 0x2029: // paragraph separator
        return true;
      default:
        break;
    }
  }
  return false;
}
|
|
|
|
// Reports whether a line terminator occurs between the end of the current
// token and the start of the next one.
export function hasFollowingLineBreak() {
  const upcomingStart = nextTokenStart();
  // All four ECMAScript line terminators: LF, CR, LS (U+2028), PS (U+2029).
  const isNewlineCode = (code) =>
    code === charCodes.lineFeed ||
    code === charCodes.carriageReturn ||
    code === 0x2028 ||
    code === 0x2029;
  for (let pos = state.end; pos < upcomingStart; pos++) {
    if (isNewlineCode(input.charCodeAt(pos))) {
      return true;
    }
  }
  return false;
}
|
|
|
|
// Consumes an explicit semicolon if present; otherwise reports whether ASI
// would allow one here. Note this may consume a token as a side effect.
export function isLineTerminator() {
  if (eat(tt.semi)) {
    return true;
  }
  return canInsertSemicolon();
}
|
|
|
|
// Consume a semicolon, or, failing that, see if we are allowed to
|
|
// pretend that there is a semicolon at this position.
|
|
// Consume a semicolon, or, failing that, see if we are allowed to pretend
// that there is a semicolon at this position (ASI). On failure, moves the
// parser into the error state rather than throwing.
export function semicolon() {
  const terminated = isLineTerminator();
  if (!terminated) {
    unexpected('Unexpected token, expected ";"');
  }
}
|
|
|
|
// Expect a token of a given type. If found, consume it, otherwise,
|
|
// raise an unexpected token error at given pos.
|
|
// Expect a token of the given type. If found, consume it; otherwise record
// an unexpected-token error (no exception is thrown — see `unexpected`).
export function expect(type) {
  if (!eat(type)) {
    unexpected(`Unexpected token, expected "${formatTokenType(type)}"`);
  }
}
|
|
|
|
/**
 * Transition the parser to an error state. All code needs to be written to naturally unwind in this
 * state, which allows us to backtrack without exceptions and without error plumbing everywhere.
 */
export function unexpected(message = "Unexpected token", pos = state.start) {
  // Keep only the first error encountered; later calls are no-ops.
  if (state.error) {
    return;
  }
  const syntaxError = new SyntaxError(message);
  syntaxError.pos = pos;
  state.error = syntaxError;
  // Jump the tokenizer to end-of-input and emit EOF so the remaining parse
  // unwinds quickly without doing further work.
  state.pos = input.length;
  finishToken(tt.eof);
}
|