Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
61 lines
1.8 KiB
JavaScript
61 lines
1.8 KiB
JavaScript
"use strict";
|
|
const iconvLite = require("iconv-lite");
|
|
const supportedNames = require("./supported-names.json");
|
|
const labelsToNames = require("./labels-to-names.json");
|
|
|
|
// Pre-built Set of canonical encoding names for O(1) membership checks in isSupported().
const supportedNamesSet = new Set(supportedNames);
|
|
|
|
// https://encoding.spec.whatwg.org/#concept-encoding-get
|
|
exports.labelToName = label => {
|
|
label = String(label).trim().toLowerCase();
|
|
|
|
return labelsToNames[label] || null;
|
|
};
|
|
|
|
// https://encoding.spec.whatwg.org/#decode
|
|
exports.decode = (uint8Array, fallbackEncodingName) => {
|
|
let encoding = fallbackEncodingName;
|
|
if (!exports.isSupported(encoding)) {
|
|
throw new RangeError(`"${encoding}" is not a supported encoding name`);
|
|
}
|
|
|
|
const bomEncoding = exports.getBOMEncoding(uint8Array);
|
|
if (bomEncoding !== null) {
|
|
encoding = bomEncoding;
|
|
// iconv-lite will strip BOMs for us, so no need to do the extra byte removal that the spec does.
|
|
// Note that we won't end up in the x-user-defined case when there's a bomEncoding.
|
|
}
|
|
|
|
if (encoding === "x-user-defined") {
|
|
// https://encoding.spec.whatwg.org/#x-user-defined-decoder
|
|
let result = "";
|
|
for (const byte of uint8Array) {
|
|
if (byte <= 0x7F) {
|
|
result += String.fromCodePoint(byte);
|
|
} else {
|
|
result += String.fromCodePoint(0xF780 + byte - 0x80);
|
|
}
|
|
}
|
|
return result;
|
|
}
|
|
|
|
return iconvLite.decode(uint8Array, encoding);
|
|
};
|
|
|
|
// https://github.com/whatwg/html/issues/1910#issuecomment-254017369
|
|
exports.getBOMEncoding = uint8Array => {
|
|
if (uint8Array[0] === 0xFE && uint8Array[1] === 0xFF) {
|
|
return "UTF-16BE";
|
|
} else if (uint8Array[0] === 0xFF && uint8Array[1] === 0xFE) {
|
|
return "UTF-16LE";
|
|
} else if (uint8Array[0] === 0xEF && uint8Array[1] === 0xBB && uint8Array[2] === 0xBF) {
|
|
return "UTF-8";
|
|
}
|
|
|
|
return null;
|
|
};
|
|
|
|
// Returns true when `name` is one of the canonical encoding names this package
// supports. Note this checks canonical names, not labels; resolve labels with
// labelToName first. The argument is coerced to a string before the lookup.
exports.isSupported = name => supportedNamesSet.has(String(name));
|