tftsr-devops_investigation/node_modules/encoding-sniffer/dist/commonjs/index.js

feat: initial implementation of TFTSR IT Triage & RCA application

Implements Phases 1-8 of the TFTSR implementation plan.

Rust backend (Tauri 2.x, src-tauri/):
- Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama
- PII detection engine: 11 regex patterns with overlap resolution
- SQLCipher AES-256 encrypted database with 10 versioned migrations
- 28 Tauri IPC commands for triage, analysis, document, and system ops
- Ollama: hardware probe, model recommendations, pull/delete with events
- RCA and blameless post-mortem Markdown document generators
- PDF export via printpdf
- Audit log: SHA-256 hash of every external data send
- Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2)

Frontend (React 18 + TypeScript + Vite, src/):
- 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History + Settings
- 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives
- 3 Zustand stores: session, settings (persisted), history
- Type-safe tauriCommands.ts matching Rust backend types exactly
- 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs)

DevOps:
- .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push
- .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload

Verified:
- cargo check: zero errors
- tsc --noEmit: zero errors
- vitest run: 13/13 unit tests passing

Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>

2026-03-15 03:36:25 +00:00
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getEncoding = exports.DecodeStream = void 0;
exports.decodeBuffer = decodeBuffer;
const node_stream_1 = require("node:stream");
const iconv_lite_1 = __importDefault(require("iconv-lite"));
const sniffer_js_1 = require("./sniffer.js");
/**
* Sniff the encoding of a buffer, then decode it.
*
* @param buffer Buffer to be decoded
* @param options Options for the sniffer
 * @returns The decoded string
*/
function decodeBuffer(buffer, options = {}) {
return iconv_lite_1.default.decode(buffer, (0, sniffer_js_1.getEncoding)(buffer, options));
}
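/*
 * Usage sketch (not part of the module). Assumes the package resolves as
 * "encoding-sniffer" and that "page.html" is a hypothetical file holding
 * bytes in an unknown encoding:
 *
 *   const fs = require("node:fs");
 *   const { decodeBuffer } = require("encoding-sniffer");
 *
 *   const buf = fs.readFileSync("page.html"); // hypothetical input file
 *   const text = decodeBuffer(buf);           // encoding sniffed, then decoded
 */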
/**
* Decodes a stream of buffers into a stream of strings.
*
 * Reads up to the first `maxBytes` bytes (1024 by default) and passes them to
 * the sniffer. Once an encoding has been determined, it passes all data
 * through iconv-lite's decode stream and outputs the results.
*/
class DecodeStream extends node_stream_1.Transform {
constructor(options) {
var _a;
super({ decodeStrings: false, encoding: "utf-8" });
this.buffers = [];
        /** The iconv decode stream. Set once at least `maxBytes` bytes have been read, or once the input ends. */
this.iconv = null;
this.readBytes = 0;
this.sniffer = new sniffer_js_1.Sniffer(options);
this.maxBytes = (_a = options === null || options === void 0 ? void 0 : options.maxBytes) !== null && _a !== void 0 ? _a : 1024;
}
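    /**
     * Buffers incoming chunks and feeds them to the sniffer until `maxBytes`
     * bytes have been seen; from then on, all data (including the buffered
     * chunks) flows through the iconv-lite decode stream.
     */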
_transform(chunk, _encoding, callback) {
if (this.readBytes < this.maxBytes) {
this.sniffer.write(chunk);
this.readBytes += chunk.length;
if (this.readBytes < this.maxBytes) {
this.buffers.push(chunk);
callback();
return;
}
}
this.getIconvStream().write(chunk, callback);
}
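    /**
     * Lazily creates the iconv-lite decode stream for the sniffed encoding,
     * wires its output to this Transform's readable side, and replays any
     * chunks buffered before the encoding was known.
     */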
getIconvStream() {
if (this.iconv) {
return this.iconv;
}
const stream = iconv_lite_1.default.decodeStream(this.sniffer.encoding);
stream.on("data", (chunk) => this.push(chunk, "utf-8"));
stream.on("end", () => this.push(null));
this.iconv = stream;
for (const buffer of this.buffers) {
stream.write(buffer);
}
this.buffers.length = 0;
return stream;
}
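    /**
     * Called when the writable side ends. Creating the iconv stream here
     * ensures inputs shorter than `maxBytes` are still decoded.
     */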
_flush(callback) {
this.getIconvStream().end(callback);
}
}
exports.DecodeStream = DecodeStream;
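/*
 * Usage sketch (not part of the module). Any readable byte stream works;
 * "page.html" is a hypothetical file name:
 *
 *   const fs = require("node:fs");
 *   const { DecodeStream } = require("encoding-sniffer");
 *
 *   fs.createReadStream("page.html")
 *       .pipe(new DecodeStream({ maxBytes: 1024 }))
 *       .on("data", (str) => process.stdout.write(str)); // str is a UTF-8 string
 */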
var sniffer_js_2 = require("./sniffer.js");
Object.defineProperty(exports, "getEncoding", { enumerable: true, get: function () { return sniffer_js_2.getEncoding; } });
//# sourceMappingURL=index.js.map