Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
214 lines
5.5 KiB
JavaScript
import { a as normalizeBody, i as isBodyless, n as createTarPacker$1, r as transformHeader, t as createUnpacker } from "../unpacker-Dcww6JeE.js";
|
|
|
|
//#region src/web/compression.ts
|
|
/**
 * Creates a gzip compressor suitable for piping a tar byte stream through.
 * @returns {CompressionStream} A platform CompressionStream in "gzip" mode.
 */
function createGzipEncoder() {
  const format = "gzip";
  return new CompressionStream(format);
}
|
|
/**
 * Creates a gzip decompressor suitable for piping a compressed tar
 * byte stream through before decoding.
 * @returns {DecompressionStream} A platform DecompressionStream in "gzip" mode.
 */
function createGzipDecoder() {
  const format = "gzip";
  return new DecompressionStream(format);
}
|
|
|
|
//#endregion
|
|
//#region src/web/pack.ts
|
|
/**
 * Creates a streaming tar packer.
 *
 * The returned `readable` emits the archive bytes; the returned `controller`
 * is used to append entries (`add`), finish the archive (`finalize`), or
 * abort it (`error`). Each `add` call returns a WritableStream for that
 * entry's body.
 *
 * @returns {{readable: ReadableStream, controller: {add: Function, finalize: Function, error: Function}}}
 */
function createTarPacker() {
  // Captured synchronously when the ReadableStream is constructed below.
  let sink;
  let tar;

  const readable = new ReadableStream({
    start(controller) {
      sink = controller;
      // Wire the low-level packer's output, error, and completion callbacks
      // straight into the ReadableStream controller.
      tar = createTarPacker$1(
        (chunk) => sink.enqueue(chunk),
        (err) => sink.error(err),
        () => sink.close()
      );
    },
  });

  return {
    readable,
    controller: {
      /**
       * Appends one entry header and returns a writable for its body.
       * Bodyless entry kinds (directories, links, ...) get size forced to 0
       * and are ended immediately; their writable accepts only close().
       */
      add(header) {
        const noBody = isBodyless(header);
        const entryHeader = { ...header };
        if (noBody) entryHeader.size = 0;
        tar.add(entryHeader);
        if (noBody) tar.endEntry();
        return new WritableStream({
          write(chunk) {
            tar.write(chunk);
          },
          close() {
            // Entries with a body are terminated when their writable closes.
            if (!noBody) tar.endEntry();
          },
          abort(reason) {
            // Aborting one entry poisons the whole archive stream.
            sink.error(reason);
          },
        });
      },
      finalize() {
        tar.finalize();
      },
      error(err) {
        sink.error(err);
      },
    },
  };
}
|
|
|
|
//#endregion
|
|
//#region src/web/stream-utils.ts
|
|
/**
 * Reads a byte stream to completion and concatenates all chunks.
 *
 * @param {ReadableStream<Uint8Array>} stream - Stream to consume fully.
 * @returns {Promise<Uint8Array>} Every chunk, concatenated in arrival order.
 */
async function streamToBuffer(stream) {
  const reader = stream.getReader();
  try {
    const parts = [];
    let size = 0;
    for (;;) {
      const next = await reader.read();
      if (next.done) break;
      parts.push(next.value);
      size += next.value.length;
    }
    // One allocation up front, then copy each part at its running offset.
    const out = new Uint8Array(size);
    let offset = 0;
    for (const part of parts) {
      out.set(part, offset);
      offset += part.length;
    }
    return out;
  } finally {
    // Release the reader lock even if read() rejects mid-stream.
    reader.releaseLock();
  }
}
|
|
/** Consumes a ReadableStream to completion, discarding every chunk. */
const drain = (stream) => {
  const sink = new WritableStream();
  return stream.pipeTo(sink);
};
|
|
|
|
//#endregion
|
|
//#region src/web/unpack.ts
|
|
/**
 * Creates a TransformStream that decodes a tar byte stream into entries of
 * the shape `{ header, body }`, where `body` is a per-entry ReadableStream.
 *
 * Backpressure: the output side has highWaterMark 1, and each entry body's
 * `pull` re-invokes the pump, so bytes are only pushed as consumers read.
 *
 * @param {object} [options] - Passed through to the underlying unpacker.
 * @returns {TransformStream}
 */
function createTarDecoder(options = {}) {
  const unpacker = createUnpacker(options);
  // Controller of the ReadableStream for the entry body currently being
  // streamed; null when no body is open (bodyless entry, or between entries).
  let bodyController = null;
  // Reentrancy guard: pump() can be re-entered via body pull()/cancel()
  // callbacks while it is already running; the flag makes those calls no-ops.
  let pumping = false;
  const pump = (controller) => {
    if (pumping) return;
    pumping = true;
    try {
      while (true) if (unpacker.isEntryActive()) {
        if (bodyController) {
          // Forward available body bytes to the consumer; stop when the
          // unpacker has no more buffered data and the body isn't finished.
          if (unpacker.streamBody((c) => (bodyController.enqueue(c), true)) === 0 && !unpacker.isBodyComplete()) break;
        } else if (!unpacker.skipEntry()) break; // body cancelled: discard bytes
        if (unpacker.isBodyComplete()) {
          // Close the finished body (ignore "already closed/errored").
          try {
            bodyController?.close();
          } catch {}
          bodyController = null;
          // Tar pads each body to a 512-byte boundary; wait for more input
          // if the padding isn't fully buffered yet.
          if (!unpacker.skipPadding()) break;
        }
      } else {
        // Between entries: try to parse the next 512-byte header.
        const header = unpacker.readHeader();
        if (header === null || header === void 0) break;
        controller.enqueue({
          header,
          body: new ReadableStream({
            start(c) {
              // Zero-length bodies close immediately; otherwise expose the
              // controller so the pump can feed this entry's bytes.
              if (header.size === 0) c.close();
              else bodyController = c;
            },
            // Reading the body drives further decoding.
            pull: () => pump(controller),
            cancel() {
              // Consumer abandoned this body: drop the controller so the
              // pump switches to skipEntry() and keeps decoding.
              bodyController = null;
              pump(controller);
            }
          })
        });
      }
    } catch (error) {
      // Propagate decode failures to an open body stream, then rethrow so
      // the caller (transform/flush) can fail the TransformStream too.
      try {
        bodyController?.error(error);
      } catch {}
      bodyController = null;
      throw error;
    } finally {
      pumping = false;
    }
  };
  return new TransformStream({
    transform(chunk, controller) {
      try {
        unpacker.write(chunk);
        pump(controller);
      } catch (error) {
        try {
          bodyController?.error(error);
        } catch {}
        throw error;
      }
    },
    flush(controller) {
      try {
        unpacker.end();
        pump(controller);
        // Verify the archive terminated properly (e.g. end-of-archive blocks).
        unpacker.validateEOF();
        // NOTE(review): if an entry is still active with an incomplete body
        // at EOF, its stream is closed (not errored) here — presumably
        // validateEOF() has already thrown for truncated archives; confirm.
        if (unpacker.isEntryActive() && !unpacker.isBodyComplete()) try {
          bodyController?.close();
        } catch {}
      } catch (error) {
        try {
          bodyController?.error(error);
        } catch {}
        throw error;
      }
    }
  }, void 0, { highWaterMark: 1 });
}
|
|
|
|
//#endregion
|
|
//#region src/web/helpers.ts
|
|
/**
 * Packs a list of entries into a complete in-memory tar archive.
 *
 * Each entry is `{ header, body }` or `{ header, data }`; the content may be
 * a ReadableStream, a Blob, absent (bodyless entry), or anything accepted by
 * `normalizeBody` (e.g. string / Uint8Array).
 *
 * @param {Array<object>} entries - Entries to pack, in order.
 * @returns {Promise<Uint8Array>} The finished archive bytes.
 * @throws {TypeError} When an entry's content cannot be normalized.
 */
async function packTar(entries) {
  const { readable, controller } = createTarPacker();
  // Write all entries, then finalize; any failure is routed into the
  // archive stream so it surfaces from streamToBuffer below.
  await (async () => {
    for (const entry of entries) {
      const entryStream = controller.add(entry.header);
      const body = "body" in entry ? entry.body : entry.data;
      if (!body) {
        // Bodyless entry: just close its writable.
        await entryStream.close();
        continue;
      }
      if (body instanceof ReadableStream) await body.pipeTo(entryStream);
      else if (body instanceof Blob) await body.stream().pipeTo(entryStream);
      else {
        // Only normalization failures mean "unsupported content type".
        // Previously writer.write()/close() errors were caught here too and
        // misreported as a TypeError, masking real packing failures.
        let chunk;
        try {
          chunk = await normalizeBody(body);
        } catch {
          throw new TypeError(`Unsupported content type for entry "${entry.header.name}".`);
        }
        if (chunk.length > 0) {
          const writer = entryStream.getWriter();
          await writer.write(chunk);
          await writer.close();
        } else await entryStream.close();
      }
    }
  })().then(() => controller.finalize()).catch((err) => controller.error(err));
  // streamToBuffer already returns a freshly-allocated Uint8Array; the old
  // `new Uint8Array(...)` wrapper performed a redundant full copy.
  return await streamToBuffer(readable);
}
|
|
/**
 * Unpacks a complete tar archive into an in-memory list of entries.
 *
 * @param {ReadableStream|Uint8Array|ArrayBuffer} archive - Archive source.
 * @param {object} [options] - Decoder / header-transform options.
 * @returns {Promise<Array<{header: object, data?: Uint8Array}>>} Entries;
 *   bodyless entries carry only `header`, others also carry `data`.
 */
async function unpackTar(archive, options = {}) {
  // Normalize the input to a ReadableStream of bytes.
  let source;
  if (archive instanceof ReadableStream) {
    source = archive;
  } else {
    source = new ReadableStream({
      start(controller) {
        const bytes = archive instanceof Uint8Array ? archive : new Uint8Array(archive);
        controller.enqueue(bytes);
        controller.close();
      },
    });
  }

  const collected = [];
  for await (const { header, body } of source.pipeThrough(createTarDecoder(options))) {
    let finalHeader;
    try {
      finalHeader = transformHeader(header, options);
    } catch (error) {
      // Release the entry body before propagating the transform failure.
      await body.cancel();
      throw error;
    }
    if (finalHeader === null) {
      // Entry filtered out by the transform: discard its bytes and move on.
      await drain(body);
      continue;
    }
    if (isBodyless(finalHeader)) {
      await drain(body);
      collected.push({ header: finalHeader });
    } else {
      collected.push({ header: finalHeader, data: await streamToBuffer(body) });
    }
  }
  return collected;
}
|
|
|
|
//#endregion
|
|
export { createGzipDecoder, createGzipEncoder, createTarDecoder, createTarPacker, packTar, unpackTar }; |