Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
122 lines
2.7 KiB
JavaScript
122 lines
2.7 KiB
JavaScript
import {getAsyncIterable} from './stream.js';
|
|
|
|
/**
 * Read `stream` to completion, accumulating its chunks via the provided hooks.
 *
 * @param {object} stream - Source adapted by `getAsyncIterable()` into an async iterable.
 * @param {object} hooks - Accumulator strategy:
 *   `init()` creates the accumulator state (its `length` is managed here);
 *   `convertChunk[type](chunk, state)` normalizes each chunk by its detected type;
 *   `getSize`, `truncateChunk`, `addChunk` grow the accumulator under the size cap;
 *   `getFinalChunk(state)` may yield one trailing chunk;
 *   `finalize(state)` produces the return value.
 * @param {object} [options]
 * @param {number} [options.maxBuffer=Infinity] - A `MaxBufferError` is thrown once
 *   the accumulated size would exceed this.
 * @returns {Promise<*>} The result of `finalize(state)`.
 * @throws Any stream or hook error, normalized to an `Error` object carrying
 *   `bufferedData` — the contents accumulated before the failure.
 */
export const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => {
	const asyncIterable = getAsyncIterable(stream);

	const state = init();
	state.length = 0;

	try {
		for await (const chunk of asyncIterable) {
			const chunkType = getChunkType(chunk);
			const convertedChunk = convertChunk[chunkType](chunk, state);
			appendChunk({
				convertedChunk,
				state,
				getSize,
				truncateChunk,
				addChunk,
				maxBuffer,
			});
		}

		// Note: `convertChunk` is intentionally not passed here — `appendFinalChunk`
		// does not use it (the original call included it as a dead argument).
		appendFinalChunk({
			state,
			getSize,
			truncateChunk,
			addChunk,
			getFinalChunk,
			maxBuffer,
		});
		return finalize(state);
	} catch (error) {
		// Non-object throws (strings, etc.) cannot carry extra properties,
		// so wrap them in an Error first.
		const normalizedError = typeof error === 'object' && error !== null ? error : new Error(error);
		// Expose whatever was read before the failure (e.g. the truncated
		// contents kept by a MaxBufferError).
		normalizedError.bufferedData = finalize(state);
		throw normalizedError;
	}
};
|
|
|
|
// Flush the optional trailing chunk produced by `getFinalChunk(state)`, if any,
// through the same size-capped append path as regular chunks.
const appendFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => {
	const finalChunk = getFinalChunk(state);
	if (finalChunk === undefined) {
		return;
	}

	appendChunk({convertedChunk: finalChunk, state, getSize, truncateChunk, addChunk, maxBuffer});
};
|
|
|
|
// Append one converted chunk to the accumulated contents, enforcing `maxBuffer`.
// On overflow, as much of the chunk as fits is kept (when `truncateChunk`
// returns a value) and a `MaxBufferError` is thrown.
const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => {
	const grownLength = state.length + getSize(convertedChunk);

	// Fast path: the whole chunk fits under the cap.
	if (grownLength <= maxBuffer) {
		addNewChunk(convertedChunk, state, addChunk, grownLength);
		return;
	}

	// Keep the part that still fits, if the accumulator supports truncation,
	// then fail.
	const remainingSize = maxBuffer - state.length;
	const partialChunk = truncateChunk(convertedChunk, remainingSize);
	if (partialChunk !== undefined) {
		addNewChunk(partialChunk, state, addChunk, maxBuffer);
	}

	throw new MaxBufferError();
};
|
|
|
|
// Commit a chunk: store the accumulator returned by `addChunk` and record the
// new total length on the state.
const addNewChunk = (convertedChunk, state, addChunk, newLength) => {
	const contents = addChunk(convertedChunk, state, newLength);
	state.contents = contents;
	state.length = newLength;
};
|
|
|
|
// Classify a stream chunk so the matching `convertChunk` handler can be picked.
// Returns one of: 'string' | 'buffer' | 'arrayBuffer' | 'dataView' |
// 'typedArray' | 'others'.
const getChunkType = chunk => {
	if (typeof chunk === 'string') {
		return 'string';
	}

	// Primitives (numbers, symbols, undefined, …) and `null` have no
	// dedicated handler.
	if (typeof chunk !== 'object' || chunk === null) {
		return 'others';
	}

	// Node buffers are checked before the generic typed-array test so they get
	// their dedicated handler; optional chaining keeps this safe on runtimes
	// without a global `Buffer`.
	if (globalThis.Buffer?.isBuffer(chunk)) {
		return 'buffer';
	}

	const tag = objectToString.call(chunk);
	if (tag === '[object ArrayBuffer]') {
		return 'arrayBuffer';
	}

	if (tag === '[object DataView]') {
		return 'dataView';
	}

	// Duck-type typed arrays (Uint8Array, etc.) by shape rather than by
	// constructor identity.
	const isTypedArrayLike = Number.isInteger(chunk.byteLength)
		&& Number.isInteger(chunk.byteOffset)
		&& objectToString.call(chunk.buffer) === '[object ArrayBuffer]';
	return isTypedArrayLike ? 'typedArray' : 'others';
};
|
|
|
|
// Cached `Object.prototype.toString`, used to read `[object X]` type tags.
const objectToString = Object.prototype.toString;
|
|
|
|
/**
 * Error thrown when the accumulated stream contents would exceed the
 * `maxBuffer` option. Callers can identify it via `instanceof` or
 * `error.name === 'MaxBufferError'`.
 */
export class MaxBufferError extends Error {
	constructor() {
		super('maxBuffer exceeded');
		this.name = 'MaxBufferError';
	}
}
|