Implements Phases 1-8 of the TFTSR implementation plan.

Rust backend (Tauri 2.x, src-tauri/):
- Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama
- PII detection engine: 11 regex patterns with overlap resolution
- SQLCipher AES-256 encrypted database with 10 versioned migrations
- 28 Tauri IPC commands for triage, analysis, document, and system ops
- Ollama: hardware probe, model recommendations, pull/delete with events
- RCA and blameless post-mortem Markdown document generators
- PDF export via printpdf
- Audit log: SHA-256 hash of every external data send
- Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2)

Frontend (React 18 + TypeScript + Vite, src/):
- 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History, plus Settings
- 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives
- 3 Zustand stores: session, settings (persisted), history
- Type-safe tauriCommands.ts matching Rust backend types exactly
- 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs)

DevOps:
- .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push
- .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload

Verified:
- cargo check: zero errors
- tsc --noEmit: zero errors
- vitest run: 13/13 unit tests passing

Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
66 lines
1.9 KiB
JavaScript
import {isReadableStream} from 'is-stream';
import {asyncIterator} from '@sec-ant/readable-stream/ponyfill';

export const getAsyncIterable = stream => {
	if (isReadableStream(stream, {checkOpen: false}) && nodeImports.on !== undefined) {
		return getStreamIterable(stream);
	}

	if (typeof stream?.[Symbol.asyncIterator] === 'function') {
		return stream;
	}

	// `ReadableStream[Symbol.asyncIterator]` support is missing in multiple browsers, so we ponyfill it
	if (toString.call(stream) === '[object ReadableStream]') {
		return asyncIterator.call(stream);
	}

	throw new TypeError('The first argument must be a Readable, a ReadableStream, or an async iterable.');
};

const {toString} = Object.prototype;

// The default iterable for Node.js streams does not allow for multiple readers at once, so we re-implement it
const getStreamIterable = async function * (stream) {
	const controller = new AbortController();
	const state = {};
	handleStreamEnd(stream, controller, state);

	try {
		for await (const [chunk] of nodeImports.on(stream, 'data', {signal: controller.signal})) {
			yield chunk;
		}
	} catch (error) {
		// Stream failure, for example due to `stream.destroy(error)`
		if (state.error !== undefined) {
			throw state.error;
			// `error` event directly emitted on stream
		} else if (!controller.signal.aborted) {
			throw error;
			// Otherwise, stream completed successfully
		}
		// The `finally` block also runs when the caller throws, for example due to the `maxBuffer` option
	} finally {
		stream.destroy();
	}
};

const handleStreamEnd = async (stream, controller, state) => {
	try {
		await nodeImports.finished(stream, {
			cleanup: true,
			readable: true,
			writable: false,
			error: false,
		});
	} catch (error) {
		state.error = error;
	} finally {
		controller.abort();
	}
};

// Loaded by the Node entrypoint, but not by the browser one.
// This prevents using dynamic imports.
export const nodeImports = {};
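The file above exports nodeImports as an empty object and expects a Node-only entrypoint to populate it before getAsyncIterable is called, so that the browser build never references Node built-ins. Below is a minimal sketch of such an entrypoint, assuming this file is reachable as ./stream.js and using on from node:events and finished from node:stream/promises (the two functions the code above calls); the real package may organize its entry files differently, and toText is only an illustrative consumer.

// Hypothetical Node entrypoint sketch: provides the Node-only APIs that
// `getAsyncIterable` needs to take the events.on() fast path.
import {on} from 'node:events';
import {finished} from 'node:stream/promises';

import {getAsyncIterable, nodeImports} from './stream.js';

nodeImports.on = on;
nodeImports.finished = finished;

// Example consumer: concatenate any supported input (Node Readable,
// web ReadableStream, or other async iterable) into a single string.
export const toText = async source => {
	let text = '';
	for await (const chunk of getAsyncIterable(source)) {
		text += chunk;
	}

	return text;
};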