tftsr-devops_investigation/node_modules/execa/lib/stdio/duplicate.js
Shaun Arman 8839075805 feat: initial implementation of TFTSR IT Triage & RCA application
Implements Phases 1-8 of the TFTSR implementation plan.

Rust backend (Tauri 2.x, src-tauri/):
- Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama
- PII detection engine: 11 regex patterns with overlap resolution
- SQLCipher AES-256 encrypted database with 10 versioned migrations
- 28 Tauri IPC commands for triage, analysis, document, and system ops
- Ollama: hardware probe, model recommendations, pull/delete with events
- RCA and blameless post-mortem Markdown document generators
- PDF export via printpdf
- Audit log: SHA-256 hash of every external data send
- Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2)

Frontend (React 18 + TypeScript + Vite, src/):
- 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings
- 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives
- 3 Zustand stores: session, settings (persisted), history
- Type-safe tauriCommands.ts matching Rust backend types exactly
- 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs)

DevOps:
- .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push
- .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload

Verified:
- cargo check: zero errors
- tsc --noEmit: zero errors
- vitest run: 13/13 unit tests passing

Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
2026-03-14 22:36:25 -05:00

117 lines
3.9 KiB
JavaScript

import {
SPECIAL_DUPLICATE_TYPES_SYNC,
SPECIAL_DUPLICATE_TYPES,
FORBID_DUPLICATE_TYPES,
TYPE_TO_MESSAGE,
} from './type.js';
// Re-using the same value on several file descriptors is most likely a mistake,
// so repeated values are dropped, keeping only the last occurrence.
// Generators are exempt, since sharing them can be useful: a fresh instance is
// created per file descriptor.
export const filterDuplicates = stdioItems => stdioItems.filter((currentItem, currentIndex) => {
	const isGenerator = currentItem.type === 'generator' || currentItem.type === 'asyncGenerator';
	return isGenerator || stdioItems.every((otherItem, otherIndex) =>
		otherItem.value !== currentItem.value || currentIndex >= otherIndex);
});
// Detects when two file descriptors target the same file or stream.
// For example `{stdout: {file: './output.txt'}, stderr: {file: './output.txt'}}`.
export const getDuplicateStream = ({stdioItem: {type, value, optionName}, direction, fileDescriptors, isSync}) => {
	const otherStdioItems = getOtherStdioItems(fileDescriptors, type);
	if (otherStdioItems.length === 0) {
		return;
	}

	// Sync mode only validates: there is no stream instance to re-use.
	if (isSync) {
		validateDuplicateStreamSync({otherStdioItems, type, value, optionName, direction});
		return;
	}

	// Some types may legitimately share a target: re-use a single stream instance.
	if (SPECIAL_DUPLICATE_TYPES.has(type)) {
		return getDuplicateStreamInstance({otherStdioItems, type, value, optionName, direction});
	}

	// Other types (Duplex/TransformStream) must never share a target.
	if (FORBID_DUPLICATE_TYPES.has(type)) {
		validateDuplicateTransform({otherStdioItems, type, value, optionName});
	}
};
// Collects the stdio items of the given `type` across all file descriptors,
// annotating each with the `direction` of its file descriptor.
const getOtherStdioItems = (fileDescriptors, type) => {
	const results = [];
	for (const {direction, stdioItems} of fileDescriptors) {
		for (const stdioItem of stdioItems) {
			if (stdioItem.type === type) {
				results.push({...stdioItem, direction});
			}
		}
	}

	return results;
};
// With `execaSync()`, a file path must not be set both as input and output.
const validateDuplicateStreamSync = ({otherStdioItems, type, value, optionName, direction}) => {
	if (!SPECIAL_DUPLICATE_TYPES_SYNC.has(type)) {
		return;
	}

	// Called for validation only: sync mode does not use the returned stream.
	getDuplicateStreamInstance({otherStdioItems, type, value, optionName, direction});
};
// When two file descriptors share the same file or stream, the same underlying
// stream instance must be re-used. Otherwise, the stream would be closed twice
// when piping ends. This only matters for output file descriptors.
// Generator functions are unaffected, since they create one instance per file descriptor.
// Sharing the same file or stream between an input and an output file
// descriptor makes no sense, so it is forbidden.
const getDuplicateStreamInstance = ({otherStdioItems, type, value, optionName, direction}) => {
	const sharedStdioItems = otherStdioItems.filter(otherItem => hasSameValue(otherItem, value));
	if (sharedStdioItems.length === 0) {
		return;
	}

	// A shared target with the opposite direction is an input/output conflict.
	const conflictingStdioItem = sharedStdioItems.find(otherItem => otherItem.direction !== direction);
	throwOnDuplicateStream(conflictingStdioItem, optionName, type);

	if (direction !== 'output') {
		return;
	}

	return sharedStdioItems[0].stream;
};
// Whether `secondValue` targets the same underlying resource as the stdio item,
// comparing by path for files, by href for URLs, and by identity otherwise.
const hasSameValue = ({type, value}, secondValue) => {
	switch (type) {
		case 'filePath': {
			return value.file === secondValue.file;
		}

		case 'fileUrl': {
			return value.href === secondValue.href;
		}

		default: {
			return value === secondValue;
		}
	}
};
// Two file descriptors must not share the same Duplex or TransformStream,
// because those are assigned directly to `subprocess.std*`. For example,
// `subprocess.stdout` and `subprocess.stderr` could end up being the same
// value, so reading from either would return data from both stdout and stderr.
const validateDuplicateTransform = ({otherStdioItems, type, value, optionName}) => {
	const duplicateStdioItem = otherStdioItems.find(otherItem => otherItem.value.transform === value.transform);
	throwOnDuplicateStream(duplicateStdioItem, optionName, type);
};
// Throws when a forbidden shared target was found; no-op when `stdioItem`
// is `undefined` (i.e. no duplicate was detected).
const throwOnDuplicateStream = (stdioItem, optionName, type) => {
	if (stdioItem === undefined) {
		return;
	}

	throw new TypeError(`The \`${stdioItem.optionName}\` and \`${optionName}\` options must not target ${TYPE_TO_MESSAGE[type]} that is the same.`);
};