tftsr-devops_investigation/node_modules/@puppeteer/browsers/lib/esm/fileUtil.js
Shaun Arman 8839075805 feat: initial implementation of TFTSR IT Triage & RCA application
Implements Phases 1-8 of the TFTSR implementation plan.

Rust backend (Tauri 2.x, src-tauri/):
- Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama
- PII detection engine: 11 regex patterns with overlap resolution
- SQLCipher AES-256 encrypted database with 10 versioned migrations
- 28 Tauri IPC commands for triage, analysis, document, and system ops
- Ollama: hardware probe, model recommendations, pull/delete with events
- RCA and blameless post-mortem Markdown document generators
- PDF export via printpdf
- Audit log: SHA-256 hash of every external data send
- Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2)

Frontend (React 18 + TypeScript + Vite, src/):
- 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings
- 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives
- 3 Zustand stores: session, settings (persisted), history
- Type-safe tauriCommands.ts matching Rust backend types exactly
- 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs)

DevOps:
- .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push
- .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload

Verified:
- cargo check: zero errors
- tsc --noEmit: zero errors
- vitest run: 13/13 unit tests passing

Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
2026-03-14 22:36:25 -05:00

156 lines
4.7 KiB
JavaScript

/**
* @license
* Copyright 2023 Google Inc.
* SPDX-License-Identifier: Apache-2.0
*/
import { spawnSync, spawn } from 'node:child_process';
import { createReadStream } from 'node:fs';
import { mkdir, readdir } from 'node:fs/promises';
import * as path from 'node:path';
import { Stream } from 'node:stream';
import debug from 'debug';
const debugFileUtil = debug('puppeteer:browsers:fileUtil');
/**
* @internal
*/
export async function unpackArchive(archivePath, folderPath) {
  // Anchor relative destinations to the current working directory.
  if (!path.isAbsolute(folderPath)) {
    folderPath = path.resolve(process.cwd(), folderPath);
  }
  // Dispatch on the archive's file extension; each branch fully handles
  // extraction and returns, so falling through means "unsupported".
  if (archivePath.endsWith('.zip')) {
    const extractZip = await import('extract-zip');
    await extractZip.default(archivePath, { dir: folderPath });
    return;
  }
  if (archivePath.endsWith('.tar.bz2')) {
    await extractTar(archivePath, folderPath, 'bzip2');
    return;
  }
  if (archivePath.endsWith('.dmg')) {
    await mkdir(folderPath);
    await installDMG(archivePath, folderPath);
    return;
  }
  if (archivePath.endsWith('.exe')) {
    // Firefox on Windows ships a self-extracting installer.
    const result = spawnSync(archivePath, [`/ExtractDir=${folderPath}`], {
      env: {
        __compat_layer: 'RunAsInvoker',
      },
    });
    if (result.status !== 0) {
      throw new Error(`Failed to extract ${archivePath} to ${folderPath}: ${result.output}`);
    }
    return;
  }
  if (archivePath.endsWith('.tar.xz')) {
    await extractTar(archivePath, folderPath, 'xz');
    return;
  }
  throw new Error(`Unsupported archive format: ${archivePath}`);
}
/**
 * Bridges data through an external child process: chunks written to the
 * returned Transform are forwarded to `child`'s stdin, and whatever the
 * child emits on stdout becomes the Transform's readable output.
 */
function createTransformStream(child) {
const stream = new Stream.Transform({
// Forward each chunk to the child's stdin; when its buffer is full,
// delay the callback until 'drain' so backpressure propagates upstream.
transform(chunk, encoding, callback) {
if (!child.stdin.write(chunk, encoding)) {
child.stdin.once('drain', callback);
}
else {
callback();
}
},
// On end of input: close the child's stdin and wait for its stdout to
// close — unless stdout is already destroyed, in which case finish now.
flush(callback) {
if (child.stdout.destroyed) {
callback();
}
else {
child.stdin.end();
child.stdout.on('close', callback);
}
},
});
child.stdin.on('error', e => {
// EPIPE means the child stopped reading before all input was written;
// treat that as a normal end of stream rather than a failure.
if ('code' in e && e.code === 'EPIPE') {
// finished before reading the file finished (i.e. head)
stream.emit('end');
}
else {
stream.destroy(e);
}
});
// Re-emit the child's stdout as this stream's output; propagate errors.
child.stdout
.on('data', data => {
return stream.push(data);
})
.on('error', e => {
return stream.destroy(e);
});
// When the child process closes, end the transform's readable side.
child.once('close', () => {
return stream.end();
});
return stream;
}
/**
* @internal
*/
// Names of the external decompression binaries invoked by `extractTar`.
// Exported as a mutable object — presumably so tests can substitute stub
// commands for the real `xz`/`bzip2` utilities; TODO confirm against the
// test suite before freezing or inlining.
export const internalConstantsForTesting = {
xz: 'xz',
bzip2: 'bzip2',
};
/**
* @internal
*/
async function extractTar(tarPath, folderPath, decompressUtilityName) {
  const tarFs = await import('tar-fs');
  return await new Promise((fulfill, reject) => {
    // Turn an ENOENT spawn failure into an actionable "install this
    // utility" message; every other error is rejected unchanged.
    const handleError = utilityName => {
      return error => {
        if ('code' in error && error.code === 'ENOENT') {
          error = new Error(`\`${utilityName}\` utility is required to unpack this archive`, {
            cause: error,
          });
        }
        reject(error);
      };
    };
    // Launch the external decompressor in streaming (-d) mode; its stderr
    // is inherited so diagnostics surface in the parent process.
    const decompressor = spawn(internalConstantsForTesting[decompressUtilityName], ['-d'], {
      stdio: ['pipe', 'pipe', 'inherit'],
    })
      .once('error', handleError(decompressUtilityName))
      .once('exit', code => {
        debugFileUtil(`${decompressUtilityName} exited, code=${code}`);
      });
    const untar = tarFs.extract(folderPath);
    untar.once('error', handleError('tar'));
    untar.once('finish', fulfill);
    // Pipeline: archive file -> decompressor -> tar extraction on disk.
    createReadStream(tarPath).pipe(createTransformStream(decompressor)).pipe(untar);
  });
}
/**
* @internal
*/
async function installDMG(dmgPath, folderPath) {
  // Mount the disk image headlessly; hdiutil reports the mount point on
  // stdout as a "/Volumes/..." line.
  const attach = spawnSync('hdiutil', [
    'attach',
    '-nobrowse',
    '-noautoopen',
    dmgPath,
  ]);
  const volumeMatch = attach.stdout.toString('utf8').match(/\/Volumes\/(.*)/m);
  if (!volumeMatch) {
    throw new Error(`Could not find volume path in ${attach.stdout}`);
  }
  const mountPath = volumeMatch[0];
  try {
    // Find the first .app bundle on the mounted volume and copy it
    // recursively into the destination folder.
    const entries = await readdir(mountPath);
    let appName;
    for (const entry of entries) {
      if (typeof entry === 'string' && entry.endsWith('.app')) {
        appName = entry;
        break;
      }
    }
    if (!appName) {
      throw new Error(`Cannot find app in ${mountPath}`);
    }
    spawnSync('cp', ['-R', path.join(mountPath, appName), folderPath]);
  }
  finally {
    // Always detach the image, even when the copy above failed.
    spawnSync('hdiutil', ['detach', mountPath, '-quiet']);
  }
}
//# sourceMappingURL=fileUtil.js.map