Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
77 lines
3.2 KiB
JavaScript
77 lines
3.2 KiB
JavaScript
import os from 'node:os';
|
|
import path from 'node:path';
|
|
import { pipeline } from 'node:stream/promises';
|
|
import fsp, { writeFile } from 'node:fs/promises';
|
|
import zlib from 'node:zlib';
|
|
import logger from '@wdio/logger';
|
|
import { HttpsProxyAgent } from 'https-proxy-agent';
|
|
import { HttpProxyAgent } from 'http-proxy-agent';
|
|
import { unpackTar } from 'modern-tar/fs';
|
|
import { BlobReader, BlobWriter, ZipReader } from '@zip.js/zip.js';
|
|
import { BINARY_FILE, GECKODRIVER_CARGO_YAML } from './constants.js';
|
|
import { hasAccess, getDownloadUrl, retryFetch } from './utils.js';
|
|
const log = logger('geckodriver');

/**
 * Shared fetch options for all outbound requests; when a proxy is configured
 * via the environment, route requests through the matching agent
 * (HTTPS takes precedence over HTTP).
 */
const fetchOpts = {};
const httpsProxy = process.env.HTTPS_PROXY;
const httpProxy = process.env.HTTP_PROXY;
if (httpsProxy) {
    fetchOpts.agent = new HttpsProxyAgent(httpsProxy);
} else if (httpProxy) {
    fetchOpts.agent = new HttpProxyAgent(httpProxy);
}
|
|
/**
 * Download the Geckodriver binary into the cache directory and return its path.
 *
 * If the binary is already cached, the cached path is returned immediately.
 * When no version is given (argument or GECKODRIVER_VERSION env var), the
 * latest version is read from the project's Cargo.toml file.
 *
 * @param {string} [geckodriverVersion] - version to fetch; defaults to env GECKODRIVER_VERSION or latest
 * @param {string} [cacheDir] - target directory; defaults to env GECKODRIVER_CACHE_DIR or the OS temp dir
 * @returns {Promise<string>} absolute path to the executable Geckodriver binary
 * @throws {Error} when the latest version cannot be detected or the download fails
 */
export async function download(geckodriverVersion = process.env.GECKODRIVER_VERSION, cacheDir = process.env.GECKODRIVER_CACHE_DIR || os.tmpdir()) {
    const binaryFilePath = path.resolve(cacheDir, BINARY_FILE);

    // short-circuit: reuse an already-downloaded binary
    if (await hasAccess(binaryFilePath)) {
        return binaryFilePath;
    }

    /**
     * get latest version of Geckodriver
     */
    let version = geckodriverVersion;
    if (!version) {
        const cargoRes = await retryFetch(GECKODRIVER_CARGO_YAML, fetchOpts);
        const toml = await cargoRes.text();
        const versionLine = toml.split('\n').find((line) => line.startsWith('version = '));
        if (!versionLine) {
            throw new Error(`Couldn't find version property in Cargo.toml file: ${JSON.stringify(toml)}`);
        }
        // strip the `version = ` prefix and the surrounding quotes
        version = versionLine.split(' = ').pop().slice(1, -1);
        log.info(`Detected Geckodriver v${version} to be latest`);
    }

    const url = getDownloadUrl(version);
    log.info(`Downloading Geckodriver from ${url}`);
    const res = await retryFetch(url, fetchOpts);
    if (res.status !== 200) {
        throw new Error(`Failed to download binary (statusCode ${res.status}): ${res.statusText}`);
    }

    await fsp.mkdir(cacheDir, { recursive: true });

    // archives are either .zip (Windows builds) or gzipped tarballs
    if (url.endsWith('.zip')) {
        await downloadZip(res, cacheDir);
    } else {
        await pipeline(res.body, zlib.createGunzip(), unpackTar(cacheDir));
    }

    // make the extracted binary executable
    await fsp.chmod(binaryFilePath, '755');
    return binaryFilePath;
}
|
|
/**
 * Extract every file entry of a zipped download response into the cache directory.
 *
 * Directory entries are skipped; parent directories are created on demand.
 *
 * @param {Response} res - fetch response whose body is a zip archive
 * @param {string} cacheDir - destination directory for the extracted files
 * @returns {Promise<void>}
 */
async function downloadZip(res, cacheDir) {
    const zipBlob = await res.blob();
    const zip = new ZipReader(new BlobReader(zipBlob));
    try {
        for (const entry of await zip.getEntries()) {
            if (entry.directory) {
                continue;
            }
            const unzippedFilePath = path.join(cacheDir, entry.filename);
            const parentDir = path.dirname(unzippedFilePath);
            if (!await hasAccess(parentDir)) {
                await fsp.mkdir(parentDir, { recursive: true });
            }
            const content = await entry.getData(new BlobWriter());
            await writeFile(unzippedFilePath, content.stream());
        }
    } finally {
        // release the reader's resources — the original leaked the open ZipReader
        await zip.close();
    }
}
|
|
/**
 * download on install
 */
const installJsPath = path.join('dist', 'install.js');
const entryScript = process.argv[1] ? path.normalize(process.argv[1]) : undefined;
// run only when this file is the process entry point and auto-install is opted in
if (entryScript &&
    entryScript.endsWith(path.sep + installJsPath) &&
    process.env.GECKODRIVER_AUTO_INSTALL) {
    await download().then(
        () => log.info('Success!'),
        (err) => log.error(`Failed to install Geckodriver: ${err.stack}`)
    );
}
|
|
//# sourceMappingURL=install.js.map
|