Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
79 lines
1.5 KiB
JavaScript
var fs = require('fs');
|
|
var path = require('path');
|
|
var pify = require('pify');
|
|
|
|
var stat = pify(fs.stat);
|
|
var readFile = pify(fs.readFile);
|
|
var resolve = path.resolve;
|
|
|
|
var cache = Object.create(null);
|
|
|
|
function convert(content, encoding) {
|
|
if (Buffer.isEncoding(encoding)) {
|
|
return content.toString(encoding);
|
|
}
|
|
return content;
|
|
}
|
|
|
|
module.exports = function (path, encoding) {
|
|
path = resolve(path);
|
|
|
|
return stat(path).then(function (stats) {
|
|
var item = cache[path];
|
|
|
|
if (item && item.mtime.getTime() === stats.mtime.getTime()) {
|
|
return convert(item.content, encoding);
|
|
}
|
|
|
|
return readFile(path).then(function (data) {
|
|
cache[path] = {
|
|
mtime: stats.mtime,
|
|
content: data
|
|
};
|
|
|
|
return convert(data, encoding);
|
|
});
|
|
}).catch(function (err) {
|
|
cache[path] = null;
|
|
return Promise.reject(err);
|
|
});
|
|
};
|
|
|
|
module.exports.sync = function (path, encoding) {
|
|
path = resolve(path);
|
|
|
|
try {
|
|
var stats = fs.statSync(path);
|
|
var item = cache[path];
|
|
|
|
if (item && item.mtime.getTime() === stats.mtime.getTime()) {
|
|
return convert(item.content, encoding);
|
|
}
|
|
|
|
var data = fs.readFileSync(path);
|
|
|
|
cache[path] = {
|
|
mtime: stats.mtime,
|
|
content: data
|
|
};
|
|
|
|
return convert(data, encoding);
|
|
} catch (err) {
|
|
cache[path] = null;
|
|
throw err;
|
|
}
|
|
|
|
};
|
|
|
|
module.exports.get = function (path, encoding) {
|
|
path = resolve(path);
|
|
if (cache[path]) {
|
|
return convert(cache[path].content, encoding);
|
|
}
|
|
return null;
|
|
};
|
|
|
|
module.exports.clear = function () {
|
|
cache = Object.create(null);
|
|
};
|