tftsr-devops_investigation/node_modules/jake/jakefile.js
Shaun Arman 8839075805 feat: initial implementation of TFTSR IT Triage & RCA application
Implements Phases 1-8 of the TFTSR implementation plan.

Rust backend (Tauri 2.x, src-tauri/):
- Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama
- PII detection engine: 11 regex patterns with overlap resolution
- SQLCipher AES-256 encrypted database with 10 versioned migrations
- 28 Tauri IPC commands for triage, analysis, document, and system ops
- Ollama: hardware probe, model recommendations, pull/delete with events
- RCA and blameless post-mortem Markdown document generators
- PDF export via printpdf
- Audit log: SHA-256 hash of every external data send
- Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2)

Frontend (React 18 + TypeScript + Vite, src/):
- 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings
- 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives
- 3 Zustand stores: session, settings (persisted), history
- Type-safe tauriCommands.ts matching Rust backend types exactly
- 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs)

DevOps:
- .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push
- .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload

Verified:
- cargo check: zero errors
- tsc --noEmit: zero errors
- vitest run: 13/13 unit tests passing

Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
2026-03-14 22:36:25 -05:00

113 lines
2.6 KiB
JavaScript

// Shell-out helper used by the doc, test, and lint tasks below.
// `const` — these module-level bindings are never reassigned.
const proc = require('child_process');

// Absolute project root, exported through the environment so spawned
// helper processes can locate it.
const PROJECT_DIR = process.cwd();
process.env.PROJECT_DIR = PROJECT_DIR;
// Documentation tasks. `doc:generate` rebuilds the API docs (a
// node-jsdoc-toolkit checkout is expected as a sibling directory);
// `doc:clobber` removes any previous output first.
// Consistency fix: `var` replaced with `const` to match the rest of
// the file's `let`/`const` style.
namespace('doc', function () {
  task('generate', ['doc:clobber'], function () {
    const cmd = '../node-jsdoc-toolkit/app/run.js -n -r=100 ' +
        '-t=../node-jsdoc-toolkit/templates/codeview -d=./doc/ ./lib';
    jake.logger.log('Generating docs ...');
    // jake.exec invokes the callback only on success; complete()
    // signals this {async: true} task is done.
    jake.exec([cmd], function () {
      jake.logger.log('Done.');
      complete();
    });
  }, {async: true});

  task('clobber', function () {
    const cmd = 'rm -fr ./doc/*';
    jake.exec([cmd], function () {
      jake.logger.log('Clobbered old docs.');
      complete();
    });
  }, {async: true});
});
// Top-level alias: `jake doc` clobbers old output and regenerates docs.
desc('Generate docs for Jake');
task('doc', ['doc:generate']);
// Defines the `publish` task family that packages jake itself for npm.
npmPublishTask('jake', function () {
  // Everything that ships in the published tarball...
  const shippedFiles = [
    'Makefile',
    'jakefile.js',
    'README.md',
    'package.json',
    'usage.txt',
    'lib/**',
    'bin/**',
    'test/**'
  ];
  // ...minus the scratch directory the test-suite writes into.
  const scratchFiles = [
    'test/tmp'
  ];
  this.packageFiles.include(shippedFiles);
  this.packageFiles.exclude(scratchFiles);
});
// Run the packaging step from the project root.
jake.Task['publish:package'].directory = PROJECT_DIR;
// Test tasks: `test:unit` and `test:integration` run mocha over their
// respective directories; `test:integrationClobber` cleans up scratch
// files the integration run leaves behind.
namespace('test', function () {
  // Resolve mocha's file argument from the optional `filter` env var
  // (e.g. `filter=foo.js jake test`), defaulting to every *.js file.
  function mochaArgs() {
    return [process.env.filter || '*.js'];
  }

  // Spawn the local mocha binary and settle when it exits.
  // Rejects if the process cannot be spawned at all (e.g. mocha not
  // installed), instead of hanging or resolving silently.
  function runMocha(testArgs) {
    return new Promise((resolve, reject) => {
      const spawned = proc.spawn(`${PROJECT_DIR}/node_modules/.bin/mocha`, testArgs, {
        stdio: 'inherit'
      });
      spawned.on('error', reject);
      spawned.on('exit', () => {
        resolve();
      });
    });
  }

  let integrationTest = task('integration', async function () {
    return runMocha(mochaArgs());
  });
  integrationTest.directory = `${PROJECT_DIR}/test/integration`;

  let integrationClobber = task('integrationClobber', function () {
    proc.execSync('rm -rf package.json pkg tmp_publish');
  });
  integrationClobber.directory = `${PROJECT_DIR}/test/integration`;

  // Bug fix: this task previously spawned mocha and returned
  // immediately, so jake never waited for the unit tests to finish.
  // Returning the Promise makes jake await process exit, matching
  // the integration task.
  let unitTest = task('unit', async function () {
    return runMocha(mochaArgs());
  });
  unitTest.directory = `${PROJECT_DIR}/test/unit`;
});
// Aggregate task: unit tests, integration tests, then cleanup of the
// integration run's scratch files.
desc('Runs all tests');
task('test', ['test:unit', 'test:integration', 'test:integrationClobber']);
desc('Runs eslint for both lib and test directories');
// Pass a truthy task argument (e.g. `jake lint[fix]`) to run with --fix.
task('lint', function (doFix) {
  let cmd = 'eslint --format codeframe "lib/**/*.js" "test/**/*.js"';
  if (doFix) {
    cmd += ' --fix';
  }
  try {
    proc.execSync(cmd);
  }
  catch (err) {
    // execSync throws on a non-zero exit status; surface whatever
    // output is available. Robustness fix: err.stderr/err.stdout are
    // null when the child could not be spawned at all, so guard them
    // before calling toString() — otherwise the catch itself throws.
    console.log(err.message);
    if (err.stderr) {
      console.log(err.stderr.toString());
    }
    if (err.stdout) {
      console.log(err.stdout.toString());
    }
    fail('eslint failed');
  }
});