tftsr-devops_investigation/node_modules/sucrase/dist/esm/index.js

134 lines
4.2 KiB
JavaScript
Raw Normal View History

feat: initial implementation of TFTSR IT Triage & RCA application Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
2026-03-15 03:36:25 +00:00
import CJSImportProcessor from "./CJSImportProcessor";
import computeSourceMap, {} from "./computeSourceMap";
import {HelperManager} from "./HelperManager";
import identifyShadowedGlobals from "./identifyShadowedGlobals";
import NameManager from "./NameManager";
import {validateOptions} from "./Options";
import {parse} from "./parser";
import TokenProcessor from "./TokenProcessor";
import RootTransformer from "./transformers/RootTransformer";
import formatTokens from "./util/formatTokens";
import getTSImportedNames from "./util/getTSImportedNames";
;
/**
 * Report the version of this sucrase build.
 * @returns {string} the semver version string.
 */
export function getVersion() {
  // istanbul ignore next -- trivial constant, excluded from coverage.
  return "3.35.1";
}
/**
 * Transform the given source code according to the requested transforms.
 *
 * @param {string} code - Source text to transform.
 * @param {Object} options - Transform options; must pass `validateOptions`.
 * @returns {{code: string, sourceMap?: Object}} transformed code, plus a
 *   source map when `options.sourceMapOptions` is provided.
 * @throws {Error} if a source map is requested without `options.filePath`,
 *   or if parsing/transforming fails (the file path is prepended to the
 *   message when available).
 */
export function transform(code, options) {
  validateOptions(options);
  try {
    const context = getSucraseContext(code, options);
    const rootTransformer = new RootTransformer(
      context,
      options.transforms,
      Boolean(options.enableLegacyBabel5ModuleInterop),
      options,
    );
    const {code: transformedCode} = rootTransformer.transform();
    if (!options.sourceMapOptions) {
      return {code: transformedCode};
    }
    // A file path is mandatory for source maps: it becomes the map's
    // `sources` entry.
    if (!options.filePath) {
      throw new Error("filePath must be specified when generating a source map.");
    }
    const sourceMap = computeSourceMap(
      {code: transformedCode},
      options.filePath,
      options.sourceMapOptions,
      code,
      context.tokenProcessor.tokens,
    );
    return {code: transformedCode, sourceMap};
  } catch (e) {
    // Prefix the file path (when known) so errors are attributable.
    if (options.filePath) {
      e.message = `Error transforming ${options.filePath}: ${e.message}`;
    }
    throw e;
  }
}
/**
 * Return a string representation of the sucrase tokens, mostly useful for
 * diagnostic purposes.
 *
 * @param {string} code - Source text to tokenize.
 * @param {Object} options - Same options accepted by `transform`.
 * @returns {string} a human-readable dump of the token stream.
 */
export function getFormattedTokens(code, options) {
  const {tokenProcessor} = getSucraseContext(code, options);
  return formatTokens(code, tokenProcessor.tokens);
}
/**
 * Call into the parser/tokenizer and do some further preprocessing:
 * - Come up with a set of used names so that we can assign new names.
 * - Preprocess all import/export statements so we know which globals we are interested in.
 * - Compute situations where any of those globals are shadowed.
 *
 * In the future, some of these preprocessing steps can be skipped based on what actual work is
 * being done.
 */
function getSucraseContext(code, options) {
  const {transforms} = options;
  const jsxEnabled = transforms.includes("jsx");
  const typeScriptEnabled = transforms.includes("typescript");
  const flowEnabled = transforms.includes("flow");
  const esTransformsDisabled = options.disableESTransforms === true;

  const file = parse(code, jsxEnabled, typeScriptEnabled, flowEnabled);
  const {tokens, scopes} = file;

  const nameManager = new NameManager(code, tokens);
  const helperManager = new HelperManager(nameManager);
  const tokenProcessor = new TokenProcessor(
    code,
    tokens,
    flowEnabled,
    esTransformsDisabled,
    helperManager,
  );

  let importProcessor = null;
  if (transforms.includes("imports")) {
    importProcessor = new CJSImportProcessor(
      nameManager,
      tokenProcessor,
      Boolean(options.enableLegacyTypeScriptModuleInterop),
      options,
      typeScriptEnabled,
      Boolean(options.keepUnusedImports),
      helperManager,
    );
    importProcessor.preprocessTokens();
    // Shadowed-global detection must run after import preprocessing (so the
    // import-defined names are known) but before type-only import pruning,
    // which relies on that shadowing information.
    identifyShadowedGlobals(tokenProcessor, scopes, importProcessor.getGlobalNames());
    if (typeScriptEnabled && !options.keepUnusedImports) {
      importProcessor.pruneTypeOnlyImports();
    }
  } else if (typeScriptEnabled && !options.keepUnusedImports) {
    // No import processor in ESM mode, but TypeScript's implicit elision of
    // unused imported names still needs shadowed-global detection.
    identifyShadowedGlobals(tokenProcessor, scopes, getTSImportedNames(tokenProcessor));
  }

  return {tokenProcessor, scopes, nameManager, importProcessor, helperManager};
}