Implements Phases 1-8 of the TFTSR implementation plan. Rust backend (Tauri 2.x, src-tauri/): - Multi-provider AI: OpenAI-compatible, Anthropic, Gemini, Mistral, Ollama - PII detection engine: 11 regex patterns with overlap resolution - SQLCipher AES-256 encrypted database with 10 versioned migrations - 28 Tauri IPC commands for triage, analysis, document, and system ops - Ollama: hardware probe, model recommendations, pull/delete with events - RCA and blameless post-mortem Markdown document generators - PDF export via printpdf - Audit log: SHA-256 hash of every external data send - Integration stubs for Confluence, ServiceNow, Azure DevOps (v0.2) Frontend (React 18 + TypeScript + Vite, src/): - 9 pages: full triage workflow NewIssue→LogUpload→Triage→Resolution→RCA→Postmortem→History+Settings - 7 components: ChatWindow, TriageProgress, PiiDiffViewer, DocEditor, HardwareReport, ModelSelector, UI primitives - 3 Zustand stores: session, settings (persisted), history - Type-safe tauriCommands.ts matching Rust backend types exactly - 8 IT domain system prompts (Linux, Windows, Network, K8s, DB, Virt, HW, Obs) DevOps: - .woodpecker/test.yml: rustfmt, clippy, cargo test, tsc, vitest on every push - .woodpecker/release.yml: linux/amd64 + linux/arm64 builds, Gogs release upload Verified: - cargo check: zero errors - tsc --noEmit: zero errors - vitest run: 13/13 unit tests passing Co-Authored-By: Claude Sonnet 4.6 (1M context) <noreply@anthropic.com>
184 lines
5.2 KiB
JavaScript
/**
 * @import {Options} from 'micromark-extension-gfm-strikethrough'
 * @import {Event, Extension, Resolver, State, Token, TokenizeContext, Tokenizer} from 'micromark-util-types'
 */
|
|
|
|
import {ok as assert} from 'devlop'
|
|
import {splice} from 'micromark-util-chunked'
|
|
import {classifyCharacter} from 'micromark-util-classify-character'
|
|
import {resolveAll} from 'micromark-util-resolve-all'
|
|
import {codes, constants, types} from 'micromark-util-symbol'
|
|
|
|
/**
|
|
* Create an extension for `micromark` to enable GFM strikethrough syntax.
|
|
*
|
|
* @param {Options | null | undefined} [options={}]
|
|
* Configuration.
|
|
* @returns {Extension}
|
|
* Extension for `micromark` that can be passed in `extensions`, to
|
|
* enable GFM strikethrough syntax.
|
|
*/
|
|
/**
 * Create an extension for `micromark` to enable GFM strikethrough syntax.
 *
 * @param {Options | null | undefined} [options={}]
 *   Configuration.
 * @returns {Extension}
 *   Extension for `micromark` that can be passed in `extensions`, to
 *   enable GFM strikethrough syntax.
 */
export function gfmStrikethrough(options) {
  const settings = options || {}
  let single = settings.singleTilde

  // By default a single tilde also counts as a strikethrough marker.
  if (single === null || single === undefined) {
    single = true
  }

  const tokenizer = {
    name: 'strikethrough',
    tokenize: tokenizeStrikethrough,
    resolveAll: resolveAllStrikethrough
  }

  return {
    text: {[codes.tilde]: tokenizer},
    insideSpan: {null: [tokenizer]},
    attentionMarkers: {null: [codes.tilde]}
  }

  /**
   * Take events and resolve strikethrough.
   *
   * Pairs up temporary tilde sequences into `strikethrough` /
   * `strikethroughText` tokens; any sequence left unpaired is downgraded
   * to plain data.
   *
   * @type {Resolver}
   */
  function resolveAllStrikethrough(events, context) {
    let closerIndex = -1

    // Walk through all events, looking for a sequence that can close.
    while (++closerIndex < events.length) {
      const closerEvent = events[closerIndex]

      if (
        closerEvent[0] === 'enter' &&
        closerEvent[1].type === 'strikethroughSequenceTemporary' &&
        closerEvent[1]._close
      ) {
        let openerIndex = closerIndex

        // Now walk back to find a sequence that can open the closer.
        while (openerIndex--) {
          const openerEvent = events[openerIndex]

          if (
            openerEvent[0] === 'exit' &&
            openerEvent[1].type === 'strikethroughSequenceTemporary' &&
            openerEvent[1]._open &&
            // Only sequences of the same size pair up (`~` with `~`,
            // `~~` with `~~`).
            closerEvent[1].end.offset - closerEvent[1].start.offset ===
              openerEvent[1].end.offset - openerEvent[1].start.offset
          ) {
            closerEvent[1].type = 'strikethroughSequence'
            openerEvent[1].type = 'strikethroughSequence'

            /** @type {Token} */
            const strikethrough = {
              type: 'strikethrough',
              start: Object.assign({}, openerEvent[1].start),
              end: Object.assign({}, closerEvent[1].end)
            }

            /** @type {Token} */
            const text = {
              type: 'strikethroughText',
              start: Object.assign({}, openerEvent[1].end),
              end: Object.assign({}, closerEvent[1].start)
            }

            // Opening.
            /** @type {Array<Event>} */
            const nextEvents = [
              ['enter', strikethrough, context],
              ['enter', openerEvent[1], context],
              ['exit', openerEvent[1], context],
              ['enter', text, context]
            ]

            const insideSpan = context.parser.constructs.insideSpan.null

            if (insideSpan) {
              // Between: resolve nested constructs inside the span first.
              splice(
                nextEvents,
                nextEvents.length,
                0,
                resolveAll(
                  insideSpan,
                  events.slice(openerIndex + 1, closerIndex),
                  context
                )
              )
            }

            // Closing.
            splice(nextEvents, nextEvents.length, 0, [
              ['exit', text, context],
              ['enter', closerEvent[1], context],
              ['exit', closerEvent[1], context],
              ['exit', strikethrough, context]
            ])

            // Replace everything from just before the opener through the
            // closer's enter with the rebuilt event list.
            splice(
              events,
              openerIndex - 1,
              closerIndex - openerIndex + 3,
              nextEvents
            )

            // Continue scanning after the closing sequence.
            closerIndex = openerIndex + nextEvents.length - 2
            break
          }
        }
      }
    }

    // Downgrade any sequence that never found a partner to plain data.
    let index = -1

    while (++index < events.length) {
      if (events[index][1].type === 'strikethroughSequenceTemporary') {
        events[index][1].type = types.data
      }
    }

    return events
  }

  /**
   * Tokenize a run of tildes into a temporary strikethrough sequence.
   *
   * @this {TokenizeContext}
   * @type {Tokenizer}
   */
  function tokenizeStrikethrough(effects, ok, nok) {
    const previous = this.previous
    const events = this.events
    let size = 0

    return start

    /**
     * At the first tilde of a run.
     *
     * @type {State}
     */
    function start(code) {
      assert(code === codes.tilde, 'expected `~`')

      // A tilde directly following an unescaped tilde belongs to the run
      // already being tokenized — bail out.
      if (
        previous === codes.tilde &&
        events[events.length - 1][1].type !== types.characterEscape
      ) {
        return nok(code)
      }

      effects.enter('strikethroughSequenceTemporary')
      return more(code)
    }

    /**
     * Inside a run of tildes.
     *
     * @type {State}
     */
    function more(code) {
      const before = classifyCharacter(previous)

      if (code === codes.tilde) {
        // If this is the third marker, exit.
        if (size > 1) return nok(code)
        effects.consume(code)
        size++
        return more
      }

      // A lone `~` only counts when `singleTilde` is enabled.
      if (size < 2 && !single) return nok(code)

      const token = effects.exit('strikethroughSequenceTemporary')
      const after = classifyCharacter(code)
      // Attention rules: whether the run can open/close depends on the
      // character classes on either side of it.
      token._open =
        !after || (after === constants.attentionSideAfter && Boolean(before))
      token._close =
        !before || (before === constants.attentionSideAfter && Boolean(after))
      return ok(code)
    }
  }
}
|