Mirror of https://github.com/mermaid-js/mermaid.git (synced 2025-10-03 14:19:38 +02:00)

Compare commits: 5 commits, fix-update ... antler_ng_
Author | SHA1 | Date
---|---|---
 | 32896b8020 |
 | e344c81557 |
 | 54b8f6aec3 |
 | 42d50fa2f5 |
 | 9b13785674 |
@@ -1,5 +0,0 @@
----
-'mermaid': patch
----
-
-fix: Resolve gantt chart crash due to invalid array length
2  .github/workflows/e2e-timings.yml  (vendored)
@@ -58,7 +58,7 @@ jobs:
          echo "EOF" >> $GITHUB_OUTPUT

      - name: Commit and create pull request
-        uses: peter-evans/create-pull-request@915d841dae6a4f191bb78faf61a257411d7be4d2
+        uses: peter-evans/create-pull-request@18e469570b1cf0dfc11d60ec121099f8ff3e617a
        with:
          add-paths: |
            cypress/timings.json
BIN  antlr-4.13.1-complete.jar  (Normal file): Binary file not shown.
BIN  antlr-4.13.2-complete.jar  (Normal file): Binary file not shown.
@@ -803,34 +803,4 @@ describe('Gantt diagram', () => {
       {}
     );
   });
-  it('should handle numeric timestamps with dateFormat x', () => {
-    imgSnapshotTest(
-      `
-    gantt
-      title Process time profile (ms)
-      dateFormat x
-      axisFormat %L
-      tickInterval 250millisecond
-
-      section Pipeline
-      Parse JSON p1: 000, 120
-      `,
-      {}
-    );
-  });
-  it('should handle numeric timestamps with dateFormat X', () => {
-    imgSnapshotTest(
-      `
-    gantt
-      title Process time profile (ms)
-      dateFormat X
-      axisFormat %L
-      tickInterval 250millisecond
-
-      section Pipeline
-      Parse JSON p1: 000, 120
-      `,
-      {}
-    );
-  });
 });
@@ -38,5 +38,3 @@ Each user journey is split into sections, these describe the part of the task
 the user is trying to complete.

 Tasks syntax is `Task name: <score>: <comma separated list of actors>`
-
-Score is a number between 1 and 5, inclusive.
@@ -3,7 +3,6 @@
   "version": "1.0.0",
   "description": "Mermaid examples package",
   "author": "Sidharth Vinod",
-  "license": "MIT",
   "type": "module",
   "module": "./dist/mermaid-examples.core.mjs",
   "types": "./dist/mermaid.d.ts",

@@ -48,6 +48,10 @@
     "types:build-config": "tsx scripts/create-types-from-json-schema.mts",
     "types:verify-config": "tsx scripts/create-types-from-json-schema.mts --verify",
     "checkCircle": "npx madge --circular ./src",
+    "antlr:sequence:clean": "rimraf src/diagrams/sequence/parser/antlr/generated",
+    "antlr:sequence": "pnpm run antlr:sequence:clean && antlr4ng -Dlanguage=TypeScript -Xexact-output-dir -o src/diagrams/sequence/parser/antlr/generated src/diagrams/sequence/parser/antlr/SequenceLexer.g4 src/diagrams/sequence/parser/antlr/SequenceParser.g4",
+    "antlr:class:clean": "rimraf src/diagrams/class/parser/antlr/generated",
+    "antlr:class": "pnpm run antlr:class:clean && antlr4ng -Dlanguage=TypeScript -Xexact-output-dir -o src/diagrams/class/parser/antlr/generated src/diagrams/class/parser/antlr/ClassLexer.g4 src/diagrams/class/parser/antlr/ClassParser.g4",
     "prepublishOnly": "pnpm docs:verify-version"
   },
   "repository": {

@@ -71,6 +75,8 @@
     "@iconify/utils": "^3.0.1",
     "@mermaid-js/parser": "workspace:^",
     "@types/d3": "^7.4.3",
+    "antlr-ng": "^1.0.10",
+    "antlr4ng": "^3.0.16",
     "cytoscape": "^3.29.3",
     "cytoscape-cose-bilkent": "^4.1.0",
     "cytoscape-fcose": "^2.2.0",

@@ -129,7 +135,8 @@
     "unist-util-flatmap": "^1.0.0",
     "unist-util-visit": "^5.0.0",
     "vitepress": "^1.0.2",
-    "vitepress-plugin-search": "1.0.4-alpha.22"
+    "vitepress-plugin-search": "1.0.4-alpha.22",
+    "antlr4ng-cli": "^2.0.0"
   },
   "files": [
     "dist/",
147  packages/mermaid/src/diagrams/class/ANTLR_MIGRATION.md  (Normal file)
@@ -0,0 +1,147 @@

## ANTLR migration plan for Class Diagrams (parity with Sequence)

This guide summarizes how to migrate the Class diagram parser from Jison to ANTLR (antlr4ng), following the approach used for Sequence diagrams. The goal is full feature parity and a 100% test pass rate, while keeping the Jison implementation as the reference until the ANTLR path is green.

### Objectives

- Keep the existing Jison parser as the authoritative reference until parity is achieved
- Add an ANTLR parser behind a runtime flag (`USE_ANTLR_PARSER=true`), mirroring Sequence
- Achieve 100% test compatibility with the current Jison behavior, including error cases
- Keep the public DB and rendering contracts unchanged

---

## 1) Prep and references

- Use the Sequence migration as a template for structure, scripts, and patterns:
  - antlr4ng grammar files: `SequenceLexer.g4`, `SequenceParser.g4`
  - wrapper: `antlr-parser.ts` providing a Jison-compatible `parse()` and `yy`
  - generation script: `pnpm --filter mermaid run antlr:sequence`
- For Class diagrams, identify the analogous files:
  - Jison grammar: `packages/mermaid/src/diagrams/class/parser/classDiagram.jison`
  - DB: `packages/mermaid/src/diagrams/class/classDb.ts`
  - Tests: `packages/mermaid/src/diagrams/class/classDiagram.spec.js`
- Confirm Class diagram features in the Jison grammar and tests: classes, interfaces, enums, relationships (e.g., `--`, `*--`, `o--`, `<|--`, `--|>`), visibility markers (`+`, `-`, `#`, `~`), generics (`<T>`, nested), static/abstract indicators, fields/properties, methods (with parameters and return types), stereotypes (`<< >>`), notes, direction, style/config lines, and titles/accessibility lines if supported.

---

## 2) Create ANTLR grammars

- Create `ClassLexer.g4` and `ClassParser.g4` under `packages/mermaid/src/diagrams/class/parser/antlr/`
- Lexer design guidelines (mirror the Sequence approach):
  - Implement stateful lexing with modes to replicate Jison behavior (e.g., default, line/rest-of-line, config/title/acc modes if used)
  - Ensure token precedence resolves conflicts between relation arrows and generics (`<|--` vs `<T>`). Prefer longest-match arrow tokens and handle generics in parser context
  - Accept identifiers that include special characters that Jison allowed (quotes, underscores, digits, unicode as applicable)
  - Provide tokens for core keywords and symbols: `class`, `interface`, `enum`, relationship operators, visibility markers, `<< >>` stereotypes, `{ }` blocks, `:` type separators, `,` parameter separators, `[` `]` arrays, `<` `>` generics
  - Reuse common tokens shared across diagrams where appropriate (e.g., `TITLE`, `ACC_...`) if Class supports them
- Parser design guidelines:
  - Follow the Jison grammar structure closely to minimize semantic drift
  - Allow the final statement in the file to omit a trailing newline (to avoid EOF vs NEWLINE mismatches)
  - Keep unambiguous rules for:
    - Class declarations and bodies (members split into fields/properties vs methods)
    - Modifiers (visibility, static, abstract)
    - Types (simple, namespaced, generic with nesting)
    - Relationships with labels (left->right/right->left forms) and multiplicities
    - Stereotypes and notes
    - Optional global lines (title, accTitle, accDescr) if supported by class diagrams

---

## 3) Add the wrapper and flag switch

- Add `packages/mermaid/src/diagrams/class/parser/antlr/antlr-parser.ts`:
  - Export an object `{ parse, parser, yy }` that mirrors the Jison parser shape
  - `parse(input)` should:
    - call `this.yy.clear()` to reset the DB (same as Sequence)
    - build ANTLR's lexer/parser and set `BailErrorStrategy` to fail fast on syntax errors
    - walk the tree with a listener that calls classDb methods
  - Implement no-op bodies for `visitTerminal`, `visitErrorNode`, `enterEveryRule`, `exitEveryRule` (required by ParseTreeWalker)
  - Avoid `require()`; import from `antlr4ng`
  - Use minimal `any`; when casting is unavoidable, add clear comments
- Add `packages/mermaid/src/diagrams/class/parser/classParser.ts` similar to Sequence `sequenceParser.ts` (see the sketch after this list):
  - Import both the Jison parser and the ANTLR wrapper
  - Gate on `process.env.USE_ANTLR_PARSER === 'true'`
  - Normalize whitespace if Jison relies on specific newlines (keep parity with Sequence patterns)

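The following is a minimal sketch of the flag switch described above. It is illustrative only; the actual `classParser.ts` and `antlr-parser.ts` added by this change appear later in the diff, and the import paths here assume the layout listed in this section.

```ts
// Sketch of the flag-gated parser selection (illustrative only).
// @ts-ignore: JISON parser lacks type definitions
import jisonParser from './classDiagram.jison';
import antlrParser from './antlr/antlr-parser.js';

const USE_ANTLR_PARSER = process.env.USE_ANTLR_PARSER === 'true';

// Keep the Jison-style `parse()` / `yy` contract intact for callers such as
// classDiagram.ts, and delegate to whichever implementation the flag selects.
export const parser = {
  get yy() {
    return (USE_ANTLR_PARSER ? antlrParser : jisonParser).yy;
  },
  set yy(value) {
    (USE_ANTLR_PARSER ? antlrParser : jisonParser).yy = value;
  },
  parse(source: string): unknown {
    const normalized = source.replace(/\r\n/g, '\n');
    return (USE_ANTLR_PARSER ? antlrParser : jisonParser).parse(normalized);
  },
};
```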
---

## 4) Implement the listener (semantic actions)

Map parsed constructs to classDb calls; a minimal listener sketch follows at the end of this section. Typical handlers include:

- Class-like declarations
  - `db.addClass(id, { type: 'class'|'interface'|'enum', ... })`
  - `db.addClassMember(id, member)` for fields/properties/methods (capture visibility, static/abstract, types, params)
  - Stereotypes, annotations, notes: `db.addAnnotation(...)`, `db.addNote(...)` if applicable
- Relationships
  - Parse the arrow/operator to a relation type; map to db constants (composition/aggregation/inheritance/realization/association)
  - `db.addRelation(lhs, rhs, { type, label, multiplicity })`
- Title/Accessibility (if supported in Class diagrams)
  - `db.setDiagramTitle(...)`, `db.setAccTitle(...)`, `db.setAccDescription(...)`
- Styles/Directives/Config lines as supported by the Jison grammar

Error handling:

- Use BailErrorStrategy; let invalid constructs throw where Jison tests expect failure
- For robustness parity, only swallow exceptions in places where Jison tolerated malformed content without aborting

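A minimal sketch of one semantic action, illustrative only (the complete listener lives in `antlr-parser.ts`, shown later in this diff). The context type below is a stand-in for the generated `RelationStatementContext`.

```ts
// Sketch: map a parsed relation statement onto a classDb call.
type ClassDbLike = Record<string, any>;

interface RelationCtxLike {
  className(): { getText(): string }[];
  relation(): { getText(): string } | null;
}

class RelationListenerSketch {
  constructor(private readonly db: ClassDbLike) {}

  // Called by the tree walker after a relationStatement rule has been matched.
  exitRelationStatement(ctx: RelationCtxLike): void {
    const ends = ctx.className();
    if (ends.length < 2) {
      return;
    }
    // A full implementation maps the arrow text to db.relationType constants;
    // here the relation is recorded with neutral defaults.
    this.db.addRelation?.({
      id1: ends[0].getText(),
      id2: ends[ends.length - 1].getText(),
      relation: { type1: 'none', type2: 'none', lineType: 0 },
      title: 'none',
    });
  }
}
```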
---

## 5) Scripts and generation

- Add package scripts similar to Sequence in `packages/mermaid/package.json`:
  - `antlr:class:clean`: remove generated TS
  - `antlr:class`: run antlr4ng to generate TS into `parser/antlr/generated`
- Example command (once the scripts exist):
  - `pnpm --filter mermaid run antlr:class`

---

## 6) Tests (Vitest)

- Run the existing Class tests with the ANTLR parser enabled:
  - `USE_ANTLR_PARSER=true pnpm vitest packages/mermaid/src/diagrams/class/classDiagram.spec.js --run`
- Start by making a small, focused subset pass, then expand to the full suite
- Add targeted tests for areas where the ANTLR grammar needs extra coverage (e.g., nested generics, tricky arrow/operator precedence, stereotypes, notes); see the sketch after this list
- Keep test expectations identical to Jison’s behavior; only adjust if Jison’s behavior was explicitly flaky and already tolerated in the repo

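A hypothetical targeted Vitest case, sketching how such extra coverage could look. The diagram text and the deliberately loose assertion are illustrative, not part of this change; the imports follow the existing `classDiagram.spec.js` after the parser switch.

```ts
// Sketch of a targeted parity test for generic type handling.
import { describe, it, expect, beforeEach } from 'vitest';
import { parser } from './parser/classParser.ts';
import { ClassDB } from './classDb.js';

describe('class diagram (ANTLR parity)', () => {
  beforeEach(() => {
    parser.yy = new ClassDB();
    parser.yy.clear();
  });

  it('keeps generic type handling aligned with Jison', () => {
    parser.parse(`classDiagram
class Square~Shape~{
  List~int~ position
}
`);
    // Assertion kept loose on purpose: the point is that both parsers accept
    // the same input and register the class under the same id.
    expect(parser.yy.getClass('Square')).toBeDefined();
  });
});
```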
---

## 7) Linting and quality

- Satisfy the ESLint rules enforced in the repo:
  - Prefer imports over `require()`; no empty methods; avoid untyped `any` where reasonable
  - If `@ts-ignore` is necessary, include a descriptive reason (≥10 chars)
- Provide minimal types for listener contexts where helpful; keep casts localized and commented
- Prefix diagnostic debug logs with the project’s preferred prefix if temporary logging is needed (and clean up before commit)

---

## 8) Common pitfalls and tips

- NEWLINE vs EOF: allow the last statement without a trailing newline to prevent InputMismatch
- Token conflicts: order matters; ensure relationship operators (e.g., `<|--`, `--|>`, `*--`, `o--`) win over generic `<`/`>` in the right contexts
- Identifiers: match Jison’s permissiveness (quoted names, digits where allowed) and avoid over-greedy tokens that eat operators
- Listener resilience: ensure classes and endpoints exist before adding relations (create them implicitly if Jison did so); see the sketch after this list
- Error parity: do not swallow exceptions for cases where tests expect failure

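A small sketch of the resilience guard, not part of this change. It assumes `classDb.addClass` is safe to call for ids it has already registered, which matches the current classDb behavior of returning early for known classes.

```ts
// Sketch: make sure both endpoints exist before recording a relation.
function addRelationSafely(
  db: { addClass?: (id: string) => void; addRelation?: (rel: unknown) => void },
  id1: string,
  id2: string,
  relation: unknown
): void {
  // Assumption: addClass is idempotent, so a defensive call is harmless.
  db.addClass?.(id1);
  db.addClass?.(id2);
  db.addRelation?.({ id1, id2, relation, title: 'none' });
}
```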
---

## 9) Rollout checklist

- [ ] Grammar compiles and generated files are committed
- [ ] `USE_ANTLR_PARSER=true` passes all Class diagram tests
- [ ] Sequence and other diagram suites remain green
- [ ] No new ESLint errors; warnings minimized
- [ ] PR includes notes on parity and how to run the ANTLR tests

---

## 10) Quick command reference

- Generate ANTLR targets (after adding the scripts):
  - `pnpm --filter mermaid run antlr:class`
- Run Class tests with the ANTLR parser:
  - `USE_ANTLR_PARSER=true pnpm vitest packages/mermaid/src/diagrams/class/classDiagram.spec.js --run`
- Run a single test:
  - `USE_ANTLR_PARSER=true pnpm vitest packages/mermaid/src/diagrams/class/classDiagram.spec.js -t "some test name" --run`
@@ -1,4 +1,4 @@
-import { parser } from './parser/classDiagram.jison';
+import { parser } from './parser/classParser.ts';
 import { ClassDB } from './classDb.js';

 describe('class diagram, ', function () {
@@ -1,6 +1,6 @@
 import type { DiagramDefinition } from '../../diagram-api/types.js';
 // @ts-ignore: JISON doesn't support types
-import parser from './parser/classDiagram.jison';
+import parser from './parser/classParser.ts';
 import { ClassDB } from './classDb.js';
 import styles from './styles.js';
 import renderer from './classRenderer-v3-unified.js';
@@ -1,6 +1,6 @@
 /* eslint-disable @typescript-eslint/unbound-method -- Broken for Vitest mocks, see https://github.com/vitest-dev/eslint-plugin-vitest/pull/286 */
-// @ts-expect-error Jison doesn't export types
-import { parser } from './parser/classDiagram.jison';
+// @ts-expect-error Parser exposes mutable yy property without typings
+import { parser } from './parser/classParser.ts';
 import { ClassDB } from './classDb.js';
 import { vi, describe, it, expect } from 'vitest';
 import type { ClassMap, NamespaceNode } from './classTypes.js';
@@ -1,6 +1,6 @@
 import type { DiagramDefinition } from '../../diagram-api/types.js';
 // @ts-ignore: JISON doesn't support types
-import parser from './parser/classDiagram.jison';
+import parser from './parser/classParser.ts';
 import { ClassDB } from './classDb.js';
 import styles from './styles.js';
 import renderer from './classRenderer-v3-unified.js';
229  packages/mermaid/src/diagrams/class/parser/antlr/ClassLexer.g4  (Normal file)
@@ -0,0 +1,229 @@

lexer grammar ClassLexer;

tokens {
  ACC_TITLE_VALUE,
  ACC_DESCR_VALUE,
  ACC_DESCR_MULTILINE_VALUE,
  ACC_DESCR_MULTI_END,
  OPEN_IN_STRUCT,
  MEMBER
}

@members {
  private pendingClassBody = false;
  private pendingNamespaceBody = false;

  private clearPendingScopes(): void {
    this.pendingClassBody = false;
    this.pendingNamespaceBody = false;
  }
}

// Common fragments
fragment WS_INLINE: [ \t]+;
fragment DIGIT: [0-9];
fragment LETTER: [A-Za-z_];
fragment IDENT_PART: [A-Za-z0-9_\-];
fragment NOT_DQUOTE: ~[""];

// Comments and whitespace
COMMENT: '%%' ~[\r\n]* -> skip;
NEWLINE: ('\r'? '\n')+ { this.clearPendingScopes(); };
WS: [ \t]+ -> skip;

// Diagram title declaration
CLASS_DIAGRAM_V2: 'classDiagram-v2' -> type(CLASS_DIAGRAM);
CLASS_DIAGRAM: 'classDiagram';

// Directions
DIRECTION_TB: 'direction' WS_INLINE+ 'TB';
DIRECTION_BT: 'direction' WS_INLINE+ 'BT';
DIRECTION_LR: 'direction' WS_INLINE+ 'LR';
DIRECTION_RL: 'direction' WS_INLINE+ 'RL';

// Accessibility tokens
ACC_TITLE: 'accTitle' WS_INLINE* ':' WS_INLINE* -> pushMode(ACC_TITLE_MODE);
ACC_DESCR: 'accDescr' WS_INLINE* ':' WS_INLINE* -> pushMode(ACC_DESCR_MODE);
ACC_DESCR_MULTI: 'accDescr' WS_INLINE* '{' -> pushMode(ACC_DESCR_MULTILINE_MODE);

// Statements captured as raw lines for semantic handling in listener
STYLE_LINE: 'style' WS_INLINE+ ~[\r\n]*;
CLASSDEF_LINE: 'classDef' ~[\r\n]*;
CSSCLASS_LINE: 'cssClass' ~[\r\n]*;
CALLBACK_LINE: 'callback' ~[\r\n]*;
CLICK_LINE: 'click' ~[\r\n]*;
LINK_LINE: 'link' ~[\r\n]*;
CALL_LINE: 'call' ~[\r\n]*;

// Notes
NOTE_FOR: 'note' WS_INLINE+ 'for';
NOTE: 'note';

// Keywords that affect block handling
CLASS: 'class' { this.pendingClassBody = true; };
NAMESPACE: 'namespace' { this.pendingNamespaceBody = true; };

// Structural tokens
STYLE_SEPARATOR: ':::';
ANNOTATION_START: '<<';
ANNOTATION_END: '>>';
LBRACKET: '[';
RBRACKET: ']';
COMMA: ',';
DOT: '.';
EDGE_STATE: '[*]';
GENERIC: '~' (~[~\r\n])+ '~';
// Match strings without escape semantics to mirror Jison behavior
// Allow any chars except an unescaped closing double-quote; permit newlines
STRING: '"' NOT_DQUOTE* '"';
BACKTICK_ID: '`' (~[`])* '`';
LABEL: ':' (~[':\r\n;])*;

RELATION_ARROW
  : (LEFT_HEAD)? LINE_BODY (RIGHT_HEAD)?
  ;
fragment LEFT_HEAD
  : '<|'
  | '<'
  | 'o'
  | '*'
  | '()'
  ;
fragment RIGHT_HEAD
  : '|>'
  | '>'
  | 'o'
  | '*'
  | '()'
  ;
fragment LINE_BODY
  : '--'
  | '..'
  ;

// Identifiers and numbers
IDENTIFIER
  : (LETTER | DIGIT) IDENT_PART*
  ;
NUMBER: DIGIT+;
PLUS: '+';
MINUS: '-';
HASH: '#';
PERCENT: '%';
STAR: '*';
SLASH: '/';
LPAREN: '(';
RPAREN: ')';

// Structural braces with mode management
STRUCT_START
  : '{'
    {
      if (this.pendingClassBody) {
        this.pendingClassBody = false;
        this.pushMode(ClassLexer.CLASS_BODY);
      } else {
        if (this.pendingNamespaceBody) {
          this.pendingNamespaceBody = false;
        }
        this.pushMode(ClassLexer.BLOCK);
      }
    }
  ;

STRUCT_END: '}' { /* default mode only */ };

// Default fallback (should not normally trigger)
UNKNOWN: .;

// ===== Mode: ACC_TITLE =====
mode ACC_TITLE_MODE;
ACC_TITLE_MODE_WS: [ \t]+ -> skip;
ACC_TITLE_VALUE: ~[\r\n;#]+ -> type(ACC_TITLE_VALUE), popMode;
ACC_TITLE_MODE_NEWLINE: ('\r'? '\n')+ { this.popMode(); this.clearPendingScopes(); } -> type(NEWLINE);

// ===== Mode: ACC_DESCR =====
mode ACC_DESCR_MODE;
ACC_DESCR_MODE_WS: [ \t]+ -> skip;
ACC_DESCR_VALUE: ~[\r\n;#]+ -> type(ACC_DESCR_VALUE), popMode;
ACC_DESCR_MODE_NEWLINE: ('\r'? '\n')+ { this.popMode(); this.clearPendingScopes(); } -> type(NEWLINE);

// ===== Mode: ACC_DESCR_MULTILINE =====
mode ACC_DESCR_MULTILINE_MODE;
ACC_DESCR_MULTILINE_VALUE: (~[}])+ -> type(ACC_DESCR_MULTILINE_VALUE);
ACC_DESCR_MULTI_END: '}' -> popMode, type(ACC_DESCR_MULTI_END);

// ===== Mode: CLASS_BODY =====
mode CLASS_BODY;
CLASS_BODY_WS: [ \t]+ -> skip;
CLASS_BODY_COMMENT: '%%' ~[\r\n]* -> skip;
CLASS_BODY_NEWLINE: ('\r'? '\n')+ -> type(NEWLINE);
CLASS_BODY_STRUCT_END: '}' -> popMode, type(STRUCT_END);
CLASS_BODY_OPEN_BRACE: '{' -> type(OPEN_IN_STRUCT);
CLASS_BODY_EDGE_STATE: '[*]' -> type(EDGE_STATE);
CLASS_BODY_MEMBER: ~[{}\r\n]+ -> type(MEMBER);

// ===== Mode: BLOCK =====
mode BLOCK;
BLOCK_WS: [ \t]+ -> skip;
BLOCK_COMMENT: '%%' ~[\r\n]* -> skip;
BLOCK_NEWLINE: ('\r'? '\n')+ -> type(NEWLINE);
BLOCK_CLASS: 'class' { this.pendingClassBody = true; } -> type(CLASS);
BLOCK_NAMESPACE: 'namespace' { this.pendingNamespaceBody = true; } -> type(NAMESPACE);
BLOCK_STYLE_LINE: 'style' WS_INLINE+ ~[\r\n]* -> type(STYLE_LINE);
BLOCK_CLASSDEF_LINE: 'classDef' ~[\r\n]* -> type(CLASSDEF_LINE);
BLOCK_CSSCLASS_LINE: 'cssClass' ~[\r\n]* -> type(CSSCLASS_LINE);
BLOCK_CALLBACK_LINE: 'callback' ~[\r\n]* -> type(CALLBACK_LINE);
BLOCK_CLICK_LINE: 'click' ~[\r\n]* -> type(CLICK_LINE);
BLOCK_LINK_LINE: 'link' ~[\r\n]* -> type(LINK_LINE);
BLOCK_CALL_LINE: 'call' ~[\r\n]* -> type(CALL_LINE);
BLOCK_NOTE_FOR: 'note' WS_INLINE+ 'for' -> type(NOTE_FOR);
BLOCK_NOTE: 'note' -> type(NOTE);
BLOCK_ACC_TITLE: 'accTitle' WS_INLINE* ':' WS_INLINE* -> type(ACC_TITLE), pushMode(ACC_TITLE_MODE);
BLOCK_ACC_DESCR: 'accDescr' WS_INLINE* ':' WS_INLINE* -> type(ACC_DESCR), pushMode(ACC_DESCR_MODE);
BLOCK_ACC_DESCR_MULTI: 'accDescr' WS_INLINE* '{' -> type(ACC_DESCR_MULTI), pushMode(ACC_DESCR_MULTILINE_MODE);
BLOCK_STRUCT_START
  : '{'
    {
      if (this.pendingClassBody) {
        this.pendingClassBody = false;
        this.pushMode(ClassLexer.CLASS_BODY);
      } else {
        if (this.pendingNamespaceBody) {
          this.pendingNamespaceBody = false;
        }
        this.pushMode(ClassLexer.BLOCK);
      }
    }
    -> type(STRUCT_START)
  ;
BLOCK_STRUCT_END: '}' -> popMode, type(STRUCT_END);
BLOCK_STYLE_SEPARATOR: ':::' -> type(STYLE_SEPARATOR);
BLOCK_ANNOTATION_START: '<<' -> type(ANNOTATION_START);
BLOCK_ANNOTATION_END: '>>' -> type(ANNOTATION_END);
BLOCK_LBRACKET: '[' -> type(LBRACKET);
BLOCK_RBRACKET: ']' -> type(RBRACKET);
BLOCK_COMMA: ',' -> type(COMMA);
BLOCK_DOT: '.' -> type(DOT);
BLOCK_EDGE_STATE: '[*]' -> type(EDGE_STATE);
BLOCK_GENERIC: '~' (~[~\r\n])+ '~' -> type(GENERIC);
// Mirror Jison: no escape semantics inside strings in BLOCK mode as well
BLOCK_STRING: '"' NOT_DQUOTE* '"' -> type(STRING);
BLOCK_BACKTICK_ID: '`' (~[`])* '`' -> type(BACKTICK_ID);
BLOCK_LABEL: ':' (~[':\r\n;])* -> type(LABEL);
BLOCK_RELATION_ARROW
  : (LEFT_HEAD)? LINE_BODY (RIGHT_HEAD)?
    -> type(RELATION_ARROW)
  ;
BLOCK_IDENTIFIER: (LETTER | DIGIT) IDENT_PART* -> type(IDENTIFIER);
BLOCK_NUMBER: DIGIT+ -> type(NUMBER);
BLOCK_PLUS: '+' -> type(PLUS);
BLOCK_MINUS: '-' -> type(MINUS);
BLOCK_HASH: '#' -> type(HASH);
BLOCK_PERCENT: '%' -> type(PERCENT);
BLOCK_STAR: '*' -> type(STAR);
BLOCK_SLASH: '/' -> type(SLASH);
BLOCK_LPAREN: '(' -> type(LPAREN);
BLOCK_RPAREN: ')' -> type(RPAREN);
BLOCK_UNKNOWN: . -> type(UNKNOWN);
204  packages/mermaid/src/diagrams/class/parser/antlr/ClassParser.g4  (Normal file)
@@ -0,0 +1,204 @@

parser grammar ClassParser;

options {
  tokenVocab = ClassLexer;
}

start
  : (NEWLINE)* classDiagramSection EOF
  ;

classDiagramSection
  : CLASS_DIAGRAM (NEWLINE)+ document
  ;

document
  : (line)* statement?
  ;

line
  : statement? NEWLINE
  ;

statement
  : classStatement
  | namespaceStatement
  | relationStatement
  | noteStatement
  | annotationStatement
  | memberStatement
  | classDefStatement
  | styleStatement
  | cssClassStatement
  | directionStatement
  | accTitleStatement
  | accDescrStatement
  | accDescrMultilineStatement
  | callbackStatement
  | clickStatement
  | linkStatement
  | callStatement
  ;

classStatement
  : classIdentifier classStatementTail?
  ;

classStatementTail
  : STRUCT_START classMembers? STRUCT_END
  | STYLE_SEPARATOR cssClassRef classStatementCssTail?
  ;

classStatementCssTail
  : STRUCT_START classMembers? STRUCT_END
  ;

classIdentifier
  : CLASS className classLabel?
  ;

classLabel
  : LBRACKET stringLiteral RBRACKET
  ;

cssClassRef
  : className
  | IDENTIFIER
  ;

classMembers
  : (NEWLINE | classMember)*
  ;

classMember
  : MEMBER
  | EDGE_STATE
  ;

namespaceStatement
  : namespaceIdentifier namespaceBlock
  ;

namespaceIdentifier
  : NAMESPACE namespaceName
  ;

namespaceName
  : className
  ;

namespaceBlock
  : STRUCT_START (NEWLINE)* namespaceBody? STRUCT_END
  ;

namespaceBody
  : namespaceLine+
  ;

namespaceLine
  : (classStatement | namespaceStatement)? NEWLINE
  | classStatement
  | namespaceStatement
  ;

relationStatement
  : className relation className relationLabel?
  | className stringLiteral relation className relationLabel?
  | className relation stringLiteral className relationLabel?
  | className stringLiteral relation stringLiteral className relationLabel?
  ;

relation
  : RELATION_ARROW
  ;

relationLabel
  : LABEL
  ;

noteStatement
  : NOTE_FOR className noteBody
  | NOTE noteBody
  ;

noteBody
  : stringLiteral
  ;

annotationStatement
  : ANNOTATION_START annotationName ANNOTATION_END className
  ;

annotationName
  : IDENTIFIER
  | stringLiteral
  ;

memberStatement
  : className LABEL
  ;

classDefStatement
  : CLASSDEF_LINE
  ;

styleStatement
  : STYLE_LINE
  ;

cssClassStatement
  : CSSCLASS_LINE
  ;

directionStatement
  : DIRECTION_TB
  | DIRECTION_BT
  | DIRECTION_LR
  | DIRECTION_RL
  ;

accTitleStatement
  : ACC_TITLE ACC_TITLE_VALUE
  ;

accDescrStatement
  : ACC_DESCR ACC_DESCR_VALUE
  ;

accDescrMultilineStatement
  : ACC_DESCR_MULTI ACC_DESCR_MULTILINE_VALUE ACC_DESCR_MULTI_END
  ;

callbackStatement
  : CALLBACK_LINE
  ;

clickStatement
  : CLICK_LINE
  ;

linkStatement
  : LINK_LINE
  ;

callStatement
  : CALL_LINE
  ;

stringLiteral
  : STRING
  ;

className
  : classNameSegment (DOT classNameSegment)*
  ;

classNameSegment
  : IDENTIFIER genericSuffix?
  | BACKTICK_ID genericSuffix?
  | EDGE_STATE
  ;

genericSuffix
  : GENERIC
  ;
729
packages/mermaid/src/diagrams/class/parser/antlr/antlr-parser.ts
Normal file
729
packages/mermaid/src/diagrams/class/parser/antlr/antlr-parser.ts
Normal file
@@ -0,0 +1,729 @@
|
|||||||
|
import type { ParseTreeListener } from 'antlr4ng';
|
||||||
|
import {
|
||||||
|
BailErrorStrategy,
|
||||||
|
CharStream,
|
||||||
|
CommonTokenStream,
|
||||||
|
ParseCancellationException,
|
||||||
|
ParseTreeWalker,
|
||||||
|
RecognitionException,
|
||||||
|
type Token,
|
||||||
|
} from 'antlr4ng';
|
||||||
|
import {
|
||||||
|
ClassParser,
|
||||||
|
type ClassIdentifierContext,
|
||||||
|
type ClassMembersContext,
|
||||||
|
type ClassNameContext,
|
||||||
|
type ClassNameSegmentContext,
|
||||||
|
type ClassStatementContext,
|
||||||
|
type NamespaceIdentifierContext,
|
||||||
|
type RelationStatementContext,
|
||||||
|
type NoteStatementContext,
|
||||||
|
type AnnotationStatementContext,
|
||||||
|
type MemberStatementContext,
|
||||||
|
type ClassDefStatementContext,
|
||||||
|
type StyleStatementContext,
|
||||||
|
type CssClassStatementContext,
|
||||||
|
type DirectionStatementContext,
|
||||||
|
type AccTitleStatementContext,
|
||||||
|
type AccDescrStatementContext,
|
||||||
|
type AccDescrMultilineStatementContext,
|
||||||
|
type CallbackStatementContext,
|
||||||
|
type ClickStatementContext,
|
||||||
|
type LinkStatementContext,
|
||||||
|
type CallStatementContext,
|
||||||
|
type CssClassRefContext,
|
||||||
|
type StringLiteralContext,
|
||||||
|
} from './generated/ClassParser.js';
|
||||||
|
import { ClassParserListener } from './generated/ClassParserListener.js';
|
||||||
|
import { ClassLexer } from './generated/ClassLexer.js';
|
||||||
|
|
||||||
|
type ClassDbLike = Record<string, any>;
|
||||||
|
|
||||||
|
const stripQuotes = (value: string): string => {
|
||||||
|
const trimmed = value.trim();
|
||||||
|
if (trimmed.length >= 2 && trimmed.startsWith('"') && trimmed.endsWith('"')) {
|
||||||
|
try {
|
||||||
|
return JSON.parse(trimmed.replace(/\r?\n/g, '\\n')) as string;
|
||||||
|
} catch {
|
||||||
|
return trimmed.slice(1, -1).replace(/\\"/g, '"');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return trimmed;
|
||||||
|
};
|
||||||
|
|
||||||
|
const stripBackticks = (value: string): string => {
|
||||||
|
const trimmed = value.trim();
|
||||||
|
if (trimmed.length >= 2 && trimmed.startsWith('`') && trimmed.endsWith('`')) {
|
||||||
|
return trimmed.slice(1, -1);
|
||||||
|
}
|
||||||
|
return trimmed;
|
||||||
|
};
|
||||||
|
|
||||||
|
const splitCommaSeparated = (text: string): string[] =>
|
||||||
|
text
|
||||||
|
.split(',')
|
||||||
|
.map((part) => part.trim())
|
||||||
|
.filter((part) => part.length > 0);
|
||||||
|
|
||||||
|
const getStringFromLiteral = (ctx: StringLiteralContext | undefined | null): string | undefined => {
|
||||||
|
if (!ctx) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return stripQuotes(ctx.getText());
|
||||||
|
};
|
||||||
|
|
||||||
|
const getClassNameText = (ctx: ClassNameContext): string => {
|
||||||
|
const segments = ctx.classNameSegment();
|
||||||
|
const parts: string[] = [];
|
||||||
|
for (const segment of segments) {
|
||||||
|
parts.push(getClassNameSegmentText(segment));
|
||||||
|
}
|
||||||
|
return parts.join('.');
|
||||||
|
};
|
||||||
|
|
||||||
|
const getClassNameSegmentText = (ctx: ClassNameSegmentContext): string => {
|
||||||
|
if (ctx.BACKTICK_ID()) {
|
||||||
|
return stripBackticks(ctx.BACKTICK_ID()!.getText());
|
||||||
|
}
|
||||||
|
if (ctx.EDGE_STATE()) {
|
||||||
|
return ctx.EDGE_STATE()!.getText();
|
||||||
|
}
|
||||||
|
return ctx.getText();
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseRelationArrow = (arrow: string, db: ClassDbLike) => {
|
||||||
|
const relation = {
|
||||||
|
type1: 'none',
|
||||||
|
type2: 'none',
|
||||||
|
lineType: db.lineType?.LINE ?? 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
const trimmed = arrow.trim();
|
||||||
|
if (trimmed.includes('..')) {
|
||||||
|
relation.lineType = db.lineType?.DOTTED_LINE ?? relation.lineType;
|
||||||
|
}
|
||||||
|
|
||||||
|
const leftHeads: [string, keyof typeof db.relationType][] = [
|
||||||
|
['<|', 'EXTENSION'],
|
||||||
|
['()', 'LOLLIPOP'],
|
||||||
|
['o', 'AGGREGATION'],
|
||||||
|
['*', 'COMPOSITION'],
|
||||||
|
['<', 'DEPENDENCY'],
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const [prefix, key] of leftHeads) {
|
||||||
|
if (trimmed.startsWith(prefix)) {
|
||||||
|
relation.type1 = db.relationType?.[key] ?? relation.type1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const rightHeads: [string, keyof typeof db.relationType][] = [
|
||||||
|
['|>', 'EXTENSION'],
|
||||||
|
['()', 'LOLLIPOP'],
|
||||||
|
['o', 'AGGREGATION'],
|
||||||
|
['*', 'COMPOSITION'],
|
||||||
|
['>', 'DEPENDENCY'],
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const [suffix, key] of rightHeads) {
|
||||||
|
if (trimmed.endsWith(suffix)) {
|
||||||
|
relation.type2 = db.relationType?.[key] ?? relation.type2;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return relation;
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseStyleLine = (db: ClassDbLike, line: string) => {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
const body = trimmed.slice('style'.length).trim();
|
||||||
|
if (!body) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const match = /^(\S+)(\s+.+)?$/.exec(body);
|
||||||
|
if (!match) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const classId = match[1];
|
||||||
|
const styleBody = match[2]?.trim() ?? '';
|
||||||
|
if (!styleBody) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const styles = splitCommaSeparated(styleBody);
|
||||||
|
if (styles.length) {
|
||||||
|
db.setCssStyle?.(classId, styles);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseClassDefLine = (db: ClassDbLike, line: string) => {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
const body = trimmed.slice('classDef'.length).trim();
|
||||||
|
if (!body) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const match = /^(\S+)(\s+.+)?$/.exec(body);
|
||||||
|
if (!match) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const idPart = match[1];
|
||||||
|
const stylePart = match[2]?.trim() ?? '';
|
||||||
|
const ids = splitCommaSeparated(idPart);
|
||||||
|
const styles = stylePart ? splitCommaSeparated(stylePart) : [];
|
||||||
|
db.defineClass?.(ids, styles);
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseCssClassLine = (db: ClassDbLike, line: string) => {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
const body = trimmed.slice('cssClass'.length).trim();
|
||||||
|
if (!body) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const match = /^("[^"]*"|\S+)\s+(\S+)/.exec(body);
|
||||||
|
if (!match) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const idsRaw = stripQuotes(match[1]);
|
||||||
|
const className = match[2];
|
||||||
|
db.setCssClass?.(idsRaw, className);
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseCallbackLine = (db: ClassDbLike, line: string) => {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
const match = /^callback\s+(\S+)\s+("[^"]*")(?:\s+("[^"]*"))?\s*$/.exec(trimmed);
|
||||||
|
if (!match) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const target = match[1];
|
||||||
|
const fn = stripQuotes(match[2]);
|
||||||
|
const tooltip = match[3] ? stripQuotes(match[3]) : undefined;
|
||||||
|
db.setClickEvent?.(target, fn);
|
||||||
|
if (tooltip) {
|
||||||
|
db.setTooltip?.(target, tooltip);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseClickLine = (db: ClassDbLike, line: string) => {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
const callMatch = /^click\s+(\S+)\s+call\s+([^(]+)\(([^)]*)\)(?:\s+("[^"]*"))?\s*$/.exec(trimmed);
|
||||||
|
if (callMatch) {
|
||||||
|
const target = callMatch[1];
|
||||||
|
const fnName = callMatch[2].trim();
|
||||||
|
const args = callMatch[3].trim();
|
||||||
|
const tooltip = callMatch[4] ? stripQuotes(callMatch[4]) : undefined;
|
||||||
|
if (args.length > 0) {
|
||||||
|
db.setClickEvent?.(target, fnName, args);
|
||||||
|
} else {
|
||||||
|
db.setClickEvent?.(target, fnName);
|
||||||
|
}
|
||||||
|
if (tooltip) {
|
||||||
|
db.setTooltip?.(target, tooltip);
|
||||||
|
}
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
const hrefMatch = /^click\s+(\S+)\s+href\s+("[^"]*")(?:\s+("[^"]*"))?(?:\s+(\S+))?\s*$/.exec(
|
||||||
|
trimmed
|
||||||
|
);
|
||||||
|
if (hrefMatch) {
|
||||||
|
const target = hrefMatch[1];
|
||||||
|
const url = stripQuotes(hrefMatch[2]);
|
||||||
|
const tooltip = hrefMatch[3] ? stripQuotes(hrefMatch[3]) : undefined;
|
||||||
|
const targetWindow = hrefMatch[4];
|
||||||
|
if (targetWindow) {
|
||||||
|
db.setLink?.(target, url, targetWindow);
|
||||||
|
} else {
|
||||||
|
db.setLink?.(target, url);
|
||||||
|
}
|
||||||
|
if (tooltip) {
|
||||||
|
db.setTooltip?.(target, tooltip);
|
||||||
|
}
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
const genericMatch = /^click\s+(\S+)\s+("[^"]*")(?:\s+("[^"]*"))?\s*$/.exec(trimmed);
|
||||||
|
if (genericMatch) {
|
||||||
|
const target = genericMatch[1];
|
||||||
|
const link = stripQuotes(genericMatch[2]);
|
||||||
|
const tooltip = genericMatch[3] ? stripQuotes(genericMatch[3]) : undefined;
|
||||||
|
db.setLink?.(target, link);
|
||||||
|
if (tooltip) {
|
||||||
|
db.setTooltip?.(target, tooltip);
|
||||||
|
}
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseLinkLine = (db: ClassDbLike, line: string) => {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
const match = /^link\s+(\S+)\s+("[^"]*")(?:\s+("[^"]*"))?(?:\s+(\S+))?\s*$/.exec(trimmed);
|
||||||
|
if (!match) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const target = match[1];
|
||||||
|
const href = stripQuotes(match[2]);
|
||||||
|
const tooltip = match[3] ? stripQuotes(match[3]) : undefined;
|
||||||
|
const targetWindow = match[4];
|
||||||
|
|
||||||
|
if (targetWindow) {
|
||||||
|
db.setLink?.(target, href, targetWindow);
|
||||||
|
} else {
|
||||||
|
db.setLink?.(target, href);
|
||||||
|
}
|
||||||
|
if (tooltip) {
|
||||||
|
db.setTooltip?.(target, tooltip);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseCallLine = (db: ClassDbLike, lastTarget: string | undefined, line: string) => {
|
||||||
|
if (!lastTarget) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const trimmed = line.trim();
|
||||||
|
const match = /^call\s+([^(]+)\(([^)]*)\)\s*("[^"]*")?\s*$/.exec(trimmed);
|
||||||
|
if (!match) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const fnName = match[1].trim();
|
||||||
|
const args = match[2].trim();
|
||||||
|
const tooltip = match[3] ? stripQuotes(match[3]) : undefined;
|
||||||
|
if (args.length > 0) {
|
||||||
|
db.setClickEvent?.(lastTarget, fnName, args);
|
||||||
|
} else {
|
||||||
|
db.setClickEvent?.(lastTarget, fnName);
|
||||||
|
}
|
||||||
|
if (tooltip) {
|
||||||
|
db.setTooltip?.(lastTarget, tooltip);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
interface NamespaceFrame {
|
||||||
|
name?: string;
|
||||||
|
classes: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
class ClassDiagramParseListener extends ClassParserListener implements ParseTreeListener {
|
||||||
|
private readonly classNames = new WeakMap<ClassIdentifierContext, string>();
|
||||||
|
private readonly memberLists = new WeakMap<ClassMembersContext, string[]>();
|
||||||
|
private readonly namespaceStack: NamespaceFrame[] = [];
|
||||||
|
private lastClickTarget?: string;
|
||||||
|
|
||||||
|
constructor(private readonly db: ClassDbLike) {
|
||||||
|
super();
|
||||||
|
}
|
||||||
|
|
||||||
|
private recordClassInCurrentNamespace(name: string) {
|
||||||
|
const current = this.namespaceStack[this.namespaceStack.length - 1];
|
||||||
|
if (current?.name) {
|
||||||
|
current.classes.push(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override enterNamespaceStatement = (): void => {
|
||||||
|
this.namespaceStack.push({ classes: [] });
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitNamespaceIdentifier = (ctx: NamespaceIdentifierContext): void => {
|
||||||
|
const frame = this.namespaceStack[this.namespaceStack.length - 1];
|
||||||
|
if (!frame) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const classNameCtx = ctx.namespaceName()?.className();
|
||||||
|
if (!classNameCtx) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const name = getClassNameText(classNameCtx);
|
||||||
|
frame.name = name;
|
||||||
|
this.db.addNamespace?.(name);
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitNamespaceStatement = (): void => {
|
||||||
|
const frame = this.namespaceStack.pop();
|
||||||
|
if (!frame?.name) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (frame.classes.length) {
|
||||||
|
this.db.addClassesToNamespace?.(frame.name, frame.classes);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitClassIdentifier = (ctx: ClassIdentifierContext): void => {
|
||||||
|
const id = getClassNameText(ctx.className());
|
||||||
|
this.classNames.set(ctx, id);
|
||||||
|
this.db.addClass?.(id);
|
||||||
|
this.recordClassInCurrentNamespace(id);
|
||||||
|
|
||||||
|
const labelCtx = ctx.classLabel?.();
|
||||||
|
if (labelCtx) {
|
||||||
|
const label = getStringFromLiteral(labelCtx.stringLiteral());
|
||||||
|
if (label !== undefined) {
|
||||||
|
this.db.setClassLabel?.(id, label);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitClassMembers = (ctx: ClassMembersContext): void => {
|
||||||
|
const members: string[] = [];
|
||||||
|
for (const memberCtx of ctx.classMember() ?? []) {
|
||||||
|
if (memberCtx.MEMBER()) {
|
||||||
|
members.push(memberCtx.MEMBER()!.getText());
|
||||||
|
} else if (memberCtx.EDGE_STATE()) {
|
||||||
|
members.push(memberCtx.EDGE_STATE()!.getText());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
members.reverse();
|
||||||
|
this.memberLists.set(ctx, members);
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitClassStatement = (ctx: ClassStatementContext): void => {
|
||||||
|
const identifierCtx = ctx.classIdentifier();
|
||||||
|
if (!identifierCtx) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const classId = this.classNames.get(identifierCtx);
|
||||||
|
if (!classId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const tailCtx = ctx.classStatementTail?.();
|
||||||
|
const cssRefCtx = tailCtx?.cssClassRef?.();
|
||||||
|
if (cssRefCtx) {
|
||||||
|
const cssTarget = this.resolveCssClassRef(cssRefCtx);
|
||||||
|
if (cssTarget) {
|
||||||
|
this.db.setCssClass?.(classId, cssTarget);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const memberContexts: ClassMembersContext[] = [];
|
||||||
|
const cm1 = tailCtx?.classMembers();
|
||||||
|
if (cm1) {
|
||||||
|
memberContexts.push(cm1);
|
||||||
|
}
|
||||||
|
const cssTailCtx = tailCtx?.classStatementCssTail?.();
|
||||||
|
const cm2 = cssTailCtx?.classMembers();
|
||||||
|
if (cm2) {
|
||||||
|
memberContexts.push(cm2);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const membersCtx of memberContexts) {
|
||||||
|
const members = this.memberLists.get(membersCtx) ?? [];
|
||||||
|
if (members.length) {
|
||||||
|
this.db.addMembers?.(classId, members);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
private resolveCssClassRef(ctx: CssClassRefContext): string | undefined {
|
||||||
|
if (ctx.className()) {
|
||||||
|
return getClassNameText(ctx.className()!);
|
||||||
|
}
|
||||||
|
if (ctx.IDENTIFIER()) {
|
||||||
|
return ctx.IDENTIFIER()!.getText();
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
override exitRelationStatement = (ctx: RelationStatementContext): void => {
|
||||||
|
const classNames = ctx.className();
|
||||||
|
if (classNames.length < 2) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const id1 = getClassNameText(classNames[0]);
|
||||||
|
const id2 = getClassNameText(classNames[classNames.length - 1]);
|
||||||
|
|
||||||
|
const arrow = ctx.relation()?.getText() ?? '';
|
||||||
|
const relation = parseRelationArrow(arrow, this.db);
|
||||||
|
|
||||||
|
let relationTitle1 = 'none';
|
||||||
|
let relationTitle2 = 'none';
|
||||||
|
const stringLiterals = ctx.stringLiteral();
|
||||||
|
if (stringLiterals.length === 1 && ctx.children) {
|
||||||
|
const stringCtx = stringLiterals[0];
|
||||||
|
const children = ctx.children as unknown[];
|
||||||
|
const stringIndex = children.indexOf(stringCtx);
|
||||||
|
const relationCtx = ctx.relation();
|
||||||
|
const relationIndex = relationCtx ? children.indexOf(relationCtx) : -1;
|
||||||
|
if (relationIndex >= 0 && stringIndex >= 0 && stringIndex < relationIndex) {
|
||||||
|
relationTitle1 = getStringFromLiteral(stringCtx) ?? 'none';
|
||||||
|
} else {
|
||||||
|
relationTitle2 = getStringFromLiteral(stringCtx) ?? 'none';
|
||||||
|
}
|
||||||
|
} else if (stringLiterals.length >= 2) {
|
||||||
|
relationTitle1 = getStringFromLiteral(stringLiterals[0]) ?? 'none';
|
||||||
|
relationTitle2 = getStringFromLiteral(stringLiterals[1]) ?? 'none';
|
||||||
|
}
|
||||||
|
|
||||||
|
let title = 'none';
|
||||||
|
const labelCtx = ctx.relationLabel?.();
|
||||||
|
if (labelCtx?.LABEL()) {
|
||||||
|
title = this.db.cleanupLabel?.(labelCtx.LABEL().getText()) ?? 'none';
|
||||||
|
}
|
||||||
|
|
||||||
|
this.db.addRelation?.({
|
||||||
|
id1,
|
||||||
|
id2,
|
||||||
|
relation,
|
||||||
|
relationTitle1,
|
||||||
|
relationTitle2,
|
||||||
|
title,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitNoteStatement = (ctx: NoteStatementContext): void => {
|
||||||
|
const noteCtx = ctx.noteBody();
|
||||||
|
const literalText = noteCtx?.getText?.();
|
||||||
|
const text = literalText !== undefined ? stripQuotes(literalText) : undefined;
|
||||||
|
if (text === undefined) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (ctx.NOTE_FOR()) {
|
||||||
|
const className = getClassNameText(ctx.className()!);
|
||||||
|
this.db.addNote?.(text, className);
|
||||||
|
} else {
|
||||||
|
this.db.addNote?.(text);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitAnnotationStatement = (ctx: AnnotationStatementContext): void => {
|
||||||
|
const className = getClassNameText(ctx.className());
|
||||||
|
const nameCtx = ctx.annotationName();
|
||||||
|
let annotation: string | undefined;
|
||||||
|
if (nameCtx.IDENTIFIER()) {
|
||||||
|
annotation = nameCtx.IDENTIFIER()!.getText();
|
||||||
|
} else {
|
||||||
|
annotation = getStringFromLiteral(nameCtx.stringLiteral());
|
||||||
|
}
|
||||||
|
if (annotation !== undefined) {
|
||||||
|
this.db.addAnnotation?.(className, annotation);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitMemberStatement = (ctx: MemberStatementContext): void => {
|
||||||
|
const className = getClassNameText(ctx.className());
|
||||||
|
const labelToken = ctx.LABEL();
|
||||||
|
if (!labelToken) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const cleaned = this.db.cleanupLabel?.(labelToken.getText()) ?? labelToken.getText();
|
||||||
|
this.db.addMember?.(className, cleaned);
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitClassDefStatement = (ctx: ClassDefStatementContext): void => {
|
||||||
|
const token = ctx.CLASSDEF_LINE()?.getSymbol()?.text;
|
||||||
|
if (token) {
|
||||||
|
parseClassDefLine(this.db, token);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitStyleStatement = (ctx: StyleStatementContext): void => {
|
||||||
|
const token = ctx.STYLE_LINE()?.getSymbol()?.text;
|
||||||
|
if (token) {
|
||||||
|
parseStyleLine(this.db, token);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitCssClassStatement = (ctx: CssClassStatementContext): void => {
|
||||||
|
const token = ctx.CSSCLASS_LINE()?.getSymbol()?.text;
|
||||||
|
if (token) {
|
||||||
|
parseCssClassLine(this.db, token);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitDirectionStatement = (ctx: DirectionStatementContext): void => {
|
||||||
|
if (ctx.DIRECTION_TB()) {
|
||||||
|
this.db.setDirection?.('TB');
|
||||||
|
} else if (ctx.DIRECTION_BT()) {
|
||||||
|
this.db.setDirection?.('BT');
|
||||||
|
} else if (ctx.DIRECTION_LR()) {
|
||||||
|
this.db.setDirection?.('LR');
|
||||||
|
} else if (ctx.DIRECTION_RL()) {
|
||||||
|
this.db.setDirection?.('RL');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitAccTitleStatement = (ctx: AccTitleStatementContext): void => {
|
||||||
|
const value = ctx.ACC_TITLE_VALUE()?.getText();
|
||||||
|
if (value !== undefined) {
|
||||||
|
this.db.setAccTitle?.(value.trim());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitAccDescrStatement = (ctx: AccDescrStatementContext): void => {
|
||||||
|
const value = ctx.ACC_DESCR_VALUE()?.getText();
|
||||||
|
if (value !== undefined) {
|
||||||
|
this.db.setAccDescription?.(value.trim());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitAccDescrMultilineStatement = (ctx: AccDescrMultilineStatementContext): void => {
|
||||||
|
const value = ctx.ACC_DESCR_MULTILINE_VALUE()?.getText();
|
||||||
|
if (value !== undefined) {
|
||||||
|
this.db.setAccDescription?.(value.trim());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitCallbackStatement = (ctx: CallbackStatementContext): void => {
|
||||||
|
const token = ctx.CALLBACK_LINE()?.getSymbol()?.text;
|
||||||
|
if (token) {
|
||||||
|
parseCallbackLine(this.db, token);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitClickStatement = (ctx: ClickStatementContext): void => {
|
||||||
|
const token = ctx.CLICK_LINE()?.getSymbol()?.text;
|
||||||
|
if (!token) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const target = parseClickLine(this.db, token);
|
||||||
|
if (target) {
|
||||||
|
this.lastClickTarget = target;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitLinkStatement = (ctx: LinkStatementContext): void => {
|
||||||
|
const token = ctx.LINK_LINE()?.getSymbol()?.text;
|
||||||
|
if (token) {
|
||||||
|
parseLinkLine(this.db, token);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
override exitCallStatement = (ctx: CallStatementContext): void => {
|
||||||
|
const token = ctx.CALL_LINE()?.getSymbol()?.text;
|
||||||
|
if (token) {
|
||||||
|
parseCallLine(this.db, this.lastClickTarget, token);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
class ANTLRClassParser {
  yy: ClassDbLike | null = null;

  parse(input: string): unknown {
    if (!this.yy) {
      throw new Error('Class ANTLR parser missing yy (database).');
    }

    this.yy.clear?.();

    const inputStream = CharStream.fromString(input);
    const lexer = new ClassLexer(inputStream);
    const tokenStream = new CommonTokenStream(lexer);
    const parser = new ClassParser(tokenStream);

    const anyParser = parser as unknown as {
      getErrorHandler?: () => unknown;
      setErrorHandler?: (handler: unknown) => void;
      errorHandler?: unknown;
    };
    const currentHandler = anyParser.getErrorHandler?.() ?? anyParser.errorHandler;
    const handlerName = (currentHandler as { constructor?: { name?: string } } | undefined)
      ?.constructor?.name;
    if (!currentHandler || handlerName !== 'BailErrorStrategy') {
      if (typeof anyParser.setErrorHandler === 'function') {
        anyParser.setErrorHandler(new BailErrorStrategy());
      } else {
        (parser as unknown as { errorHandler: unknown }).errorHandler = new BailErrorStrategy();
      }
    }

    try {
      const tree = parser.start();
      const listener = new ClassDiagramParseListener(this.yy);
      ParseTreeWalker.DEFAULT.walk(listener, tree);
      return tree;
    } catch (error) {
      throw this.transformParseError(error, parser);
    }
  }

  private transformParseError(error: unknown, parser: ClassParser): Error {
    const recognitionError = this.unwrapRecognitionError(error);
    const offendingToken = this.resolveOffendingToken(recognitionError, parser);
    const line = offendingToken?.line ?? 0;
    const column = offendingToken?.column ?? 0;
    const message = `Parse error on line ${line}: Expecting 'STR'`;
    const cause = error instanceof Error ? error : undefined;
    const formatted = cause ? new Error(message, { cause }) : new Error(message);

    Object.assign(formatted, {
      hash: {
        line,
        loc: {
          first_line: line,
          last_line: line,
          first_column: column,
          last_column: column,
        },
        text: offendingToken?.text ?? '',
      },
    });

    return formatted;
  }

  private unwrapRecognitionError(error: unknown): RecognitionException | undefined {
    if (!error) {
      return undefined;
    }
    if (error instanceof RecognitionException) {
      return error;
    }
    if (error instanceof ParseCancellationException) {
      const cause = (error as { cause?: unknown }).cause;
      if (cause instanceof RecognitionException) {
        return cause;
      }
    }
    if (typeof error === 'object' && error !== null && 'cause' in error) {
      const cause = (error as { cause?: unknown }).cause;
      if (cause instanceof RecognitionException) {
        return cause;
      }
    }
    return undefined;
  }

  private resolveOffendingToken(
    error: RecognitionException | undefined,
    parser: ClassParser
  ): Token | undefined {
    const candidate = (error as { offendingToken?: Token })?.offendingToken;
    if (candidate) {
      return candidate;
    }

    const current = (
      parser as unknown as { getCurrentToken?: () => Token | undefined }
    ).getCurrentToken?.();
    if (current) {
      return current;
    }

    const stream = (
      parser as unknown as { _input?: { LT?: (offset: number) => Token | undefined } }
    )._input;
    return stream?.LT?.(1);
  }
}

const parserInstance = new ANTLRClassParser();

const exportedParser = {
  parse: (text: string) => parserInstance.parse(text),
  parser: parserInstance,
  yy: null as ClassDbLike | null,
};

Object.defineProperty(exportedParser, 'yy', {
  get() {
    return parserInstance.yy;
  },
  set(value: ClassDbLike | null) {
    parserInstance.yy = value;
  },
});

export default exportedParser;
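
For context, a minimal sketch (not part of the diff) of how a caller could consume the Jison-compatible error shape that transformParseError attaches; the diagram database stand-in and the call site are assumptions for illustration only.

import parser from './antlr-parser.js';

// Sketch only: reading the Jison-style `hash` payload when parsing fails.
// `db` stands in for any ClassDbLike-compatible database.
function tryParse(db: unknown, text: string): void {
  (parser as { yy: unknown }).yy = db;
  try {
    parser.parse(text);
  } catch (error) {
    const hash = (error as Error & {
      hash?: { line: number; loc: { first_column: number }; text: string };
    }).hash;
    // transformParseError attaches hash.line, hash.loc and hash.text, mirroring Jison errors.
    console.error(`Parse failed at line ${hash?.line}, column ${hash?.loc.first_column}`);
  }
}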
31
packages/mermaid/src/diagrams/class/parser/classParser.ts
Normal file
@@ -0,0 +1,31 @@
// @ts-ignore: JISON parser lacks type definitions
import jisonParser from './classDiagram.jison';
import antlrParser from './antlr/antlr-parser.js';

const USE_ANTLR_PARSER = process.env.USE_ANTLR_PARSER === 'true';

const baseParser: any = USE_ANTLR_PARSER ? antlrParser : jisonParser;

const selectedParser: any = Object.create(baseParser);

selectedParser.parse = (source: string): unknown => {
  const normalized = source.replace(/\r\n/g, '\n');
  if (USE_ANTLR_PARSER) {
    return antlrParser.parse(normalized);
  }
  return jisonParser.parse(normalized);
};

Object.defineProperty(selectedParser, 'yy', {
  get() {
    return baseParser.yy;
  },
  set(value) {
    baseParser.yy = value;
  },
  enumerable: true,
  configurable: true,
});

export default selectedParser;
export const parser = selectedParser;
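
Since both backends are exposed through the same object, switching parsers is transparent to call sites; a minimal usage sketch follows (the classDb import path and export shape are assumptions, and the diagram text is illustrative).

// Sketch only: both backends expose the same `parse`/`yy` surface, so callers do not
// change when USE_ANTLR_PARSER flips at module load time.
import { parser } from './classParser.js';
import classDb from '../classDb.js'; // assumed path/export for the class-diagram database

parser.yy = classDb; // forwarded to the Jison or ANTLR backend via the property above
parser.parse('classDiagram\r\n  class Animal\r\n'); // CRLF is normalized to '\n' first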
@@ -0,0 +1,251 @@
|
|||||||
|
lexer grammar FlowLexer;
|
||||||
|
|
||||||
|
// Virtual tokens for parser
|
||||||
|
tokens {
|
||||||
|
NODIR, DIR, PIPE, PE, SQE, DIAMOND_STOP, STADIUMEND, SUBROUTINEEND, CYLINDEREND, DOUBLECIRCLEEND,
|
||||||
|
ELLIPSE_END_TOKEN, TRAPEND, INVTRAPEND, PS, SQS, TEXT, CIRCLEEND, STR
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lexer modes to match Jison's state-based lexing
|
||||||
|
// Based on Jison: %x string, md_string, acc_title, acc_descr, acc_descr_multiline, dir, vertex, text, etc.
|
||||||
|
|
||||||
|
// Shape data tokens - MUST be defined FIRST for absolute precedence over LINK_ID
|
||||||
|
// Match exactly "@{" like Jison does (no whitespace allowed between @ and {)
|
||||||
|
SHAPE_DATA_START: '@{' -> pushMode(SHAPE_DATA_MODE);
|
||||||
|
|
||||||
|
// Accessibility tokens
|
||||||
|
ACC_TITLE: 'accTitle' WS* ':' WS* -> pushMode(ACC_TITLE_MODE);
|
||||||
|
ACC_DESCR: 'accDescr' WS* ':' WS* -> pushMode(ACC_DESCR_MODE);
|
||||||
|
ACC_DESCR_MULTI: 'accDescr' WS* '{' WS* -> pushMode(ACC_DESCR_MULTILINE_MODE);
|
||||||
|
|
||||||
|
// Interactivity tokens
|
||||||
|
CALL: 'call' WS+ -> pushMode(CALLBACKNAME_MODE);
|
||||||
|
HREF: 'href' WS;
|
||||||
|
// CLICK token - matches 'click' + whitespace + node ID (like Jison)
|
||||||
|
CLICK: 'click' WS+ [A-Za-z0-9_]+ -> pushMode(CLICK_MODE);
|
||||||
|
|
||||||
|
// Graph declaration tokens - these trigger direction mode
|
||||||
|
GRAPH: ('flowchart-elk' | 'graph' | 'flowchart') -> pushMode(DIR_MODE);
|
||||||
|
SUBGRAPH: 'subgraph';
|
||||||
|
END: 'end';
|
||||||
|
|
||||||
|
// Link targets
|
||||||
|
LINK_TARGET: ('_self' | '_blank' | '_parent' | '_top');
|
||||||
|
|
||||||
|
// Style and class tokens
|
||||||
|
STYLE: 'style';
|
||||||
|
DEFAULT: 'default';
|
||||||
|
LINKSTYLE: 'linkStyle';
|
||||||
|
INTERPOLATE: 'interpolate';
|
||||||
|
CLASSDEF: 'classDef';
|
||||||
|
CLASS: 'class';
|
||||||
|
|
||||||
|
// String tokens - must come early to avoid conflicts with QUOTE
|
||||||
|
MD_STRING_START: '"`' -> pushMode(MD_STRING_MODE);
|
||||||
|
|
||||||
|
// Direction tokens - matches Jison's direction_tb, direction_bt, etc.
|
||||||
|
// These handle "direction TB", "direction BT", etc. statements within subgraphs
|
||||||
|
DIRECTION_TB: 'direction' WS+ 'TB' ~[\n]*;
|
||||||
|
DIRECTION_BT: 'direction' WS+ 'BT' ~[\n]*;
|
||||||
|
DIRECTION_RL: 'direction' WS+ 'RL' ~[\n]*;
|
||||||
|
DIRECTION_LR: 'direction' WS+ 'LR' ~[\n]*;
|
||||||
|
|
||||||
|
// ELLIPSE_START must come very early to avoid conflicts with PAREN_START
|
||||||
|
ELLIPSE_START: '(-' -> pushMode(ELLIPSE_TEXT_MODE);
|
||||||
|
|
||||||
|
// Link ID token - matches edge IDs like "e1@" when followed by link patterns
|
||||||
|
// Uses a negative lookahead pattern to match the Jison lookahead (?=[^\{\"])
|
||||||
|
// This prevents LINK_ID from matching "e1@{" and allows SHAPE_DATA_START to match "@{" correctly
|
||||||
|
// The pattern matches any non-whitespace followed by @ but only when NOT followed by { or "
|
||||||
|
LINK_ID: ~[ \t\r\n"]+ '@' {this.inputStream.LA(1) != '{'.charCodeAt(0) && this.inputStream.LA(1) != '"'.charCodeAt(0)}?;
|
||||||
|
|
||||||
|
NUM: [0-9]+;
|
||||||
|
BRKT: '#';
|
||||||
|
STYLE_SEPARATOR: ':::';
|
||||||
|
COLON: ':';
|
||||||
|
AMP: '&';
|
||||||
|
SEMI: ';';
|
||||||
|
COMMA: ',';
|
||||||
|
MULT: '*';
|
||||||
|
|
||||||
|
// Edge patterns - these are complex in Jison, need careful translation
|
||||||
|
// Normal edges without text: A-->B (matches Jison: \s*[xo<]?\-\-+[-xo>]\s*) - must come first to avoid conflicts
|
||||||
|
LINK_NORMAL: WS* [xo<]? '--' '-'* [-xo>] WS*;
|
||||||
|
// Normal edges with text: A-- text ---B (matches Jison: <INITIAL>\s*[xo<]?\-\-\s* -> START_LINK)
|
||||||
|
START_LINK_NORMAL: WS* [xo<]? '--' WS+ -> pushMode(EDGE_TEXT_MODE);
|
||||||
|
// Normal edges with text (no space): A--text---B - match -- followed by any non-dash character
|
||||||
|
START_LINK_NORMAL_NOSPACE: WS* [xo<]? '--' -> pushMode(EDGE_TEXT_MODE);
|
||||||
|
// Pipe-delimited edge text: A--x| (linkStatement for arrowText) - matches Jison linkStatement pattern
|
||||||
|
LINK_STATEMENT_NORMAL: WS* [xo<]? '--' '-'* [xo<]?;
|
||||||
|
|
||||||
|
// Thick edges with text: A== text ===B (matches Jison: <INITIAL>\s*[xo<]?\=\=\s* -> START_LINK)
|
||||||
|
START_LINK_THICK: WS* [xo<]? '==' WS+ -> pushMode(THICK_EDGE_TEXT_MODE);
|
||||||
|
// Thick edges without text: A==>B (matches Jison: \s*[xo<]?\=\=+[=xo>]\s*)
|
||||||
|
LINK_THICK: WS* [xo<]? '==' '='* [=xo>] WS*;
|
||||||
|
LINK_STATEMENT_THICK: WS* [xo<]? '==' '='* [xo<]?;
|
||||||
|
|
||||||
|
// Dotted edges with text: A-. text .->B (matches Jison: <INITIAL>\s*[xo<]?\-\.\s* -> START_LINK)
|
||||||
|
START_LINK_DOTTED: WS* [xo<]? '-.' WS* -> pushMode(DOTTED_EDGE_TEXT_MODE);
|
||||||
|
// Dotted edges without text: A-.->B (matches Jison: \s*[xo<]?\-?\.+\-[xo>]?\s*)
|
||||||
|
LINK_DOTTED: WS* [xo<]? '-' '.'+ '-' [xo>]? WS*;
|
||||||
|
LINK_STATEMENT_DOTTED: WS* [xo<]? '-' '.'+ [xo<]?;
|
||||||
|
|
||||||
|
// Special link
|
||||||
|
LINK_INVISIBLE: WS* '~~' '~'+ WS*;
|
||||||
|
|
||||||
|
// PIPE handling: push to TEXT_MODE to handle content between pipes
|
||||||
|
// Put this AFTER link patterns to avoid interference with edge parsing
|
||||||
|
PIPE: '|' -> pushMode(TEXT_MODE);
|
||||||
|
|
||||||
|
// Vertex shape tokens - MUST come first (longer patterns before shorter ones)
|
||||||
|
DOUBLECIRCLE_START: '(((' -> pushMode(TEXT_MODE);
|
||||||
|
CIRCLE_START: '((' -> pushMode(TEXT_MODE);
|
||||||
|
// ELLIPSE_START moved to top of file for precedence
|
||||||
|
|
||||||
|
// Basic shape tokens - shorter patterns after longer ones
|
||||||
|
SQUARE_START: '[' -> pushMode(TEXT_MODE), type(SQS);
|
||||||
|
// PAREN_START must come AFTER ELLIPSE_START to avoid consuming '(' before '(-' can match
|
||||||
|
PAREN_START: '(' -> pushMode(TEXT_MODE), type(PS);
|
||||||
|
DIAMOND_START: '{' -> pushMode(TEXT_MODE);
|
||||||
|
// PIPE_START removed - conflicts with PIPE token. Context-sensitive pipe handling in TEXT_MODE
|
||||||
|
STADIUM_START: '([' -> pushMode(TEXT_MODE);
|
||||||
|
SUBROUTINE_START: '[[' -> pushMode(TEXT_MODE);
|
||||||
|
VERTEX_WITH_PROPS_START: '[|';
|
||||||
|
CYLINDER_START: '[(' -> pushMode(TEXT_MODE);
|
||||||
|
TRAP_START: '[/' -> pushMode(TRAP_TEXT_MODE);
|
||||||
|
INVTRAP_START: '[\\' -> pushMode(TRAP_TEXT_MODE);
|
||||||
|
|
||||||
|
// Other basic shape tokens
|
||||||
|
TAGSTART: '<';
|
||||||
|
TAGEND: '>' -> pushMode(TEXT_MODE);
|
||||||
|
UP: '^';
|
||||||
|
DOWN: 'v';
|
||||||
|
MINUS: '-';
|
||||||
|
|
||||||
|
// Node string - allow dashes with lookahead to prevent conflicts with links (matches Jison pattern)
|
||||||
|
// Pattern: ([A-Za-z0-9!"\#$%&'*+\.`?\\_\/]|\-(?=[^\>\-\.])|=(?!=))+
|
||||||
|
NODE_STRING: ([A-Za-z0-9!"#$%&'*+.`?\\/_] | '-' ~[>\-.] | '=' ~'=')+;
|
||||||
|
|
||||||
|
// Unicode text support (simplified from Jison's extensive Unicode ranges)
|
||||||
|
UNICODE_TEXT: [\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE]+;
|
||||||
|
|
||||||
|
// String handling - matches Jison's <*>["] behavior (any mode can enter string mode)
|
||||||
|
QUOTE: '"' -> pushMode(STRING_MODE), skip;
|
||||||
|
|
||||||
|
NEWLINE: ('\r'? '\n')+;
|
||||||
|
WS: [ \t]+;
|
||||||
|
|
||||||
|
// Lexer modes
|
||||||
|
mode ACC_TITLE_MODE;
|
||||||
|
ACC_TITLE_VALUE: (~[\n;#])* -> popMode;
|
||||||
|
|
||||||
|
mode ACC_DESCR_MODE;
|
||||||
|
ACC_DESCR_VALUE: (~[\n;#])* -> popMode;
|
||||||
|
|
||||||
|
mode ACC_DESCR_MULTILINE_MODE;
|
||||||
|
ACC_DESCR_MULTILINE_END: '}' -> popMode;
|
||||||
|
ACC_DESCR_MULTILINE_VALUE: (~[}])*;
|
||||||
|
|
||||||
|
mode SHAPE_DATA_MODE;
|
||||||
|
SHAPE_DATA_STRING_START: '"' -> pushMode(SHAPE_DATA_STRING_MODE);
|
||||||
|
SHAPE_DATA_CONTENT: (~[}"]+);
|
||||||
|
SHAPE_DATA_END: '}' -> popMode;
|
||||||
|
|
||||||
|
mode SHAPE_DATA_STRING_MODE;
|
||||||
|
SHAPE_DATA_STRING_END: '"' -> popMode;
|
||||||
|
SHAPE_DATA_STRING_CONTENT: (~["]+);
|
||||||
|
|
||||||
|
mode CALLBACKNAME_MODE;
|
||||||
|
CALLBACKNAME_PAREN_EMPTY: '(' WS* ')' -> popMode, type(CALLBACKARGS);
|
||||||
|
CALLBACKNAME_PAREN_START: '(' -> popMode, pushMode(CALLBACKARGS_MODE);
|
||||||
|
CALLBACKNAME: (~[(])*;
|
||||||
|
|
||||||
|
mode CALLBACKARGS_MODE;
|
||||||
|
CALLBACKARGS_END: ')' -> popMode;
|
||||||
|
CALLBACKARGS: (~[)])*;
|
||||||
|
|
||||||
|
mode CLICK_MODE;
|
||||||
|
CLICK_NEWLINE: ('\r'? '\n')+ -> popMode, type(NEWLINE);
|
||||||
|
CLICK_WS: WS -> skip;
|
||||||
|
CLICK_CALL: 'call' WS+ -> type(CALL), pushMode(CALLBACKNAME_MODE);
|
||||||
|
CLICK_HREF: 'href' -> type(HREF);
|
||||||
|
CLICK_STR: '"' (~["])* '"' -> type(STR);
|
||||||
|
CLICK_LINK_TARGET: ('_self' | '_blank' | '_parent' | '_top') -> type(LINK_TARGET);
|
||||||
|
CLICK_CALLBACKNAME: [A-Za-z0-9_]+ -> type(CALLBACKNAME);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
mode DIR_MODE;
|
||||||
|
DIR_NEWLINE: ('\r'? '\n')* WS* '\n' -> popMode, type(NODIR);
|
||||||
|
DIR_LR: WS* 'LR' -> popMode, type(DIR);
|
||||||
|
DIR_RL: WS* 'RL' -> popMode, type(DIR);
|
||||||
|
DIR_TB: WS* 'TB' -> popMode, type(DIR);
|
||||||
|
DIR_BT: WS* 'BT' -> popMode, type(DIR);
|
||||||
|
DIR_TD: WS* 'TD' -> popMode, type(DIR);
|
||||||
|
DIR_BR: WS* 'BR' -> popMode, type(DIR);
|
||||||
|
DIR_LEFT: WS* '<' -> popMode, type(DIR);
|
||||||
|
DIR_RIGHT: WS* '>' -> popMode, type(DIR);
|
||||||
|
DIR_UP: WS* '^' -> popMode, type(DIR);
|
||||||
|
DIR_DOWN: WS* 'v' -> popMode, type(DIR);
|
||||||
|
|
||||||
|
mode STRING_MODE;
|
||||||
|
STRING_END: '"' -> popMode, skip;
|
||||||
|
STR: (~["]+);
|
||||||
|
|
||||||
|
mode MD_STRING_MODE;
|
||||||
|
MD_STRING_END: '`"' -> popMode;
|
||||||
|
MD_STR: (~[`"])+;
|
||||||
|
|
||||||
|
mode TEXT_MODE;
|
||||||
|
// Allow nested diamond starts (for hexagon nodes)
|
||||||
|
TEXT_DIAMOND_START: '{' -> pushMode(TEXT_MODE), type(DIAMOND_START);
|
||||||
|
|
||||||
|
// Handle nested parentheses and brackets like Jison
|
||||||
|
TEXT_PAREN_START: '(' -> pushMode(TEXT_MODE), type(PS);
|
||||||
|
TEXT_SQUARE_START: '[' -> pushMode(TEXT_MODE), type(SQS);
|
||||||
|
|
||||||
|
// Handle quoted strings in text mode - matches Jison's <*>["] behavior
|
||||||
|
// Skip the opening quote token, just push to STRING_MODE like Jison does
|
||||||
|
TEXT_STRING_START: '"' -> pushMode(STRING_MODE), skip;
|
||||||
|
|
||||||
|
// Handle closing pipe in text mode - pop back to default mode
|
||||||
|
TEXT_PIPE_END: '|' -> popMode, type(PIPE);
|
||||||
|
|
||||||
|
TEXT_PAREN_END: ')' -> popMode, type(PE);
|
||||||
|
TEXT_SQUARE_END: ']' -> popMode, type(SQE);
|
||||||
|
TEXT_DIAMOND_END: '}' -> popMode, type(DIAMOND_STOP);
|
||||||
|
TEXT_STADIUM_END: '])' -> popMode, type(STADIUMEND);
|
||||||
|
TEXT_SUBROUTINE_END: ']]' -> popMode, type(SUBROUTINEEND);
|
||||||
|
TEXT_CYLINDER_END: ')]' -> popMode, type(CYLINDEREND);
|
||||||
|
TEXT_DOUBLECIRCLE_END: ')))' -> popMode, type(DOUBLECIRCLEEND);
|
||||||
|
TEXT_CIRCLE_END: '))' -> popMode, type(CIRCLEEND);
|
||||||
|
// Now allow all characters except the specific end tokens for this mode
|
||||||
|
TEXT_CONTENT: (~[(){}|\]"])+;
|
||||||
|
|
||||||
|
mode ELLIPSE_TEXT_MODE;
|
||||||
|
ELLIPSE_END: '-)' -> popMode, type(ELLIPSE_END_TOKEN);
|
||||||
|
ELLIPSE_TEXT: (~[-)])+;
|
||||||
|
|
||||||
|
mode TRAP_TEXT_MODE;
|
||||||
|
TRAP_END_BRACKET: '\\]' -> popMode, type(TRAPEND);
|
||||||
|
INVTRAP_END_BRACKET: '/]' -> popMode, type(INVTRAPEND);
|
||||||
|
TRAP_TEXT: (~[\\/\]])+;
|
||||||
|
|
||||||
|
mode EDGE_TEXT_MODE;
|
||||||
|
// Handle space-delimited pattern: A-- text ----B or A-- text -->B (matches Jison: [^-]|\-(?!\-)+)
|
||||||
|
// Must handle both cases: extra dashes without arrow (----) and dashes with arrow (-->)
|
||||||
|
EDGE_TEXT_LINK_END: WS* '--' '-'* [-xo>]? WS* -> popMode, type(LINK_NORMAL);
|
||||||
|
// Match any character including spaces and single dashes, but not double dashes
|
||||||
|
EDGE_TEXT: (~[-] | '-' ~[-])+;
|
||||||
|
|
||||||
|
mode THICK_EDGE_TEXT_MODE;
|
||||||
|
// Handle thick edge patterns: A== text ====B or A== text ==>B
|
||||||
|
THICK_EDGE_TEXT_LINK_END: WS* '==' '='* [=xo>]? WS* -> popMode, type(LINK_THICK);
|
||||||
|
THICK_EDGE_TEXT: (~[=] | '=' ~[=])+;
|
||||||
|
|
||||||
|
mode DOTTED_EDGE_TEXT_MODE;
|
||||||
|
// Handle dotted edge patterns: A-. text ...-B or A-. text .->B
|
||||||
|
DOTTED_EDGE_TEXT_LINK_END: WS* '.'+ '-' [xo>]? WS* -> popMode, type(LINK_DOTTED);
|
||||||
|
DOTTED_EDGE_TEXT: ~[.]+;
|
||||||
|
|
||||||
|
|
@@ -0,0 +1,286 @@
|
|||||||
|
parser grammar FlowParser;
|
||||||
|
|
||||||
|
options {
|
||||||
|
tokenVocab = FlowLexer;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Entry point - matches Jison's "start: graphConfig document"
|
||||||
|
start: graphConfig document;
|
||||||
|
|
||||||
|
// Document structure - matches Jison's document rule
|
||||||
|
document:
|
||||||
|
line*
|
||||||
|
;
|
||||||
|
|
||||||
|
// Line structure - matches Jison's line rule
|
||||||
|
line:
|
||||||
|
statement
|
||||||
|
| SEMI
|
||||||
|
| NEWLINE
|
||||||
|
| WS
|
||||||
|
;
|
||||||
|
|
||||||
|
// Graph configuration - matches Jison's graphConfig rule
|
||||||
|
graphConfig:
|
||||||
|
WS graphConfig
|
||||||
|
| NEWLINE graphConfig
|
||||||
|
| GRAPH NODIR // Default TB direction
|
||||||
|
| GRAPH DIR firstStmtSeparator // Explicit direction
|
||||||
|
;
|
||||||
|
|
||||||
|
// Statement types - matches Jison's statement rule
|
||||||
|
statement:
|
||||||
|
vertexStatement separator
|
||||||
|
| standaloneVertex separator // For edge property statements like e1@{curve: basis}
|
||||||
|
| styleStatement separator
|
||||||
|
| linkStyleStatement separator
|
||||||
|
| classDefStatement separator
|
||||||
|
| classStatement separator
|
||||||
|
| clickStatement separator
|
||||||
|
| subgraphStatement separator
|
||||||
|
| direction
|
||||||
|
| accTitle
|
||||||
|
| accDescr
|
||||||
|
;
|
||||||
|
|
||||||
|
// Separators
|
||||||
|
separator: NEWLINE | SEMI | EOF;
|
||||||
|
firstStmtSeparator: SEMI | NEWLINE | spaceList NEWLINE;
|
||||||
|
spaceList: WS spaceList | WS;
|
||||||
|
|
||||||
|
// Vertex statement - matches Jison's vertexStatement rule
|
||||||
|
vertexStatement:
|
||||||
|
vertexStatement link node shapeData // Chain with shape data
|
||||||
|
| vertexStatement link node // Chain without shape data
|
||||||
|
| vertexStatement link node spaceList // Chain with trailing space
|
||||||
|
| node spaceList // Single node with space
|
||||||
|
| node shapeData // Single node with shape data
|
||||||
|
| node // Single node
|
||||||
|
;
|
||||||
|
|
||||||
|
// Standalone vertex - for edge property statements like e1@{curve: basis}
|
||||||
|
standaloneVertex:
|
||||||
|
NODE_STRING shapeData
|
||||||
|
| LINK_ID shapeData // For edge IDs like e1@{curve: basis}
|
||||||
|
;
|
||||||
|
|
||||||
|
// Node definition - matches Jison's node rule
|
||||||
|
node:
|
||||||
|
styledVertex
|
||||||
|
| node shapeData spaceList AMP spaceList styledVertex
|
||||||
|
| node spaceList AMP spaceList styledVertex
|
||||||
|
;
|
||||||
|
|
||||||
|
// Styled vertex - matches Jison's styledVertex rule
|
||||||
|
styledVertex:
|
||||||
|
vertex
|
||||||
|
| vertex STYLE_SEPARATOR idString
|
||||||
|
;
|
||||||
|
|
||||||
|
// Vertex shapes - matches Jison's vertex rule
|
||||||
|
vertex:
|
||||||
|
idString SQS text SQE // Square: [text]
|
||||||
|
| idString DOUBLECIRCLE_START text DOUBLECIRCLEEND // Double circle: (((text)))
|
||||||
|
| idString CIRCLE_START text CIRCLEEND // Circle: ((text))
|
||||||
|
| idString ELLIPSE_START text ELLIPSE_END_TOKEN // Ellipse: (-text-)
|
||||||
|
| idString STADIUM_START text STADIUMEND // Stadium: ([text])
|
||||||
|
| idString SUBROUTINE_START text SUBROUTINEEND // Subroutine: [[text]]
|
||||||
|
| idString VERTEX_WITH_PROPS_START NODE_STRING COLON NODE_STRING PIPE text SQE // Props: [|field:value|text]
|
||||||
|
| idString CYLINDER_START text CYLINDEREND // Cylinder: [(text)]
|
||||||
|
| idString PS text PE // Round: (text)
|
||||||
|
| idString DIAMOND_START text DIAMOND_STOP // Diamond: {text}
|
||||||
|
| idString DIAMOND_START DIAMOND_START text DIAMOND_STOP DIAMOND_STOP // Hexagon: {{text}}
|
||||||
|
| idString TAGEND text SQE // Odd: >text]
|
||||||
|
| idString TRAP_START text TRAPEND // Trapezoid: [/text\]
|
||||||
|
| idString INVTRAP_START text INVTRAPEND // Inv trapezoid: [\text/]
|
||||||
|
| idString TRAP_START text INVTRAPEND // Lean right: [/text/]
|
||||||
|
| idString INVTRAP_START text TRAPEND // Lean left: [\text\]
|
||||||
|
| idString // Plain node
|
||||||
|
;
|
||||||
|
|
||||||
|
// Link definition - matches Jison's link rule
|
||||||
|
link:
|
||||||
|
linkStatement arrowText spaceList?
|
||||||
|
| linkStatement
|
||||||
|
| START_LINK_NORMAL edgeText LINK_NORMAL
|
||||||
|
| START_LINK_NORMAL_NOSPACE edgeText LINK_NORMAL
|
||||||
|
| START_LINK_THICK edgeText LINK_THICK
|
||||||
|
| START_LINK_DOTTED edgeText LINK_DOTTED
|
||||||
|
| LINK_ID START_LINK_NORMAL edgeText LINK_NORMAL
|
||||||
|
| LINK_ID START_LINK_NORMAL_NOSPACE edgeText LINK_NORMAL
|
||||||
|
| LINK_ID START_LINK_THICK edgeText LINK_THICK
|
||||||
|
| LINK_ID START_LINK_DOTTED edgeText LINK_DOTTED
|
||||||
|
;
|
||||||
|
|
||||||
|
// Link statement - matches Jison's linkStatement rule
|
||||||
|
linkStatement:
|
||||||
|
LINK_NORMAL
|
||||||
|
| LINK_THICK
|
||||||
|
| LINK_DOTTED
|
||||||
|
| LINK_INVISIBLE
|
||||||
|
| LINK_STATEMENT_NORMAL
|
||||||
|
| LINK_STATEMENT_DOTTED
|
||||||
|
| LINK_ID LINK_NORMAL
|
||||||
|
| LINK_ID LINK_THICK
|
||||||
|
| LINK_ID LINK_DOTTED
|
||||||
|
| LINK_ID LINK_INVISIBLE
|
||||||
|
| LINK_ID LINK_STATEMENT_NORMAL
|
||||||
|
| LINK_ID LINK_STATEMENT_THICK
|
||||||
|
;
|
||||||
|
|
||||||
|
// Edge text - matches Jison's edgeText rule
|
||||||
|
edgeText:
|
||||||
|
edgeTextToken
|
||||||
|
| edgeText edgeTextToken
|
||||||
|
| stringLiteral
|
||||||
|
| MD_STR
|
||||||
|
;
|
||||||
|
|
||||||
|
// Arrow text - matches Jison's arrowText rule
|
||||||
|
arrowText:
|
||||||
|
PIPE text PIPE
|
||||||
|
;
|
||||||
|
|
||||||
|
// Text definition - matches Jison's text rule
|
||||||
|
text:
|
||||||
|
textToken
|
||||||
|
| text textToken
|
||||||
|
| stringLiteral
|
||||||
|
| MD_STR
|
||||||
|
| NODE_STRING
|
||||||
|
| TEXT_CONTENT
|
||||||
|
| ELLIPSE_TEXT
|
||||||
|
| TRAP_TEXT
|
||||||
|
;
|
||||||
|
|
||||||
|
// Shape data - matches Jison's shapeData rule
|
||||||
|
shapeData:
|
||||||
|
SHAPE_DATA_START shapeDataContent SHAPE_DATA_END
|
||||||
|
;
|
||||||
|
|
||||||
|
shapeDataContent:
|
||||||
|
shapeDataContent SHAPE_DATA_CONTENT
|
||||||
|
| shapeDataContent SHAPE_DATA_STRING_START SHAPE_DATA_STRING_CONTENT SHAPE_DATA_STRING_END
|
||||||
|
| SHAPE_DATA_CONTENT
|
||||||
|
| SHAPE_DATA_STRING_START SHAPE_DATA_STRING_CONTENT SHAPE_DATA_STRING_END
|
||||||
|
|
|
||||||
|
;
|
||||||
|
|
||||||
|
// Style statement - matches Jison's styleStatement rule
|
||||||
|
styleStatement:
|
||||||
|
STYLE WS idString WS stylesOpt
|
||||||
|
;
|
||||||
|
|
||||||
|
// Link style statement - matches Jison's linkStyleStatement rule
|
||||||
|
linkStyleStatement:
|
||||||
|
LINKSTYLE WS DEFAULT WS stylesOpt
|
||||||
|
| LINKSTYLE WS numList WS stylesOpt
|
||||||
|
| LINKSTYLE WS DEFAULT WS INTERPOLATE WS alphaNum WS stylesOpt
|
||||||
|
| LINKSTYLE WS numList WS INTERPOLATE WS alphaNum WS stylesOpt
|
||||||
|
| LINKSTYLE WS DEFAULT WS INTERPOLATE WS alphaNum
|
||||||
|
| LINKSTYLE WS numList WS INTERPOLATE WS alphaNum
|
||||||
|
;
|
||||||
|
|
||||||
|
// Class definition statement - matches Jison's classDefStatement rule
|
||||||
|
classDefStatement:
|
||||||
|
CLASSDEF WS idString WS stylesOpt
|
||||||
|
;
|
||||||
|
|
||||||
|
// Class statement - matches Jison's classStatement rule
|
||||||
|
classStatement:
|
||||||
|
CLASS WS idString WS idString
|
||||||
|
;
|
||||||
|
|
||||||
|
// String rule to handle STR patterns
|
||||||
|
stringLiteral:
|
||||||
|
STR
|
||||||
|
;
|
||||||
|
|
||||||
|
// Click statement - matches Jison's clickStatement rule
|
||||||
|
// CLICK token now contains both 'click' and node ID (like Jison)
|
||||||
|
clickStatement:
|
||||||
|
CLICK CALLBACKNAME
|
||||||
|
| CLICK CALLBACKNAME stringLiteral
|
||||||
|
| CLICK CALLBACKNAME CALLBACKARGS
|
||||||
|
| CLICK CALLBACKNAME CALLBACKARGS stringLiteral
|
||||||
|
| CLICK CALL CALLBACKNAME
|
||||||
|
| CLICK CALL CALLBACKNAME stringLiteral
|
||||||
|
| CLICK CALL CALLBACKNAME CALLBACKARGS
|
||||||
|
| CLICK CALL CALLBACKNAME CALLBACKARGS stringLiteral
|
||||||
|
| CLICK HREF stringLiteral
|
||||||
|
| CLICK HREF stringLiteral stringLiteral
|
||||||
|
| CLICK HREF stringLiteral LINK_TARGET
|
||||||
|
| CLICK HREF stringLiteral stringLiteral LINK_TARGET
|
||||||
|
| CLICK stringLiteral // CLICK STR - direct click with URL
|
||||||
|
| CLICK stringLiteral stringLiteral // CLICK STR STR - click with URL and tooltip
|
||||||
|
| CLICK stringLiteral LINK_TARGET // CLICK STR LINK_TARGET - click with URL and target
|
||||||
|
| CLICK stringLiteral stringLiteral LINK_TARGET // CLICK STR STR LINK_TARGET - click with URL, tooltip, and target
|
||||||
|
;
|
||||||
|
|
||||||
|
// Subgraph statement - matches Jison's subgraph rules
|
||||||
|
subgraphStatement:
|
||||||
|
SUBGRAPH WS textNoTags SQS text SQE separator document END
|
||||||
|
| SUBGRAPH WS textNoTags separator document END
|
||||||
|
| SUBGRAPH separator document END
|
||||||
|
;
|
||||||
|
|
||||||
|
// Direction statement - matches Jison's direction rule
|
||||||
|
direction:
|
||||||
|
DIRECTION_TB
|
||||||
|
| DIRECTION_BT
|
||||||
|
| DIRECTION_RL
|
||||||
|
| DIRECTION_LR
|
||||||
|
;
|
||||||
|
|
||||||
|
// Accessibility statements
|
||||||
|
accTitle: ACC_TITLE ACC_TITLE_VALUE;
|
||||||
|
accDescr: ACC_DESCR ACC_DESCR_VALUE | ACC_DESCR_MULTI ACC_DESCR_MULTILINE_VALUE ACC_DESCR_MULTILINE_END;
|
||||||
|
|
||||||
|
// Number list - matches Jison's numList rule
|
||||||
|
numList:
|
||||||
|
NUM
|
||||||
|
| numList COMMA NUM
|
||||||
|
;
|
||||||
|
|
||||||
|
// Styles - matches Jison's stylesOpt rule
|
||||||
|
stylesOpt:
|
||||||
|
style
|
||||||
|
| stylesOpt COMMA style
|
||||||
|
;
|
||||||
|
|
||||||
|
// Style components - matches Jison's style rule
|
||||||
|
style:
|
||||||
|
styleComponent
|
||||||
|
| style styleComponent
|
||||||
|
;
|
||||||
|
|
||||||
|
// Style component - matches Jison's styleComponent rule
|
||||||
|
styleComponent: NUM | NODE_STRING | COLON | WS | BRKT | STYLE | MULT | MINUS;
|
||||||
|
|
||||||
|
// Token definitions - matches Jison's token lists
|
||||||
|
idString:
|
||||||
|
idStringToken
|
||||||
|
| idString idStringToken
|
||||||
|
;
|
||||||
|
|
||||||
|
alphaNum:
|
||||||
|
alphaNumToken
|
||||||
|
| alphaNum alphaNumToken
|
||||||
|
;
|
||||||
|
|
||||||
|
textNoTags:
|
||||||
|
textNoTagsToken
|
||||||
|
| textNoTags textNoTagsToken
|
||||||
|
| stringLiteral
|
||||||
|
| MD_STR
|
||||||
|
;
|
||||||
|
|
||||||
|
// Token types - matches Jison's token definitions
|
||||||
|
idStringToken: NUM | NODE_STRING | DOWN | MINUS | DEFAULT | COMMA | COLON | AMP | BRKT | MULT | UNICODE_TEXT;
|
||||||
|
textToken: TEXT_CONTENT | TAGSTART | TAGEND | UNICODE_TEXT | NODE_STRING | WS;
|
||||||
|
textNoTagsToken: NUM | NODE_STRING | WS | MINUS | AMP | UNICODE_TEXT | COLON | MULT | BRKT | keywords | START_LINK_NORMAL;
|
||||||
|
edgeTextToken: EDGE_TEXT | THICK_EDGE_TEXT | DOTTED_EDGE_TEXT | UNICODE_TEXT;
|
||||||
|
alphaNumToken: NUM | UNICODE_TEXT | NODE_STRING | DIR | DOWN | MINUS | COMMA | COLON | AMP | BRKT | MULT;
|
||||||
|
|
||||||
|
// Keywords - matches Jison's keywords rule
|
||||||
|
keywords: STYLE | LINKSTYLE | CLASSDEF | CLASS | CLICK | GRAPH | DIR | SUBGRAPH | END | DOWN | UP;
|
1696
packages/mermaid/src/diagrams/flowchart/parser/antlr/antlr-parser.ts
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,15 @@
const { CharStream } = require('antlr4ng');
const { FlowLexer } = require('./generated/FlowLexer.ts');

const input = 'D@{ shape: rounded }';
console.log('Input:', input);

const chars = CharStream.fromString(input);
const lexer = new FlowLexer(chars);
const tokens = lexer.getAllTokens();

console.log('Tokens:');
for (let i = 0; i < tokens.length; i++) {
  const token = tokens[i];
  console.log(` [${i}] Type: ${token.type}, Text: '${token.text}', Channel: ${token.channel}`);
}
@@ -1,12 +1,22 @@
// @ts-ignore: JISON doesn't support types
import flowJisonParser from './flow.jison';
import antlrParser from './antlr/antlr-parser.js';

const newParser = Object.assign({}, flowJisonParser);
// Configuration flag to switch between parsers
// Set to true to test ANTLR parser, false to use original Jison parser
const USE_ANTLR_PARSER = process.env.USE_ANTLR_PARSER === 'true';

const newParser = Object.assign({}, USE_ANTLR_PARSER ? antlrParser : flowJisonParser);

newParser.parse = (src: string): unknown => {
  // remove the trailing whitespace after closing curly braces when ending a line break
  const newSrc = src.replace(/}\s*\n/g, '}\n');

  if (USE_ANTLR_PARSER) {
    return antlrParser.parse(newSrc);
  } else {
    return flowJisonParser.parse(newSrc);
  }
};

export default newParser;
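
A minimal sketch (not from the diff) of what the pre-parse normalization above does: trailing whitespace after a closing "}" at the end of a line is stripped, so shape-data blocks such as A@{ shape: rounded } tokenize the same with or without it.

// Sketch only: illustrates the }\s*\n -> }\n replacement used by newParser.parse.
const src = 'graph TD\n  A@{ shape: rounded }   \n  B --> A\n';
const newSrc = src.replace(/}\s*\n/g, '}\n');
console.log(JSON.stringify(newSrc)); // "graph TD\n  A@{ shape: rounded }\n  B --> A\n"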
@@ -268,9 +268,7 @@ const fixTaskDates = function (startTime, endTime, dateFormat, excludes, include

const getStartDate = function (prevTime, dateFormat, str) {
  str = str.trim();
  if ((dateFormat.trim() === 'x' || dateFormat.trim() === 'X') && /^\d+$/.test(str)) {
    return new Date(Number(str));
  }
  // Test for after
  const afterRePattern = /^after\s+(?<ids>[\d\w- ]+)/;
  const afterStatement = afterRePattern.exec(str);
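
The numeric-timestamp branch shown in the hunk above short-circuits the usual dateFormat handling when the format is 'x' or 'X' and the token is all digits. A minimal standalone sketch of that check (the helper name is made up for illustration; it is not a project function):

// Sketch only: mirrors the numeric-timestamp branch of getStartDate above.
function parseNumericStart(dateFormat: string, str: string): Date | undefined {
  str = str.trim();
  if ((dateFormat.trim() === 'x' || dateFormat.trim() === 'X') && /^\d+$/.test(str)) {
    return new Date(Number(str)); // treated as an epoch offset in milliseconds
  }
  return undefined; // fall through to the regular dateFormat parsing
}

parseNumericStart('x', '000'); // 1970-01-01T00:00:00.000Z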
@@ -37,7 +37,6 @@ export class MindmapDB
  private nodes: MindmapNode[] = [];
  private count = 0;
  private elements: Record<number, D3Element> = {};
  private baseLevel?: number;
  public readonly nodeType: typeof nodeType;

  constructor() {
@@ -55,7 +54,6 @@ export class MindmapDB
    this.nodes = [];
    this.count = 0;
    this.elements = {};
    this.baseLevel = undefined;
  }

  public getParent(level: number): MindmapNode | null {
@@ -74,17 +72,6 @@ export class MindmapDB
  public addNode(level: number, id: string, descr: string, type: number): void {
    log.info('addNode', level, id, descr, type);

    let isRoot = false;

    if (this.nodes.length === 0) {
      this.baseLevel = level;
      level = 0;
      isRoot = true;
    } else if (this.baseLevel !== undefined) {
      level = level - this.baseLevel;
      isRoot = false;
    }

    const conf = getConfig();
    let padding = conf.mindmap?.padding ?? defaultConfig.mindmap.padding;

@@ -105,7 +92,6 @@ export class MindmapDB
      children: [],
      width: conf.mindmap?.maxNodeWidth ?? defaultConfig.mindmap.maxNodeWidth,
      padding,
      isRoot,
    };

    const parent = this.getParent(level);
@@ -113,7 +99,7 @@ export class MindmapDB
      parent.children.push(node);
      this.nodes.push(node);
    } else {
      if (isRoot) {
      if (this.nodes.length === 0) {
        this.nodes.push(node);
      } else {
        throw new Error(
@@ -218,7 +204,8 @@ export class MindmapDB
    // Build CSS classes for the node
    const cssClasses = ['mindmap-node'];

    if (node.isRoot === true) {
    // Add section-specific classes
    if (node.level === 0) {
      // Root node gets special classes
      cssClasses.push('section-root', 'section--1');
    } else if (node.section !== undefined) {
@@ -15,7 +15,6 @@ export interface MindmapNode
  icon?: string;
  x?: number;
  y?: number;
  isRoot?: boolean;
}

export type FilledMindMapNode = RequiredDeep<MindmapNode>;
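
For reference, a minimal standalone sketch (not the project code) of the level-based variant of the CSS-class branch in the hunk above; the section-${...} class name in the else branch is an assumption, since the hunk cuts off at that line.

// Sketch only: root detection keyed on node.level now that the isRoot flag is gone.
function buildCssClasses(node: { level: number; section?: number }): string[] {
  const cssClasses = ['mindmap-node'];
  if (node.level === 0) {
    // Root node gets special classes
    cssClasses.push('section-root', 'section--1');
  } else if (node.section !== undefined) {
    cssClasses.push(`section-${node.section}`); // assumed section class name
  }
  return cssClasses;
}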
@@ -0,0 +1,200 @@
|
|||||||
|
lexer grammar SequenceLexer;
|
||||||
|
tokens { AS }
|
||||||
|
|
||||||
|
|
||||||
|
// Comments (skip)
|
||||||
|
HASH_COMMENT: '#' ~[\r\n]* -> skip;
|
||||||
|
PERCENT_COMMENT1: '%%' ~[\r\n]* -> skip;
|
||||||
|
PERCENT_COMMENT2: ~[}] '%%' ~[\r\n]* -> skip;
|
||||||
|
|
||||||
|
// Whitespace and newline
|
||||||
|
NEWLINE: ('\r'? '\n')+;
|
||||||
|
WS: [ \t]+ -> skip;
|
||||||
|
|
||||||
|
// Punctuation and simple symbols
|
||||||
|
COMMA: ',';
|
||||||
|
SEMI: ';' -> type(NEWLINE);
|
||||||
|
PLUS: '+';
|
||||||
|
MINUS: '-';
|
||||||
|
|
||||||
|
// Core keywords
|
||||||
|
SD: 'sequenceDiagram';
|
||||||
|
PARTICIPANT: 'participant' -> pushMode(ID);
|
||||||
|
PARTICIPANT_ACTOR: 'actor' -> pushMode(ID);
|
||||||
|
CREATE: 'create';
|
||||||
|
DESTROY: 'destroy';
|
||||||
|
BOX: 'box' -> pushMode(LINE);
|
||||||
|
|
||||||
|
// Blocks and control flow
|
||||||
|
LOOP: 'loop' -> pushMode(LINE);
|
||||||
|
RECT: 'rect' -> pushMode(LINE);
|
||||||
|
OPT: 'opt' -> pushMode(LINE);
|
||||||
|
ALT: 'alt' -> pushMode(LINE);
|
||||||
|
ELSE: 'else' -> pushMode(LINE);
|
||||||
|
PAR: 'par' -> pushMode(LINE);
|
||||||
|
PAR_OVER: 'par_over' -> pushMode(LINE);
|
||||||
|
AND: 'and' -> pushMode(LINE);
|
||||||
|
CRITICAL: 'critical' -> pushMode(LINE);
|
||||||
|
OPTION: 'option' -> pushMode(LINE);
|
||||||
|
BREAK: 'break' -> pushMode(LINE);
|
||||||
|
END: 'end';
|
||||||
|
|
||||||
|
// Note and placement
|
||||||
|
LEFT_OF: 'left' WS+ 'of';
|
||||||
|
RIGHT_OF: 'right' WS+ 'of';
|
||||||
|
LINKS: 'links';
|
||||||
|
LINK: 'link';
|
||||||
|
PROPERTIES: 'properties';
|
||||||
|
DETAILS: 'details';
|
||||||
|
OVER: 'over';
|
||||||
|
// Accept both Note and note
|
||||||
|
NOTE: [Nn][Oo][Tt][Ee];
|
||||||
|
|
||||||
|
// Lifecycle
|
||||||
|
ACTIVATE: 'activate';
|
||||||
|
DEACTIVATE: 'deactivate';
|
||||||
|
|
||||||
|
// Titles and accessibility
|
||||||
|
LEGACY_TITLE: 'title' WS* ':' WS* (~[\r\n;#])*;
|
||||||
|
TITLE: 'title' -> pushMode(LINE);
|
||||||
|
ACC_TITLE: 'accTitle' WS* ':' WS* -> pushMode(ACC_TITLE_MODE);
|
||||||
|
ACC_DESCR: 'accDescr' WS* ':' WS* -> pushMode(ACC_DESCR_MODE);
|
||||||
|
ACC_DESCR_MULTI: 'accDescr' WS* '{' WS* -> pushMode(ACC_DESCR_MULTILINE_MODE);
|
||||||
|
|
||||||
|
// Directives
|
||||||
|
AUTONUMBER: 'autonumber';
|
||||||
|
OFF: 'off';
|
||||||
|
|
||||||
|
// Config block @{ ... }
|
||||||
|
CONFIG_START: '@{' -> pushMode(CONFIG_MODE);
|
||||||
|
|
||||||
|
// Arrows (must come before ACTOR)
|
||||||
|
SOLID_ARROW: '->>';
|
||||||
|
BIDIRECTIONAL_SOLID_ARROW: '<<->>';
|
||||||
|
DOTTED_ARROW: '-->>';
|
||||||
|
BIDIRECTIONAL_DOTTED_ARROW: '<<-->>';
|
||||||
|
SOLID_OPEN_ARROW: '->';
|
||||||
|
DOTTED_OPEN_ARROW: '-->';
|
||||||
|
SOLID_CROSS: '-x';
|
||||||
|
DOTTED_CROSS: '--x';
|
||||||
|
SOLID_POINT: '-)';
|
||||||
|
DOTTED_POINT: '--)';
|
||||||
|
|
||||||
|
// Text after colon up to newline or comment delimiter ; or #
|
||||||
|
TXT: ':' (~[\r\n;#])*;
|
||||||
|
|
||||||
|
// Actor identifiers: allow hyphen runs, but forbid -x, --x, -), --)
|
||||||
|
fragment IDCHAR_NO_HYPHEN: ~[+<>:\n,;@# \t-];
|
||||||
|
fragment ALNUM: [A-Za-z0-9_];
|
||||||
|
fragment ALNUM_NOT_X_RPAREN: [A-WYZa-wyz0-9_];
|
||||||
|
fragment H3: '-' '-' '-' ('-')*; // three or more hyphens
|
||||||
|
ACTOR: IDCHAR_NO_HYPHEN+
|
||||||
|
(
|
||||||
|
'-' ALNUM_NOT_X_RPAREN+
|
||||||
|
| '-' '-' ALNUM_NOT_X_RPAREN+
|
||||||
|
| H3 ALNUM+
|
||||||
|
)*;
|
||||||
|
|
||||||
|
|
||||||
|
// Modes to mirror Jison stateful lexing
|
||||||
|
mode ACC_TITLE_MODE;
|
||||||
|
ACC_TITLE_VALUE: (~[\r\n;#])* -> popMode;
|
||||||
|
|
||||||
|
mode ACC_DESCR_MODE;
|
||||||
|
ACC_DESCR_VALUE: (~[\r\n;#])* -> popMode;
|
||||||
|
|
||||||
|
mode ACC_DESCR_MULTILINE_MODE;
|
||||||
|
ACC_DESCR_MULTILINE_END: '}' -> popMode;
|
||||||
|
ACC_DESCR_MULTILINE_VALUE: (~['}'])*;
|
||||||
|
|
||||||
|
mode CONFIG_MODE;
|
||||||
|
CONFIG_CONTENT: (~[}])+;
|
||||||
|
CONFIG_END: '}' -> popMode;
|
||||||
|
|
||||||
|
|
||||||
|
// ID mode: after participant/actor, allow same-line WS/comments; pop on newline
|
||||||
|
mode ID;
|
||||||
|
ID_NEWLINE: ('\r'? '\n')+ -> popMode, type(NEWLINE);
|
||||||
|
ID_SEMI: ';' -> popMode, type(NEWLINE);
|
||||||
|
ID_WS: [ \t]+ -> skip;
|
||||||
|
ID_HASH_COMMENT: '#' ~[\r\n]* -> skip;
|
||||||
|
ID_PERCENT_COMMENT: '%%' ~[\r\n]* -> skip;
|
||||||
|
// recognize 'as' in ID mode and switch to ALIAS
|
||||||
|
ID_AS: 'as' -> type(AS), pushMode(ALIAS);
|
||||||
|
// inline config in ID mode
|
||||||
|
ID_CONFIG_START: '@{' -> type(CONFIG_START), pushMode(CONFIG_MODE);
|
||||||
|
// arrows first to ensure proper splitting before actor
|
||||||
|
ID_BIDIR_SOLID_ARROW: '<<->>' -> type(BIDIRECTIONAL_SOLID_ARROW);
|
||||||
|
ID_BIDIR_DOTTED_ARROW: '<<-->>' -> type(BIDIRECTIONAL_DOTTED_ARROW);
|
||||||
|
ID_SOLID_ARROW: '->>' -> type(SOLID_ARROW);
|
||||||
|
ID_DOTTED_ARROW: '-->>' -> type(DOTTED_ARROW);
|
||||||
|
ID_SOLID_OPEN_ARROW: '->' -> type(SOLID_OPEN_ARROW);
|
||||||
|
ID_DOTTED_OPEN_ARROW: '-->' -> type(DOTTED_OPEN_ARROW);
|
||||||
|
ID_SOLID_CROSS: '-x' -> type(SOLID_CROSS);
|
||||||
|
ID_DOTTED_CROSS: '--x' -> type(DOTTED_CROSS);
|
||||||
|
ID_SOLID_POINT: '-)' -> type(SOLID_POINT);
|
||||||
|
ID_DOTTED_POINT: '--)' -> type(DOTTED_POINT);
|
||||||
|
ID_ACTOR: IDCHAR_NO_HYPHEN+
|
||||||
|
(
|
||||||
|
'-' ALNUM_NOT_X_RPAREN+
|
||||||
|
| '--' ALNUM_NOT_X_RPAREN+
|
||||||
|
| '-' '-' '-' '-'* ALNUM+
|
||||||
|
)* -> type(ACTOR);
|
||||||
|
|
||||||
|
// ALIAS mode: after 'as', capture rest-of-line as TXT (alias display)
|
||||||
|
mode ALIAS;
|
||||||
|
ALIAS_NEWLINE: ('\r'? '\n')+ -> popMode, popMode, type(NEWLINE);
|
||||||
|
ALIAS_SEMI: ';' -> popMode, popMode, type(NEWLINE);
|
||||||
|
ALIAS_WS: [ \t]+ -> skip;
|
||||||
|
ALIAS_HASH_COMMENT: '#' ~[\r\n]* -> skip;
|
||||||
|
ALIAS_PERCENT_COMMENT: '%%' ~[\r\n]* -> skip;
|
||||||
|
// inline config allowed after alias as well
|
||||||
|
ALIAS_CONFIG_START: '@{' -> type(CONFIG_START), pushMode(CONFIG_MODE);
|
||||||
|
// Prefer capturing the remainder of the line as TXT for alias/description
|
||||||
|
ALIAS_TXT: (~[\r\n;#])+ -> type(TXT);
|
||||||
|
// arrows before actor pattern to split properly (kept for parity, though not used after AS)
|
||||||
|
ALIAS_BIDIR_SOLID_ARROW: '<<->>' -> type(BIDIRECTIONAL_SOLID_ARROW);
|
||||||
|
ALIAS_BIDIR_DOTTED_ARROW: '<<-->>' -> type(BIDIRECTIONAL_DOTTED_ARROW);
|
||||||
|
ALIAS_SOLID_ARROW: '->>' -> type(SOLID_ARROW);
|
||||||
|
ALIAS_DOTTED_ARROW: '-->>' -> type(DOTTED_ARROW);
|
||||||
|
ALIAS_SOLID_OPEN_ARROW: '->' -> type(SOLID_OPEN_ARROW);
|
||||||
|
ALIAS_DOTTED_OPEN_ARROW: '-->' -> type(DOTTED_OPEN_ARROW);
|
||||||
|
ALIAS_SOLID_CROSS: '-x' -> type(SOLID_CROSS);
|
||||||
|
ALIAS_DOTTED_CROSS: '--x' -> type(DOTTED_CROSS);
|
||||||
|
ALIAS_SOLID_POINT: '-)' -> type(SOLID_POINT);
|
||||||
|
ALIAS_DOTTED_POINT: '--)' -> type(DOTTED_POINT);
|
||||||
|
ALIAS_ACTOR: IDCHAR_NO_HYPHEN+
|
||||||
|
(
|
||||||
|
'-' ALNUM_NOT_X_RPAREN+
|
||||||
|
| '--' ALNUM_NOT_X_RPAREN+
|
||||||
|
| '-' '-' '-' '-'* ALNUM+
|
||||||
|
)* -> type(ACTOR);
|
||||||
|
|
||||||
|
// LINE mode: after 'title' (no colon), pop at newline
|
||||||
|
mode LINE;
|
||||||
|
LINE_NEWLINE: ('\r'? '\n')+ -> popMode, type(NEWLINE);
|
||||||
|
LINE_SEMI: ';' -> popMode, type(NEWLINE);
|
||||||
|
LINE_WS: [ \t]+ -> skip;
|
||||||
|
LINE_HASH_COMMENT: '#' ~[\r\n]* -> skip;
|
||||||
|
LINE_PERCENT_COMMENT: '%%' ~[\r\n]* -> skip;
|
||||||
|
// Prefer capturing the remainder of the line as a single TXT token
|
||||||
|
LINE_TXT: (~[\r\n;#])+ -> type(TXT);
|
||||||
|
// allow arrows; placed after TXT so it won't split titles
|
||||||
|
LINE_BIDIR_SOLID_ARROW: '<<->>' -> type(BIDIRECTIONAL_SOLID_ARROW);
|
||||||
|
LINE_BIDIR_DOTTED_ARROW: '<<-->>' -> type(BIDIRECTIONAL_DOTTED_ARROW);
|
||||||
|
LINE_SOLID_ARROW: '->>' -> type(SOLID_ARROW);
|
||||||
|
LINE_DOTTED_ARROW: '-->>' -> type(DOTTED_ARROW);
|
||||||
|
LINE_SOLID_OPEN_ARROW: '->' -> type(SOLID_OPEN_ARROW);
|
||||||
|
LINE_DOTTED_OPEN_ARROW: '-->' -> type(DOTTED_OPEN_ARROW);
|
||||||
|
LINE_SOLID_CROSS: '-x' -> type(SOLID_CROSS);
|
||||||
|
LINE_DOTTED_CROSS: '--x' -> type(DOTTED_CROSS);
|
||||||
|
LINE_SOLID_POINT: '-)' -> type(SOLID_POINT);
|
||||||
|
LINE_DOTTED_POINT: '--)' -> type(DOTTED_POINT);
|
||||||
|
// Keep ACTOR for parity if TXT is not applicable
|
||||||
|
LINE_ACTOR: IDCHAR_NO_HYPHEN+
|
||||||
|
(
|
||||||
|
'-' ALNUM_NOT_X_RPAREN+
|
||||||
|
| '--' ALNUM_NOT_X_RPAREN+
|
||||||
|
| '-' '-' '-' '-'* ALNUM+
|
||||||
|
)* -> type(ACTOR);
|
||||||
|
|
@@ -0,0 +1,150 @@
|
|||||||
|
parser grammar SequenceParser;
|
||||||
|
|
||||||
|
options {
|
||||||
|
tokenVocab = SequenceLexer;
|
||||||
|
}
|
||||||
|
|
||||||
|
start: (NEWLINE)* SD document EOF;
|
||||||
|
|
||||||
|
document: (line | loopBlock | rectBlock | boxBlock | optBlock | altBlock | parBlock | parOverBlock | breakBlock | criticalBlock)* statement?;
|
||||||
|
|
||||||
|
line: statement? NEWLINE;
|
||||||
|
|
||||||
|
statement
|
||||||
|
: participantStatement
|
||||||
|
| createStatement
|
||||||
|
| destroyStatement
|
||||||
|
| signalStatement
|
||||||
|
| noteStatement
|
||||||
|
| linksStatement
|
||||||
|
| linkStatement
|
||||||
|
| propertiesStatement
|
||||||
|
| detailsStatement
|
||||||
|
| activationStatement
|
||||||
|
| autonumberStatement
|
||||||
|
| titleStatement
|
||||||
|
| legacyTitleStatement
|
||||||
|
| accTitleStatement
|
||||||
|
| accDescrStatement
|
||||||
|
| accDescrMultilineStatement
|
||||||
|
;
|
||||||
|
|
||||||
|
createStatement
|
||||||
|
: CREATE (PARTICIPANT | PARTICIPANT_ACTOR) actor (AS restOfLine)?
|
||||||
|
;
|
||||||
|
|
||||||
|
destroyStatement
|
||||||
|
: DESTROY actor
|
||||||
|
;
|
||||||
|
|
||||||
|
participantStatement
|
||||||
|
: PARTICIPANT actorWithConfig
|
||||||
|
| (PARTICIPANT | PARTICIPANT_ACTOR) actor (AS restOfLine)?
|
||||||
|
;
|
||||||
|
|
||||||
|
actorWithConfig
|
||||||
|
: ACTOR configObject
|
||||||
|
;
|
||||||
|
|
||||||
|
configObject
|
||||||
|
: CONFIG_START CONFIG_CONTENT CONFIG_END
|
||||||
|
;
|
||||||
|
|
||||||
|
signalStatement
|
||||||
|
: actor signaltype (PLUS actor | MINUS actor | actor) text2
|
||||||
|
;
|
||||||
|
noteStatement
|
||||||
|
: NOTE RIGHT_OF actor text2
|
||||||
|
| NOTE LEFT_OF actor text2
|
||||||
|
| NOTE OVER actor (COMMA actor)? text2
|
||||||
|
;
|
||||||
|
|
||||||
|
linksStatement
|
||||||
|
: LINKS actor text2
|
||||||
|
;
|
||||||
|
|
||||||
|
linkStatement
|
||||||
|
: LINK actor text2
|
||||||
|
;
|
||||||
|
|
||||||
|
propertiesStatement
|
||||||
|
: PROPERTIES actor text2
|
||||||
|
;
|
||||||
|
|
||||||
|
detailsStatement
|
||||||
|
: DETAILS actor text2
|
||||||
|
;
|
||||||
|
|
||||||
|
autonumberStatement
|
||||||
|
: AUTONUMBER // enable default numbering
|
||||||
|
| AUTONUMBER OFF // disable numbering
|
||||||
|
| AUTONUMBER ACTOR // start value
|
||||||
|
| AUTONUMBER ACTOR ACTOR // start and step
|
||||||
|
;
|
||||||
|
|
||||||
|
activationStatement
|
||||||
|
: ACTIVATE actor
|
||||||
|
| DEACTIVATE actor
|
||||||
|
;
|
||||||
|
titleStatement
|
||||||
|
: TITLE
|
||||||
|
| TITLE restOfLine
|
||||||
|
| TITLE ACTOR+ // title without colon
|
||||||
|
;
|
||||||
|
accTitleStatement
|
||||||
|
: ACC_TITLE ACC_TITLE_VALUE
|
||||||
|
;
|
||||||
|
accDescrStatement
|
||||||
|
: ACC_DESCR ACC_DESCR_VALUE
|
||||||
|
;
|
||||||
|
accDescrMultilineStatement
|
||||||
|
: ACC_DESCR_MULTI ACC_DESCR_MULTILINE_VALUE ACC_DESCR_MULTILINE_END
|
||||||
|
;
|
||||||
|
legacyTitleStatement
|
||||||
|
: LEGACY_TITLE
|
||||||
|
;
|
||||||
|
|
||||||
|
// Blocks
|
||||||
|
loopBlock: LOOP restOfLine? document END;
|
||||||
|
rectBlock: RECT restOfLine? document END;
|
||||||
|
boxBlock: BOX restOfLine? document END;
|
||||||
|
optBlock: OPT restOfLine? document END;
|
||||||
|
altBlock: ALT restOfLine? altSections END;
|
||||||
|
parBlock: PAR restOfLine? parSections END;
|
||||||
|
parOverBlock: PAR_OVER restOfLine? parSections END;
|
||||||
|
breakBlock: BREAK restOfLine? document END;
|
||||||
|
criticalBlock: CRITICAL restOfLine? optionSections END;
|
||||||
|
|
||||||
|
altSections: document (elseSection)*;
|
||||||
|
elseSection: ELSE restOfLine? document;
|
||||||
|
|
||||||
|
parSections: document (andSection)*;
|
||||||
|
andSection: AND restOfLine? document;
|
||||||
|
|
||||||
|
optionSections: document (optionSection)*;
|
||||||
|
optionSection: OPTION restOfLine? document;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
actor: ACTOR;
|
||||||
|
|
||||||
|
signaltype
|
||||||
|
: SOLID_ARROW
|
||||||
|
| DOTTED_ARROW
|
||||||
|
| SOLID_OPEN_ARROW
|
||||||
|
| DOTTED_OPEN_ARROW
|
||||||
|
| SOLID_CROSS
|
||||||
|
| DOTTED_CROSS
|
||||||
|
| SOLID_POINT
|
||||||
|
| DOTTED_POINT
|
||||||
|
| BIDIRECTIONAL_SOLID_ARROW
|
||||||
|
| BIDIRECTIONAL_DOTTED_ARROW
|
||||||
|
;
|
||||||
|
|
||||||
|
restOfLine: TXT;
|
||||||
|
|
||||||
|
text2: TXT;
|
||||||
|
|
@@ -0,0 +1,738 @@
|
|||||||
|
/**
|
||||||
|
* ANTLR-based Sequence Diagram Parser (initial implementation)
|
||||||
|
*
|
||||||
|
* Mirrors the flowchart setup: provides an ANTLR entry compatible with the Jison interface.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { CharStream, CommonTokenStream, ParseTreeWalker, BailErrorStrategy } from 'antlr4ng';
|
||||||
|
import { SequenceLexer } from './generated/SequenceLexer.js';
|
||||||
|
import { SequenceParser } from './generated/SequenceParser.js';
|
||||||
|
|
||||||
|
class ANTLRSequenceParser {
|
||||||
|
yy: any = null;
|
||||||
|
|
||||||
|
private mapSignalType(op: string): number | undefined {
|
||||||
|
const LT = this.yy?.LINETYPE;
|
||||||
|
if (!LT) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
switch (op) {
|
||||||
|
case '->':
|
||||||
|
return LT.SOLID_OPEN;
|
||||||
|
case '-->':
|
||||||
|
return LT.DOTTED_OPEN;
|
||||||
|
case '->>':
|
||||||
|
return LT.SOLID;
|
||||||
|
case '-->>':
|
||||||
|
return LT.DOTTED;
|
||||||
|
case '<<->>':
|
||||||
|
return LT.BIDIRECTIONAL_SOLID;
|
||||||
|
case '<<-->>':
|
||||||
|
return LT.BIDIRECTIONAL_DOTTED;
|
||||||
|
case '-x':
|
||||||
|
return LT.SOLID_CROSS;
|
||||||
|
case '--x':
|
||||||
|
return LT.DOTTED_CROSS;
|
||||||
|
case '-)':
|
||||||
|
return LT.SOLID_POINT;
|
||||||
|
case '--)':
|
||||||
|
return LT.DOTTED_POINT;
|
||||||
|
default:
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
parse(input: string): any {
|
||||||
|
if (!this.yy) {
|
||||||
|
throw new Error('Sequence ANTLR parser missing yy (database).');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset DB to match Jison behavior
|
||||||
|
this.yy.clear();
|
||||||
|
|
||||||
|
const inputStream = CharStream.fromString(input);
|
||||||
|
const lexer = new SequenceLexer(inputStream);
|
||||||
|
const tokenStream = new CommonTokenStream(lexer);
|
||||||
|
const parser = new SequenceParser(tokenStream);
|
||||||
|
|
||||||
|
// Fail-fast on any syntax error (matches Jison throwing behavior)
|
||||||
|
const anyParser = parser as unknown as {
|
||||||
|
getErrorHandler?: () => unknown;
|
||||||
|
setErrorHandler?: (h: unknown) => void;
|
||||||
|
errorHandler?: unknown;
|
||||||
|
};
|
||||||
|
const currentHandler = anyParser.getErrorHandler?.() ?? anyParser.errorHandler;
|
||||||
|
if (!currentHandler || (currentHandler as any)?.constructor?.name !== 'BailErrorStrategy') {
|
||||||
|
if (typeof anyParser.setErrorHandler === 'function') {
|
||||||
|
anyParser.setErrorHandler(new BailErrorStrategy());
|
||||||
|
} else {
|
||||||
|
(parser as any).errorHandler = new BailErrorStrategy();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const tree = parser.start();
|
||||||
|
|
||||||
|
const db = this.yy;
|
||||||
|
|
||||||
|
// Minimal listener for participants and simple messages
|
||||||
|
const listener: any = {
|
||||||
|
// Required hooks for ParseTreeWalker
|
||||||
|
visitTerminal(_node?: unknown) {
|
||||||
|
void _node;
|
||||||
|
},
|
||||||
|
visitErrorNode(_node?: unknown) {
|
||||||
|
void _node;
|
||||||
|
},
|
||||||
|
enterEveryRule(_ctx?: unknown) {
|
||||||
|
void _ctx;
|
||||||
|
},
|
||||||
|
exitEveryRule(_ctx?: unknown) {
|
||||||
|
void _ctx;
|
||||||
|
},
|
||||||
|
|
||||||
|
// loop block: add start on enter, end on exit to wrap inner content
|
||||||
|
enterLoopBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const rest = ctx.restOfLine?.();
|
||||||
|
const raw = rest ? (rest.getText?.() as string | undefined) : undefined;
|
||||||
|
const msgText =
|
||||||
|
raw !== undefined ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.LOOP_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitLoopBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.LOOP_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
exitParticipantStatement(ctx: any) {
|
||||||
|
// Extended participant syntax: participant <ACTOR>@{...}
|
||||||
|
const awc = ctx.actorWithConfig?.();
|
||||||
|
if (awc) {
|
||||||
|
const awcCtx = Array.isArray(awc) ? awc[0] : awc;
|
||||||
|
const idTok = awcCtx?.ACTOR?.();
|
||||||
|
const id = (Array.isArray(idTok) ? idTok[0] : idTok)?.getText?.() as string | undefined;
|
||||||
|
if (!id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const cfgObj = awcCtx?.configObject?.();
|
||||||
|
const cfgCtx = Array.isArray(cfgObj) ? cfgObj[0] : cfgObj;
|
||||||
|
const cfgTok = cfgCtx?.CONFIG_CONTENT?.();
|
||||||
|
const metadata = (Array.isArray(cfgTok) ? cfgTok[0] : cfgTok)?.getText?.() as
|
||||||
|
| string
|
||||||
|
| undefined;
|
||||||
|
// Important: let errors from YAML parsing propagate for invalid configs
|
||||||
|
db.addActor(id, id, { text: id, type: 'participant' }, 'participant', metadata);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const hasActor = !!ctx.PARTICIPANT_ACTOR?.();
|
||||||
|
const draw = hasActor ? 'actor' : 'participant';
|
||||||
|
|
||||||
|
const id = ctx.actor?.(0)?.getText?.() as string | undefined;
|
||||||
|
if (!id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let display = id;
|
||||||
|
if (ctx.AS) {
|
||||||
|
let raw: string | undefined;
|
||||||
|
const rest = ctx.restOfLine?.();
|
||||||
|
raw = rest?.getText?.() as string | undefined;
|
||||||
|
if (raw === undefined && ctx.TXT) {
|
||||||
|
const t = ctx.TXT();
|
||||||
|
raw = Array.isArray(t)
|
||||||
|
? (t[0]?.getText?.() as string | undefined)
|
||||||
|
: (t?.getText?.() as string | undefined);
|
||||||
|
}
|
||||||
|
if (raw !== undefined) {
|
||||||
|
const trimmed = raw.startsWith(':') ? raw.slice(1) : raw;
|
||||||
|
const v = trimmed.trim();
|
||||||
|
if (v) {
|
||||||
|
display = v;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const desc = { text: display, type: draw };
|
||||||
|
db.addActor(id, id, desc, draw);
|
||||||
|
} catch (_e) {
|
||||||
|
// swallow to keep parity with Jison robustness
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
exitCreateStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const hasActor = !!ctx.PARTICIPANT_ACTOR?.();
|
||||||
|
const draw = hasActor ? 'actor' : 'participant';
|
||||||
|
const id = ctx.actor?.()?.getText?.() as string | undefined;
|
||||||
|
if (!id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let display = id;
|
||||||
|
if (ctx.AS) {
|
||||||
|
let raw: string | undefined;
|
||||||
|
const rest = ctx.restOfLine?.();
|
||||||
|
raw = rest?.getText?.() as string | undefined;
|
||||||
|
if (raw === undefined && ctx.TXT) {
|
||||||
|
const t = ctx.TXT();
|
||||||
|
raw = Array.isArray(t)
|
||||||
|
? (t[0]?.getText?.() as string | undefined)
|
||||||
|
: (t?.getText?.() as string | undefined);
|
||||||
|
}
|
||||||
|
if (raw !== undefined) {
|
||||||
|
const trimmed = raw.startsWith(':') ? raw.slice(1) : raw;
|
||||||
|
const v = trimmed.trim();
|
||||||
|
if (v) {
|
||||||
|
display = v;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
db.addActor(id, id, { text: display, type: draw }, draw);
|
||||||
|
const msgs = db.getMessages?.() ?? [];
|
||||||
|
db.getCreatedActors?.().set(id, msgs.length);
|
||||||
|
} catch (_e) {
|
||||||
|
// ignore to keep resilience
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
exitDestroyStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const id = ctx.actor?.()?.getText?.() as string | undefined;
|
||||||
|
if (!id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const msgs = db.getMessages?.() ?? [];
|
||||||
|
db.getDestroyedActors?.().set(id, msgs.length);
|
||||||
|
} catch (_e) {
|
||||||
|
// ignore to keep resilience
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// opt block
|
||||||
|
enterOptBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.OPT_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitOptBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.OPT_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
// alt block
|
||||||
|
enterAltBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.ALT_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitAltBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.ALT_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
enterElseSection(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.ALT_ELSE);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
// par and par_over blocks
|
||||||
|
enterParBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.PAR_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
enterParOverBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.PAR_OVER_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitParBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.PAR_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitParOverBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.PAR_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
enterAndSection(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.PAR_AND);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
// critical block
|
||||||
|
enterCriticalBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.CRITICAL_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitCriticalBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.CRITICAL_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
enterOptionSection(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.CRITICAL_OPTION);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
// break block
|
||||||
|
enterBreakBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.BREAK_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitBreakBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.BREAK_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
// rect block
|
||||||
|
enterRectBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
const msgText = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : undefined;
|
||||||
|
const msg = msgText !== undefined ? db.parseMessage(msgText) : undefined;
|
||||||
|
db.addSignal(undefined, undefined, msg, db.LINETYPE.RECT_START);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitRectBlock() {
|
||||||
|
try {
|
||||||
|
db.addSignal(undefined, undefined, undefined, db.LINETYPE.RECT_END);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
// box block
|
||||||
|
enterBoxBlock(ctx: any) {
|
||||||
|
try {
|
||||||
|
const raw = ctx.restOfLine?.()?.getText?.() as string | undefined;
|
||||||
|
// raw may come from LINE_TXT (no leading colon) or TXT (leading colon)
|
||||||
|
const line = raw ? (raw.startsWith(':') ? raw.slice(1) : raw).trim() : '';
|
||||||
|
const data = db.parseBoxData(line);
|
||||||
|
db.addBox(data);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitBoxBlock() {
|
||||||
|
try {
|
||||||
|
// boxEnd is private in TS types; cast to any to call it here like Jison does via apply()
|
||||||
|
db.boxEnd();
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
exitSignalStatement(ctx: any) {
|
||||||
|
const a1Raw = ctx.actor(0)?.getText?.() as string | undefined;
|
||||||
|
const a2 = ctx.actor(1)?.getText?.();
|
||||||
|
const st = ctx.signaltype?.();
|
||||||
|
const stTextRaw = st ? st.getText() : '';
|
||||||
|
|
||||||
|
// Workaround for current lexer attaching '-' to the left actor (e.g., 'Alice-' + '>>')
|
||||||
|
let a1 = a1Raw ?? '';
|
||||||
|
let op = stTextRaw;
|
||||||
|
if (a1 && /-+$/.test(a1)) {
|
||||||
|
const m = /-+$/.exec(a1)![0];
|
||||||
|
a1 = a1.slice(0, -m.length);
|
||||||
|
op = m + op; // restore full operator, e.g., '-' + '>>' => '->>' or '--' + '>' => '-->'
|
||||||
|
}
|
||||||
|
|
||||||
|
const typ = listener._mapSignal(op);
|
||||||
|
if (typ === undefined) {
|
||||||
|
return; // Not a recognized operator; skip adding a signal
|
||||||
|
}
|
||||||
|
const t2 = ctx.text2?.();
|
||||||
|
const msgTok = t2 ? t2.getText() : undefined;
|
||||||
|
const msgText = msgTok?.startsWith(':') ? msgTok.slice(1) : undefined;
|
||||||
|
const msg = msgText ? db.parseMessage(msgText) : undefined;
|
||||||
|
|
||||||
|
// Ensure participants exist like Jison does
|
||||||
|
const actorsMap = db.getActors?.();
|
||||||
|
const ensure = (id?: string) => {
|
||||||
|
if (!id) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!actorsMap?.has(id)) {
|
||||||
|
db.addActor(id, id, { text: id, type: 'participant' }, 'participant');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
ensure(a1);
|
||||||
|
ensure(a2);
|
||||||
|
|
||||||
|
const hasPlus = !!ctx.PLUS?.();
|
||||||
|
const hasMinus = !!ctx.MINUS?.();
|
||||||
|
|
||||||
|
// Main signal; pass 'activate' flag if there is a plus before the target actor
|
||||||
|
db.addSignal(a1, a2, msg, typ, hasPlus);
|
||||||
|
|
||||||
|
// One-line activation/deactivation side-effects
|
||||||
|
if (hasPlus && a2) {
|
||||||
|
db.addSignal(a2, undefined, undefined, db.LINETYPE.ACTIVE_START);
|
||||||
|
}
|
||||||
|
if (hasMinus && a1) {
|
||||||
|
db.addSignal(a1, undefined, undefined, db.LINETYPE.ACTIVE_END);
|
||||||
|
}
|
||||||
|
},
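// Worked example of the reconstruction above (operator shapes as emitted by the lexer):
// for `Alice->>Bob: Hi` the lexer may emit actor text 'Alice-' plus signaltype '>>',
// so op becomes '-' + '>>' === '->>'; the same trailing-dash regex also restores
// multi-dash operators, e.g. 'Bob--' + 'x' becomes '--x'.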
|
||||||
|
exitNoteStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const t2 = ctx.text2?.();
|
||||||
|
const msgTok = t2 ? t2.getText() : undefined;
|
||||||
|
const msgText = msgTok?.startsWith(':') ? msgTok.slice(1) : undefined;
|
||||||
|
const text = msgText ? db.parseMessage(msgText) : { text: '' };
|
||||||
|
|
||||||
|
// Determine placement and actors
|
||||||
|
let placement = db.PLACEMENT.RIGHTOF;
|
||||||
|
|
||||||
|
// Collect all actor texts using index-based accessor to be robust across runtimes
|
||||||
|
const actorIds: string[] = [];
|
||||||
|
if (typeof ctx.actor === 'function') {
|
||||||
|
let i = 0;
|
||||||
|
// @ts-ignore - antlr4ng contexts allow indexed accessors
|
||||||
|
while (true) {
|
||||||
|
const node = ctx.actor(i);
|
||||||
|
if (!node || typeof node.getText !== 'function') {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
actorIds.push(node.getText());
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
// Fallback to single access when no indexed nodes are exposed
|
||||||
|
if (actorIds.length === 0) {
|
||||||
|
// @ts-ignore - antlr4ng exposes single-argument accessor in some builds
|
||||||
|
const single = ctx.actor();
|
||||||
|
const txt =
|
||||||
|
single && typeof single.getText === 'function' ? single.getText() : undefined;
|
||||||
|
if (txt) {
|
||||||
|
actorIds.push(txt);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ctx.RIGHT_OF?.()) {
|
||||||
|
placement = db.PLACEMENT.RIGHTOF;
|
||||||
|
// keep first actor only
|
||||||
|
if (actorIds.length > 1) {
|
||||||
|
actorIds.splice(1);
|
||||||
|
}
|
||||||
|
} else if (ctx.LEFT_OF?.()) {
|
||||||
|
placement = db.PLACEMENT.LEFTOF;
|
||||||
|
if (actorIds.length > 1) {
|
||||||
|
actorIds.splice(1);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
placement = db.PLACEMENT.OVER;
|
||||||
|
// keep one or two actors as collected
|
||||||
|
if (actorIds.length > 2) {
|
||||||
|
actorIds.splice(2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure actors exist
|
||||||
|
const actorsMap = db.getActors?.();
|
||||||
|
for (const id of actorIds) {
|
||||||
|
if (id && !actorsMap?.has(id)) {
|
||||||
|
db.addActor(id, id, { text: id, type: 'participant' }, 'participant');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const actorParam: any = actorIds.length > 1 ? actorIds : actorIds[0];
|
||||||
|
db.addNote(actorParam, placement, {
|
||||||
|
text: text.text,
|
||||||
|
wrap: text.wrap,
|
||||||
|
});
|
||||||
|
} catch (_e) {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
},
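// Placement illustration for the branches above: `Note right of Alice: txt` keeps a single
// actor with PLACEMENT.RIGHTOF, `Note left of Alice: txt` uses PLACEMENT.LEFTOF, and
// `Note over Alice,Bob: txt` keeps up to two actors with PLACEMENT.OVER, so addNote
// receives either a single actor id or an [id, id] pair.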
|
||||||
|
exitLinksStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const a = ctx.actor?.()?.getText?.() as string | undefined;
|
||||||
|
const t2 = ctx.text2?.();
|
||||||
|
const msgTok = t2 ? t2.getText() : undefined;
|
||||||
|
const msgText = msgTok?.startsWith(':') ? msgTok.slice(1) : undefined;
|
||||||
|
const text = msgText ? db.parseMessage(msgText) : { text: '' };
|
||||||
|
if (!a) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const actorsMap = db.getActors?.();
|
||||||
|
if (!actorsMap?.has(a)) {
|
||||||
|
db.addActor(a, a, { text: a, type: 'participant' }, 'participant');
|
||||||
|
}
|
||||||
|
db.addLinks(a, text);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitLinkStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const a = ctx.actor?.()?.getText?.() as string | undefined;
|
||||||
|
const t2 = ctx.text2?.();
|
||||||
|
const msgTok = t2 ? t2.getText() : undefined;
|
||||||
|
const msgText = msgTok?.startsWith(':') ? msgTok.slice(1) : undefined;
|
||||||
|
const text = msgText ? db.parseMessage(msgText) : { text: '' };
|
||||||
|
if (!a) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const actorsMap = db.getActors?.();
|
||||||
|
if (!actorsMap?.has(a)) {
|
||||||
|
db.addActor(a, a, { text: a, type: 'participant' }, 'participant');
|
||||||
|
}
|
||||||
|
db.addALink(a, text);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitPropertiesStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const a = ctx.actor?.()?.getText?.() as string | undefined;
|
||||||
|
const t2 = ctx.text2?.();
|
||||||
|
const msgTok = t2 ? t2.getText() : undefined;
|
||||||
|
const msgText = msgTok?.startsWith(':') ? msgTok.slice(1) : undefined;
|
||||||
|
const text = msgText ? db.parseMessage(msgText) : { text: '' };
|
||||||
|
if (!a) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const actorsMap = db.getActors?.();
|
||||||
|
if (!actorsMap?.has(a)) {
|
||||||
|
db.addActor(a, a, { text: a, type: 'participant' }, 'participant');
|
||||||
|
}
|
||||||
|
db.addProperties(a, text);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitDetailsStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const a = ctx.actor?.()?.getText?.() as string | undefined;
|
||||||
|
const t2 = ctx.text2?.();
|
||||||
|
const msgTok = t2 ? t2.getText() : undefined;
|
||||||
|
const msgText = msgTok?.startsWith(':') ? msgTok.slice(1) : undefined;
|
||||||
|
const text = msgText ? db.parseMessage(msgText) : { text: '' };
|
||||||
|
if (!a) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const actorsMap = db.getActors?.();
|
||||||
|
if (!actorsMap?.has(a)) {
|
||||||
|
db.addActor(a, a, { text: a, type: 'participant' }, 'participant');
|
||||||
|
}
|
||||||
|
db.addDetails(a, text);
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitActivationStatement(ctx: any) {
|
||||||
|
const a = ctx.actor?.()?.getText?.();
|
||||||
|
if (!a) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const actorsMap = db.getActors?.();
|
||||||
|
if (!actorsMap?.has(a)) {
|
||||||
|
db.addActor(a, a, { text: a, type: 'participant' }, 'participant');
|
||||||
|
}
|
||||||
|
const typ = ctx.ACTIVATE?.() ? db.LINETYPE.ACTIVE_START : db.LINETYPE.ACTIVE_END;
|
||||||
|
db.addSignal(a, a, { text: '', wrap: false }, typ);
|
||||||
|
},
|
||||||
|
exitAutonumberStatement(ctx: any) {
|
||||||
|
// Parse variants: autonumber | autonumber off | autonumber <start> | autonumber <start> <step>
|
||||||
|
const isOff = !!(ctx.OFF && typeof ctx.OFF === 'function' && ctx.OFF());
|
||||||
|
const tokens = ctx.ACTOR && typeof ctx.ACTOR === 'function' ? ctx.ACTOR() : undefined;
|
||||||
|
const parts: string[] = Array.isArray(tokens)
|
||||||
|
? tokens
|
||||||
|
.map((t: any) => (typeof t.getText === 'function' ? t.getText() : undefined))
|
||||||
|
.filter(Boolean)
|
||||||
|
: tokens && typeof tokens.getText === 'function'
|
||||||
|
? [tokens.getText()]
|
||||||
|
: [];
|
||||||
|
|
||||||
|
let start: number | undefined;
|
||||||
|
let step: number | undefined;
|
||||||
|
if (parts.length >= 1) {
|
||||||
|
const v = Number.parseInt(parts[0], 10);
|
||||||
|
if (!Number.isNaN(v)) {
|
||||||
|
start = v;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (parts.length >= 2) {
|
||||||
|
const v = Number.parseInt(parts[1], 10);
|
||||||
|
if (!Number.isNaN(v)) {
|
||||||
|
step = v;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const visible = !isOff;
|
||||||
|
if (visible) {
|
||||||
|
db.enableSequenceNumbers();
|
||||||
|
} else {
|
||||||
|
db.disableSequenceNumbers();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match Jison behavior: if only start is provided, default step to 1
|
||||||
|
const payload = {
|
||||||
|
type: 'sequenceIndex' as const,
|
||||||
|
sequenceIndex: start,
|
||||||
|
sequenceIndexStep: step ?? (start !== undefined ? 1 : undefined),
|
||||||
|
sequenceVisible: visible,
|
||||||
|
signalType: db.LINETYPE.AUTONUMBER,
|
||||||
|
};
|
||||||
|
|
||||||
|
db.apply(payload);
|
||||||
|
},
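// Payload illustration for the mapping above: `autonumber 10 2` applies
// { sequenceIndex: 10, sequenceIndexStep: 2, sequenceVisible: true },
// plain `autonumber 10` defaults the step to 1, and `autonumber off` leaves start/step
// undefined with sequenceVisible: false; all three carry signalType: db.LINETYPE.AUTONUMBER.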
|
||||||
|
exitTitleStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
let titleText: string | undefined;
|
||||||
|
|
||||||
|
// Case 1: If TITLE token carried inline text (legacy path), use it; otherwise fall through
|
||||||
|
if (ctx.TITLE) {
|
||||||
|
const tok = ctx.TITLE()?.getText?.() as string | undefined;
|
||||||
|
if (tok && tok.length > 'title'.length) {
|
||||||
|
const after = tok.slice('title'.length).trim();
|
||||||
|
if (after) {
|
||||||
|
titleText = after;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Case 2: "title:" used restOfLine (TXT) token
|
||||||
|
if (titleText === undefined) {
|
||||||
|
const rest = ctx.restOfLine?.().getText?.() as string | undefined;
|
||||||
|
if (rest !== undefined) {
|
||||||
|
const raw = rest.startsWith(':') ? rest.slice(1) : rest;
|
||||||
|
titleText = raw.trim();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Case 3: title without colon tokenized as ACTOR(s)
|
||||||
|
if (titleText === undefined) {
|
||||||
|
if (ctx.actor) {
|
||||||
|
const nodes = ctx.actor();
|
||||||
|
const parts = Array.isArray(nodes)
|
||||||
|
? nodes.map((a: any) => a.getText())
|
||||||
|
: [nodes?.getText?.()].filter(Boolean);
|
||||||
|
titleText = parts.join(' ');
|
||||||
|
} else if (ctx.ACTOR) {
|
||||||
|
const tokens = ctx.ACTOR();
|
||||||
|
const parts = Array.isArray(tokens)
|
||||||
|
? tokens.map((t: any) => t.getText())
|
||||||
|
: [tokens?.getText?.()].filter(Boolean);
|
||||||
|
titleText = parts.join(' ');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!titleText) {
|
||||||
|
const parts = (ctx.children ?? [])
|
||||||
|
.map((c: any) =>
|
||||||
|
c?.symbol?.type === SequenceLexer.ACTOR ? c.getText?.() : undefined
|
||||||
|
)
|
||||||
|
.filter(Boolean) as string[];
|
||||||
|
if (parts.length) {
|
||||||
|
titleText = parts.join(' ');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (titleText) {
|
||||||
|
db.setDiagramTitle?.(titleText);
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitLegacyTitleStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const tok = ctx.LEGACY_TITLE?.().getText?.() as string | undefined;
|
||||||
|
if (!tok) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const idx = tok.indexOf(':');
|
||||||
|
const titleText = (idx >= 0 ? tok.slice(idx + 1) : tok).trim();
|
||||||
|
if (titleText) {
|
||||||
|
db.setDiagramTitle?.(titleText);
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitAccTitleStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const v = ctx.ACC_TITLE_VALUE?.().getText?.() as string | undefined;
|
||||||
|
if (v !== undefined) {
|
||||||
|
const val = v.trim();
|
||||||
|
if (val) {
|
||||||
|
db.setAccTitle?.(val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitAccDescrStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const v = ctx.ACC_DESCR_VALUE?.().getText?.() as string | undefined;
|
||||||
|
if (v !== undefined) {
|
||||||
|
const val = v.trim();
|
||||||
|
if (val) {
|
||||||
|
db.setAccDescription?.(val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
exitAccDescrMultilineStatement(ctx: any) {
|
||||||
|
try {
|
||||||
|
const v = ctx.ACC_DESCR_MULTILINE_VALUE?.().getText?.() as string | undefined;
|
||||||
|
if (v !== undefined) {
|
||||||
|
const val = v.trim();
|
||||||
|
if (val) {
|
||||||
|
db.setAccDescription?.(val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
_mapSignal: (op: string) => this.mapSignalType(op),
|
||||||
|
    };

    ParseTreeWalker.DEFAULT.walk(listener, tree);
    return tree;
  }
}

// Export in the format expected by the existing code
const parser = new ANTLRSequenceParser();

const exportedParser = {
  parse: (input: string) => parser.parse(input),
  parser: parser,
  yy: null as any,
};

Object.defineProperty(exportedParser, 'yy', {
  get() {
    return parser.yy;
  },
  set(value) {
    parser.yy = value;
  },
});

export default exportedParser;
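The wrapper above mirrors the Jison parser surface: callers assign a database to `yy` (forwarded to the internal `ANTLRSequenceParser` by the property definition) and then call `parse`. A minimal usage sketch, assuming the relative import path and that the `SequenceDB` instance exposes the usual `clear()` and `getMessages()` methods:

import parser from './antlr-parser.js';
import { SequenceDB } from '../../sequenceDb.js'; // assumed path from the antlr folder

const db = new SequenceDB();
db.clear(); // assumed reset hook, normally invoked by the diagram API before parsing
parser.yy = db; // proxied to the internal parser via the getter/setter above
parser.parse('sequenceDiagram\nAlice->>Bob: Hello');
console.log(db.getMessages().length); // the listener callbacks should have recorded one signal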
@@ -0,0 +1,234 @@
|
|||||||
|
import { describe, it, expect } from 'vitest';
|
||||||
|
import type { Token } from 'antlr4ng';
|
||||||
|
import { CharStream } from 'antlr4ng';
|
||||||
|
import { SequenceLexer } from './generated/SequenceLexer.js';
|
||||||
|
|
||||||
|
function lex(input: string): Token[] {
|
||||||
|
const inputStream = CharStream.fromString(input);
|
||||||
|
const lexer = new SequenceLexer(inputStream);
|
||||||
|
return lexer.getAllTokens();
|
||||||
|
}
|
||||||
|
|
||||||
|
function names(tokens: Token[]): string[] {
|
||||||
|
const vocab =
|
||||||
|
(SequenceLexer as any).VOCABULARY ?? new SequenceLexer(CharStream.fromString('')).vocabulary;
|
||||||
|
return tokens.map((t) => vocab.getSymbolicName(t.type) ?? String(t.type));
|
||||||
|
}
|
||||||
|
|
||||||
|
function texts(tokens: Token[]): string[] {
|
||||||
|
return tokens.map((t) => t.text ?? '');
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('Sequence ANTLR Lexer - token coverage (expanded for actor/alias)', () => {
|
||||||
|
const singleTokenCases: { input: string; first: string; label?: string }[] = [
|
||||||
|
{ input: 'sequenceDiagram', first: 'SD' },
|
||||||
|
{ input: ';', first: 'NEWLINE' },
|
||||||
|
{ input: ',', first: 'COMMA' },
|
||||||
|
{ input: 'autonumber', first: 'AUTONUMBER' },
|
||||||
|
{ input: 'off', first: 'OFF' },
|
||||||
|
{ input: 'participant', first: 'PARTICIPANT' },
|
||||||
|
{ input: 'actor', first: 'PARTICIPANT_ACTOR' },
|
||||||
|
{ input: 'create', first: 'CREATE' },
|
||||||
|
{ input: 'destroy', first: 'DESTROY' },
|
||||||
|
{ input: 'box', first: 'BOX' },
|
||||||
|
{ input: 'loop', first: 'LOOP' },
|
||||||
|
{ input: 'rect', first: 'RECT' },
|
||||||
|
{ input: 'opt', first: 'OPT' },
|
||||||
|
{ input: 'alt', first: 'ALT' },
|
||||||
|
{ input: 'else', first: 'ELSE' },
|
||||||
|
{ input: 'par', first: 'PAR' },
|
||||||
|
{ input: 'par_over', first: 'PAR_OVER' },
|
||||||
|
{ input: 'and', first: 'AND' },
|
||||||
|
{ input: 'critical', first: 'CRITICAL' },
|
||||||
|
{ input: 'option', first: 'OPTION' },
|
||||||
|
{ input: 'break', first: 'BREAK' },
|
||||||
|
{ input: 'end', first: 'END' },
|
||||||
|
{ input: 'links', first: 'LINKS' },
|
||||||
|
{ input: 'link', first: 'LINK' },
|
||||||
|
{ input: 'properties', first: 'PROPERTIES' },
|
||||||
|
{ input: 'details', first: 'DETAILS' },
|
||||||
|
{ input: 'over', first: 'OVER' },
|
||||||
|
{ input: 'Note', first: 'NOTE' },
|
||||||
|
{ input: 'activate', first: 'ACTIVATE' },
|
||||||
|
{ input: 'deactivate', first: 'DEACTIVATE' },
|
||||||
|
{ input: 'title', first: 'TITLE' },
|
||||||
|
{ input: '->>', first: 'SOLID_ARROW' },
|
||||||
|
{ input: '<<->>', first: 'BIDIRECTIONAL_SOLID_ARROW' },
|
||||||
|
{ input: '-->>', first: 'DOTTED_ARROW' },
|
||||||
|
{ input: '<<-->>', first: 'BIDIRECTIONAL_DOTTED_ARROW' },
|
||||||
|
{ input: '->', first: 'SOLID_OPEN_ARROW' },
|
||||||
|
{ input: '-->', first: 'DOTTED_OPEN_ARROW' },
|
||||||
|
{ input: '-x', first: 'SOLID_CROSS' },
|
||||||
|
{ input: '--x', first: 'DOTTED_CROSS' },
|
||||||
|
{ input: '-)', first: 'SOLID_POINT' },
|
||||||
|
{ input: '--)', first: 'DOTTED_POINT' },
|
||||||
|
{ input: ':text', first: 'TXT' },
|
||||||
|
{ input: '+', first: 'PLUS' },
|
||||||
|
{ input: '-', first: 'MINUS' },
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const tc of singleTokenCases) {
|
||||||
|
it(`lexes ${tc.label ?? tc.input} -> ${tc.first}`, () => {
|
||||||
|
const ts = lex(tc.input);
|
||||||
|
const ns = names(ts);
|
||||||
|
expect(ns[0]).toBe(tc.first);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
it('lexes LEFT_OF / RIGHT_OF with space', () => {
|
||||||
|
expect(names(lex('left of'))[0]).toBe('LEFT_OF');
|
||||||
|
expect(names(lex('right of'))[0]).toBe('RIGHT_OF');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('lexes LEGACY_TITLE as a single token', () => {
|
||||||
|
const ts = lex('title: Diagram Title');
|
||||||
|
const ns = names(ts);
|
||||||
|
expect(ns[0]).toBe('LEGACY_TITLE');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('lexes accTitle/accDescr single-line values using modes', () => {
|
||||||
|
const t1 = names(lex('accTitle: This is the title'));
|
||||||
|
expect(t1[0]).toBe('ACC_TITLE');
|
||||||
|
expect(t1[1]).toBe('ACC_TITLE_VALUE');
|
||||||
|
|
||||||
|
const t2 = names(lex('accDescr: Accessibility Description'));
|
||||||
|
expect(t2[0]).toBe('ACC_DESCR');
|
||||||
|
expect(t2[1]).toBe('ACC_DESCR_VALUE');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('lexes accDescr multiline block', () => {
|
||||||
|
const ns = names(lex('accDescr {\nHello\n}'));
|
||||||
|
expect(ns[0]).toBe('ACC_DESCR_MULTI');
|
||||||
|
expect(ns).toContain('ACC_DESCR_MULTILINE_VALUE');
|
||||||
|
expect(ns).toContain('ACC_DESCR_MULTILINE_END');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('lexes config block @{ ... }', () => {
|
||||||
|
const ns = names(lex('@{ shape: rounded }'));
|
||||||
|
expect(ns[0]).toBe('CONFIG_START');
|
||||||
|
expect(ns).toContain('CONFIG_CONTENT');
|
||||||
|
expect(ns[ns.length - 1]).toBe('CONFIG_END');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ACTOR / ALIAS edge cases, mirroring Jison patterns
|
||||||
|
it('participant A', () => {
|
||||||
|
const ns = names(lex('participant A'));
|
||||||
|
expect(ns).toEqual(['PARTICIPANT', 'ACTOR']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('participant Alice as A', () => {
|
||||||
|
const ns = names(lex('participant Alice as A'));
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT');
|
||||||
|
expect(ns[1]).toBe('ACTOR');
|
||||||
|
expect(ns[2]).toBe('AS');
|
||||||
|
expect(['ACTOR', 'TXT']).toContain(ns[3]);
|
||||||
|
const ts = texts(lex('participant Alice as A'));
|
||||||
|
expect(ts[1]).toBe('Alice');
|
||||||
|
// The alias part may be tokenized as ACTOR or TXT depending on mode precedence; trim for TXT variant
|
||||||
|
expect(['A']).toContain(ts[3]?.trim?.());
|
||||||
|
});
|
||||||
|
|
||||||
|
it('participant with same-line spaces are skipped in ID mode', () => {
|
||||||
|
const ts = lex('participant Alice');
|
||||||
|
expect(names(ts)).toEqual(['PARTICIPANT', 'ACTOR']);
|
||||||
|
expect(texts(ts)[1]).toBe('Alice');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('participant ID mode: hash comment skipped on same line', () => {
|
||||||
|
const ns = names(lex('participant Alice # comment here'));
|
||||||
|
expect(ns).toEqual(['PARTICIPANT', 'ACTOR']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('participant ID mode: percent comment skipped on same line', () => {
|
||||||
|
const ns = names(lex('participant Alice %% comment here'));
|
||||||
|
expect(ns).toEqual(['PARTICIPANT', 'ACTOR']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('alias ALIAS mode: spaces skipped and comments ignored', () => {
|
||||||
|
const ns = names(lex('participant Alice as A # c'));
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT');
|
||||||
|
expect(ns[1]).toBe('ACTOR');
|
||||||
|
expect(ns[2]).toBe('AS');
|
||||||
|
expect(['ACTOR', 'TXT']).toContain(ns[3]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('title LINE mode: spaces skipped and words tokenized as ACTORs', () => {
|
||||||
|
const ns = names(lex('title My Diagram'));
|
||||||
|
expect(ns).toEqual(['TITLE', 'TXT']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('title LINE mode: percent comment ignored on same line', () => {
|
||||||
|
const ns = names(lex('title Diagram %% hidden'));
|
||||||
|
expect(ns).toEqual(['TITLE', 'TXT']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ID mode pops to default on newline', () => {
|
||||||
|
const ns = names(lex('participant Alice\nactor Bob'));
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT');
|
||||||
|
expect(ns[1]).toBe('ACTOR');
|
||||||
|
expect(ns[2]).toBe('NEWLINE');
|
||||||
|
expect(ns[3]).toBe('PARTICIPANT_ACTOR');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('actor foo-bar (hyphens allowed)', () => {
|
||||||
|
const ts = lex('actor foo-bar');
|
||||||
|
expect(names(ts)).toEqual(['PARTICIPANT_ACTOR', 'ACTOR']);
|
||||||
|
expect(texts(ts)[1]).toBe('foo-bar');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('actor foo--bar (multiple hyphens)', () => {
|
||||||
|
const ts = lex('actor foo--bar');
|
||||||
|
expect(names(ts)).toEqual(['PARTICIPANT_ACTOR', 'ACTOR']);
|
||||||
|
expect(texts(ts)[1]).toBe('foo--bar');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('actor a-x should split into ACTOR and SOLID_CROSS (per Jison exclusion)', () => {
|
||||||
|
const ns = names(lex('actor a-x'));
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT_ACTOR');
|
||||||
|
// Depending on spacing, ACTOR may be 'a' and '-x' is SOLID_CROSS
|
||||||
|
expect(ns.slice(1)).toEqual(['ACTOR', 'SOLID_CROSS']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('actor a--) should split into ACTOR and DOTTED_POINT', () => {
|
||||||
|
const ns = names(lex('actor a--)'));
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT_ACTOR');
|
||||||
|
expect(ns.slice(1)).toEqual(['ACTOR', 'DOTTED_POINT']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('actor a--x should split into ACTOR and DOTTED_CROSS', () => {
|
||||||
|
const ns = names(lex('actor a--x'));
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT_ACTOR');
|
||||||
|
expect(ns.slice(1)).toEqual(['ACTOR', 'DOTTED_CROSS']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('participant with inline config: participant Alice @{shape:rounded}', () => {
|
||||||
|
const ns = names(lex('participant Alice @{shape: rounded}'));
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT');
|
||||||
|
expect(ns[1]).toBe('ACTOR');
|
||||||
|
expect(ns[2]).toBe('CONFIG_START');
|
||||||
|
expect(ns).toContain('CONFIG_CONTENT');
|
||||||
|
expect(ns[ns.length - 1]).toBe('CONFIG_END');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('autonumber with numbers', () => {
|
||||||
|
const ns = names(lex('autonumber 12 3'));
|
||||||
|
expect(ns[0]).toBe('AUTONUMBER');
|
||||||
|
// Our lexer returns NUM greedily regardless of trailing space/newline context; acceptable for parity tests
|
||||||
|
expect(ns).toContain('NUM');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('participant alias across lines: A as Alice then B as Bob', () => {
|
||||||
|
const input = 'participant A as Alice\nparticipant B as Bob';
|
||||||
|
const ns = names(lex(input));
|
||||||
|
// Expect: PARTICIPANT ACTOR AS (TXT|ACTOR) NEWLINE PARTICIPANT ACTOR AS (TXT|ACTOR)
|
||||||
|
expect(ns[0]).toBe('PARTICIPANT');
|
||||||
|
expect(ns[1]).toBe('ACTOR');
|
||||||
|
expect(ns[2]).toBe('AS');
|
||||||
|
expect(['TXT', 'ACTOR']).toContain(ns[3]);
|
||||||
|
expect(ns[4]).toBe('NEWLINE');
|
||||||
|
expect(ns[5]).toBe('PARTICIPANT');
|
||||||
|
expect(ns[6]).toBe('ACTOR');
|
||||||
|
expect(ns[7]).toBe('AS');
|
||||||
|
expect(['TXT', 'ACTOR']).toContain(ns[8]);
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,40 @@
import { describe, it, expect } from 'vitest';
import type { Token } from 'antlr4ng';
import { CharStream } from 'antlr4ng';
import { SequenceLexer } from './generated/SequenceLexer.js';

function lex(input: string): Token[] {
  const inputStream = CharStream.fromString(input);
  const lexer = new SequenceLexer(inputStream);
  const tokens: Token[] = lexer.getAllTokens();
  return tokens;
}

function tokenNames(tokens: Token[], vocabSource?: SequenceLexer): string[] {
  // Map type numbers to symbolic names using the lexer's vocabulary
  const vocab =
    (SequenceLexer as any).VOCABULARY ??
    (vocabSource ?? new SequenceLexer(CharStream.fromString(''))).vocabulary;
  return tokens.map((t) => vocab.getSymbolicName(t.type) ?? String(t.type));
}

describe('Sequence ANTLR Lexer', () => {
  it('lexes title without colon into TITLE followed by ACTOR tokens', () => {
    const input = `sequenceDiagram\n` + `title Diagram Title\n` + `Alice->Bob:Hello`;

    const tokens = lex(input);
    const names = tokenNames(tokens);

    // Expect the start: SD NEWLINE TITLE ACTOR ACTOR NEWLINE
    expect(names.slice(0, 6)).toEqual(['SD', 'NEWLINE', 'TITLE', 'ACTOR', 'ACTOR', 'NEWLINE']);
  });

  it('lexes activate statement', () => {
    const input = `sequenceDiagram\nactivate Alice\n`;
    const tokens = lex(input);
    const names = tokenNames(tokens);

    // Expect: SD NEWLINE ACTIVATE ACTOR NEWLINE
    expect(names).toEqual(['SD', 'NEWLINE', 'ACTIVATE', 'ACTOR', 'NEWLINE']);
  });
});
@@ -0,0 +1,23 @@
// @ts-ignore: JISON doesn't support types
import jisonParser from './sequenceDiagram.jison';

// Import the ANTLR parser wrapper (safe stub for now)
import antlrParser from './antlr/antlr-parser.js';

// Configuration flag to switch between parsers (same convention as flowcharts)
const USE_ANTLR_PARSER = process.env.USE_ANTLR_PARSER === 'true';

const newParser: any = Object.assign({}, USE_ANTLR_PARSER ? antlrParser : jisonParser);

newParser.parse = (src: string): unknown => {
  // Normalize whitespace like flow does to keep parity with Jison behavior
  const newSrc = src.replace(/}\s*\n/g, '}\n');

  if (USE_ANTLR_PARSER) {
    return antlrParser.parse(newSrc);
  } else {
    return jisonParser.parse(newSrc);
  }
};

export default newParser;
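Because `USE_ANTLR_PARSER` is read once when the module is imported, the parser choice is fixed for the whole process. A hypothetical variant (not part of the file above) that defers the check to call time, so a test harness could flip the flag between parses:

const useAntlr = (): boolean => process.env.USE_ANTLR_PARSER === 'true';

newParser.parse = (src: string): unknown => {
  const newSrc = src.replace(/}\s*\n/g, '}\n'); // same normalization as above
  return (useAntlr() ? antlrParser : jisonParser).parse(newSrc);
};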
@@ -225,6 +225,65 @@ Bob-->Alice: I am good thanks!`;
    expect(diagram.db.showSequenceNumbers()).toBe(true);
  });

  it('should support autonumber with start value', async () => {
    const str = `
sequenceDiagram
autonumber 10
Alice->Bob: Hello
Bob-->Alice: Hi
`;
    const diagram = await Diagram.fromText(str);

    // Verify AUTONUMBER control message
    const autoMsg = diagram.db.getMessages().find((m) => m.type === diagram.db.LINETYPE.AUTONUMBER);
    expect(autoMsg).toBeTruthy();
    expect(autoMsg.message.start).toBe(10);
    expect(autoMsg.message.step).toBe(1);
    expect(autoMsg.message.visible).toBe(true);

    // After render, sequence numbers should be enabled
    await diagram.renderer.draw(str, 'tst', '1.2.3', diagram);
    expect(diagram.db.showSequenceNumbers()).toBe(true);
  });

  it('should support autonumber with start and step values', async () => {
    const str = `
sequenceDiagram
autonumber 5 2
Alice->Bob: Hello
Bob-->Alice: Hi
`;
    const diagram = await Diagram.fromText(str);

    const autoMsg = diagram.db.getMessages().find((m) => m.type === diagram.db.LINETYPE.AUTONUMBER);
    expect(autoMsg).toBeTruthy();
    expect(autoMsg.message.start).toBe(5);
    expect(autoMsg.message.step).toBe(2);
    expect(autoMsg.message.visible).toBe(true);

    await diagram.renderer.draw(str, 'tst', '1.2.3', diagram);
    expect(diagram.db.showSequenceNumbers()).toBe(true);
  });

  it('should support turning autonumber off', async () => {
    const str = `
sequenceDiagram
autonumber off
Alice->Bob: Hello
Bob-->Alice: Hi
`;
    const diagram = await Diagram.fromText(str);

    const autoMsg = diagram.db.getMessages().find((m) => m.type === diagram.db.LINETYPE.AUTONUMBER);
    expect(autoMsg).toBeTruthy();
    expect(autoMsg.message.start).toBeUndefined();
    expect(autoMsg.message.step).toBeUndefined();
    expect(autoMsg.message.visible).toBe(false);

    await diagram.renderer.draw(str, 'tst', '1.2.3', diagram);
    expect(diagram.db.showSequenceNumbers()).toBe(false);
  });

  it('should handle a sequenceDiagram definition with a title:', async () => {
    const diagram = await Diagram.fromText(`
sequenceDiagram
@@ -1,6 +1,7 @@
import type { DiagramDefinition } from '../../diagram-api/types.js';
// @ts-ignore: JISON doesn't support types
import parser from './parser/sequenceDiagram.jison';
// import parser from './parser/sequenceDiagram.jison';
import parser from './parser/sequenceParser.ts';
import { SequenceDB } from './sequenceDb.js';
import styles from './styles.js';
import { setConfig } from '../../diagram-api/diagramAPI.js';
@@ -1,13 +1,7 @@
import mermaid, { type MermaidConfig } from 'mermaid';
import zenuml from '../../../../../mermaid-zenuml/dist/mermaid-zenuml.core.mjs';
import tidyTreeLayout from '../../../../../mermaid-layout-tidy-tree/dist/mermaid-layout-tidy-tree.core.mjs';
import layouts from '../../../../../mermaid-layout-elk/dist/mermaid-layout-elk.core.mjs';

const init = Promise.all([
const init = mermaid.registerExternalDiagrams([zenuml]);
  mermaid.registerExternalDiagrams([zenuml]),
  mermaid.registerLayoutLoaders(layouts),
  mermaid.registerLayoutLoaders(tidyTreeLayout),
]);
mermaid.registerIconPacks([
  {
    name: 'logos',
@@ -33,7 +33,7 @@
    "pathe": "^2.0.3",
    "unocss": "^66.4.2",
    "unplugin-vue-components": "^28.4.0",
    "vite": "^7.0.0",
    "vite": "^6.1.1",
    "vite-plugin-pwa": "^1.0.0",
    "vitepress": "1.6.3",
    "workbox-window": "^7.3.0"
@@ -20,5 +20,3 @@ Each user journey is split into sections, these describe the part of the task
the user is trying to complete.

Tasks syntax is `Task name: <score>: <comma separated list of actors>`

Score is a number between 1 and 5, inclusive.
@@ -13,10 +13,6 @@ const virtualModuleId = 'virtual:mermaid-config';
const resolvedVirtualModuleId = '\0' + virtualModuleId;

export default defineConfig({
  build: {
    // Vite v7 changes the default target and drops old browser support
    target: 'modules',
  },
  optimizeDeps: {
    // vitepress is aliased with replacement `join(DIST_CLIENT_PATH, '/index')`
    // This needs to be excluded from optimization
pnpm-lock.yaml (generated, 2278 changes): file diff suppressed because it is too large.
@@ -11,6 +11,7 @@ pushd packages/mermaid
# Append commit hash to version
jq ".version = .version + \"+${COMMIT_REF:0:7}\"" package.json > package.tmp.json
mv package.tmp.json package.json
yarn link
popd

pnpm run -r clean
@@ -25,14 +26,13 @@ cd mermaid-live-editor
git clean -xdf
rm -rf docs/

# Tells PNPM that mermaid-live-editor is not part of this workspace
# We have to use yarn instead of pnpm because it causes trouble in netlify
touch pnpm-workspace.yaml

# Install dependencies
pnpm install --frozen-lockfile
yarn install

# Link local mermaid to live editor
pnpm link ../packages/mermaid
yarn link mermaid

# Force Build the site
pnpm run build
yarn run build
test-backslash.js (normal file, 26 lines)
@@ -0,0 +1,26 @@
// Test backslash character parsing
const flow = require('./packages/mermaid/src/diagrams/flowchart/flowDb.ts');

// Set up ANTLR parser
process.env.USE_ANTLR_PARSER = 'true';
const antlrParser = require('./packages/mermaid/src/diagrams/flowchart/parser/antlr/antlr-parser.ts');

try {
  console.log('Testing backslash character: \\');

  // Test the problematic input
  const input = 'graph TD; \\ --> A';
  console.log('Input:', input);

  // Parse with ANTLR
  const result = antlrParser.parse(input);
  console.log('Parse result:', result);

  // Check vertices
  const vertices = flow.getVertices();
  console.log('Vertices:', vertices);
  console.log('Backslash vertex:', vertices.get('\\'));
} catch (error) {
  console.error('Error:', error);
}