mirror of
https://github.com/mermaid-js/mermaid.git
synced 2025-09-16 05:49:43 +02:00
Initial skeleton setup
This commit is contained in:
@@ -93,11 +93,13 @@
|
|||||||
config:
|
config:
|
||||||
layout: elk
|
layout: elk
|
||||||
---
|
---
|
||||||
flowchart TB
|
swimlane TB
|
||||||
%% swimlane 1 - A E
|
%% swimlane 1 - A E
|
||||||
%% swimlane 2 - B
|
%% swimlane 2 - B
|
||||||
%% swimlane 3 - C D
|
%% swimlane 3 - C D
|
||||||
|
lane First
|
||||||
|
A
|
||||||
|
end
|
||||||
A --> B(I am B, the wide one) --> C
|
A --> B(I am B, the wide one) --> C
|
||||||
C --> D & F
|
C --> D & F
|
||||||
|
|
||||||
@@ -106,7 +108,7 @@ flowchart TB
|
|||||||
|
|
||||||
B@{ shape: diam}
|
B@{ shape: diam}
|
||||||
</pre>
|
</pre>
|
||||||
<pre id="diagram4" class="mermaid">
|
<pre id="diagram4" class="mermaid2">
|
||||||
---
|
---
|
||||||
config:
|
config:
|
||||||
layout: elk
|
layout: elk
|
||||||
|
@@ -529,7 +529,7 @@ export const render = async (
|
|||||||
'elk.alignment': 'BOTTOM',
|
'elk.alignment': 'BOTTOM',
|
||||||
// 'elk.layered.nodePlacement.bk.fixedAlignment': 'RIGHTDOWN',
|
// 'elk.layered.nodePlacement.bk.fixedAlignment': 'RIGHTDOWN',
|
||||||
// 'elk.edgeRouting': 'UNDEFINED',
|
// 'elk.edgeRouting': 'UNDEFINED',
|
||||||
'elk.layered.crossingMinimization.forceNodeModelOrder': true,
|
'elk.layered.crossingMinimization.forceNodeModelOrder': false,
|
||||||
},
|
},
|
||||||
children: [],
|
children: [],
|
||||||
edges: [],
|
edges: [],
|
||||||
@@ -637,7 +637,7 @@ export const render = async (
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
const copy = JSON.parse(JSON.stringify({ ...elkGraph }));
|
const copy = JSON.parse(JSON.stringify({ ...elkGraph }));
|
||||||
console.log('APA13 layout before', copy);
|
console.log('APA13 layout before', JSON.stringify({ ...elkGraph }));
|
||||||
const g = await elk.layout(elkGraph);
|
const g = await elk.layout(elkGraph);
|
||||||
console.log('APA13 layout', JSON.parse(JSON.stringify(g)));
|
console.log('APA13 layout', JSON.parse(JSON.stringify(g)));
|
||||||
// debugger;
|
// debugger;
|
||||||
|
@@ -39,6 +39,7 @@ export const detectType = function (text: string, config?: MermaidConfig): strin
|
|||||||
.replace(directiveRegex, '')
|
.replace(directiveRegex, '')
|
||||||
.replace(anyCommentRegex, '\n');
|
.replace(anyCommentRegex, '\n');
|
||||||
for (const [key, { detector }] of Object.entries(detectors)) {
|
for (const [key, { detector }] of Object.entries(detectors)) {
|
||||||
|
console.log('APA20', key);
|
||||||
const diagram = detector(text, config);
|
const diagram = detector(text, config);
|
||||||
if (diagram) {
|
if (diagram) {
|
||||||
return key;
|
return key;
|
||||||
|
@@ -18,6 +18,7 @@ import journey from '../diagrams/user-journey/journeyDetector.js';
|
|||||||
import errorDiagram from '../diagrams/error/errorDiagram.js';
|
import errorDiagram from '../diagrams/error/errorDiagram.js';
|
||||||
import flowchartElk from '../diagrams/flowchart/elk/detector.js';
|
import flowchartElk from '../diagrams/flowchart/elk/detector.js';
|
||||||
import timeline from '../diagrams/timeline/detector.js';
|
import timeline from '../diagrams/timeline/detector.js';
|
||||||
|
import swimlane from '../diagrams/swimlane/detector.js';
|
||||||
import mindmap from '../diagrams/mindmap/detector.js';
|
import mindmap from '../diagrams/mindmap/detector.js';
|
||||||
import kanban from '../diagrams/kanban/detector.js';
|
import kanban from '../diagrams/kanban/detector.js';
|
||||||
import sankey from '../diagrams/sankey/sankeyDetector.js';
|
import sankey from '../diagrams/sankey/sankeyDetector.js';
|
||||||
@@ -94,6 +95,7 @@ export const addDiagrams = () => {
|
|||||||
packet,
|
packet,
|
||||||
xychart,
|
xychart,
|
||||||
block,
|
block,
|
||||||
architecture
|
architecture,
|
||||||
|
swimlane
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
24
packages/mermaid/src/diagrams/swimlane/detector.ts
Normal file
24
packages/mermaid/src/diagrams/swimlane/detector.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import type {
|
||||||
|
DiagramDetector,
|
||||||
|
DiagramLoader,
|
||||||
|
ExternalDiagramDefinition,
|
||||||
|
} from '../../diagram-api/types.js';
|
||||||
|
|
||||||
|
const id = 'swimlane';
|
||||||
|
|
||||||
|
const detector: DiagramDetector = (txt) => {
|
||||||
|
return /^\s*swimlane/.test(txt);
|
||||||
|
};
|
||||||
|
|
||||||
|
const loader: DiagramLoader = async () => {
|
||||||
|
const { diagram } = await import('./swimlaneDiagram.js');
|
||||||
|
return { id, diagram };
|
||||||
|
};
|
||||||
|
|
||||||
|
const plugin: ExternalDiagramDefinition = {
|
||||||
|
id,
|
||||||
|
detector,
|
||||||
|
loader,
|
||||||
|
};
|
||||||
|
|
||||||
|
export default plugin;
|
67
packages/mermaid/src/diagrams/swimlane/flowDb.spec.ts
Normal file
67
packages/mermaid/src/diagrams/swimlane/flowDb.spec.ts
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
import flowDb from './flowDb.js';
|
||||||
|
import type { FlowSubGraph } from './types.js';
|
||||||
|
|
||||||
|
describe('flow db subgraphs', () => {
|
||||||
|
let subgraphs: FlowSubGraph[];
|
||||||
|
beforeEach(() => {
|
||||||
|
subgraphs = [
|
||||||
|
{ nodes: ['a', 'b', 'c', 'e'] },
|
||||||
|
{ nodes: ['f', 'g', 'h'] },
|
||||||
|
{ nodes: ['i', 'j'] },
|
||||||
|
{ nodes: ['k'] },
|
||||||
|
] as FlowSubGraph[];
|
||||||
|
});
|
||||||
|
describe('exist', () => {
|
||||||
|
it('should return true when the is exists in a subgraph', () => {
|
||||||
|
expect(flowDb.exists(subgraphs, 'a')).toBe(true);
|
||||||
|
expect(flowDb.exists(subgraphs, 'h')).toBe(true);
|
||||||
|
expect(flowDb.exists(subgraphs, 'j')).toBe(true);
|
||||||
|
expect(flowDb.exists(subgraphs, 'k')).toBe(true);
|
||||||
|
});
|
||||||
|
it('should return false when the is exists in a subgraph', () => {
|
||||||
|
expect(flowDb.exists(subgraphs, 'a2')).toBe(false);
|
||||||
|
expect(flowDb.exists(subgraphs, 'l')).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('makeUniq', () => {
|
||||||
|
it('should remove ids from sungraph that already exists in another subgraph even if it gets empty', () => {
|
||||||
|
const subgraph = flowDb.makeUniq({ nodes: ['i', 'j'] } as FlowSubGraph, subgraphs);
|
||||||
|
|
||||||
|
expect(subgraph.nodes).toEqual([]);
|
||||||
|
});
|
||||||
|
it('should remove ids from sungraph that already exists in another subgraph', () => {
|
||||||
|
const subgraph = flowDb.makeUniq({ nodes: ['i', 'j', 'o'] } as FlowSubGraph, subgraphs);
|
||||||
|
|
||||||
|
expect(subgraph.nodes).toEqual(['o']);
|
||||||
|
});
|
||||||
|
it('should not remove ids from subgraph if they are unique', () => {
|
||||||
|
const subgraph = flowDb.makeUniq({ nodes: ['q', 'r', 's'] } as FlowSubGraph, subgraphs);
|
||||||
|
|
||||||
|
expect(subgraph.nodes).toEqual(['q', 'r', 's']);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('flow db addClass', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
flowDb.clear();
|
||||||
|
});
|
||||||
|
it('should detect many classes', () => {
|
||||||
|
flowDb.addClass('a,b', ['stroke-width: 8px']);
|
||||||
|
const classes = flowDb.getClasses();
|
||||||
|
|
||||||
|
expect(classes.has('a')).toBe(true);
|
||||||
|
expect(classes.has('b')).toBe(true);
|
||||||
|
expect(classes.get('a')?.styles).toEqual(['stroke-width: 8px']);
|
||||||
|
expect(classes.get('b')?.styles).toEqual(['stroke-width: 8px']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should detect single class', () => {
|
||||||
|
flowDb.addClass('a', ['stroke-width: 8px']);
|
||||||
|
const classes = flowDb.getClasses();
|
||||||
|
|
||||||
|
expect(classes.has('a')).toBe(true);
|
||||||
|
expect(classes.get('a')?.styles).toEqual(['stroke-width: 8px']);
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,269 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[Arrows] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a nodes and edges', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;\nA-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle angle bracket ' > ' as direction LR", function () {
|
||||||
|
const res = flow.parser.parse('graph >;A-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
const direction = flow.parser.yy.getDirection();
|
||||||
|
|
||||||
|
expect(direction).toBe('LR');
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle angle bracket ' < ' as direction RL", function () {
|
||||||
|
const res = flow.parser.parse('graph <;A-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
const direction = flow.parser.yy.getDirection();
|
||||||
|
|
||||||
|
expect(direction).toBe('RL');
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle caret ' ^ ' as direction BT", function () {
|
||||||
|
const res = flow.parser.parse('graph ^;A-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
const direction = flow.parser.yy.getDirection();
|
||||||
|
|
||||||
|
expect(direction).toBe('BT');
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle lower-case 'v' as direction TB", function () {
|
||||||
|
const res = flow.parser.parse('graph v;A-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
const direction = flow.parser.yy.getDirection();
|
||||||
|
|
||||||
|
expect(direction).toBe('TB');
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a nodes and edges and a space between link and node', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A --> B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a nodes and edges, a space between link and node and each line ending without semicolon', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\nA --> B\n style e red');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle statements ending without semicolon', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nB-->C');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(2);
|
||||||
|
expect(edges[1].start).toBe('B');
|
||||||
|
expect(edges[1].end).toBe('C');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('it should handle multi directional arrows', function () {
|
||||||
|
describe('point', function () {
|
||||||
|
it('should handle double edged nodes and edges', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;\nA<-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle double edged nodes with text', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;\nA<-- text -->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('text');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle double edged nodes and edges on thick arrows', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;\nA<==>B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle double edged nodes with text on thick arrows', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;\nA<== text ==>B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('text');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle double edged nodes and edges on dotted arrows', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;\nA<-.->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle double edged nodes with text on dotted arrows', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;\nA<-. text .->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('text');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,154 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
import { cleanupComments } from '../../../diagram-api/comments.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[Comments] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle comments', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n%% Comment\n A-->B;'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle comments at the start', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('%% Comment\ngraph TD;\n A-->B;'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle comments at the end', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n A-->B\n %% Comment at the end\n'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle comments at the end no trailing newline', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n A-->B\n%% Comment'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle comments at the end many trailing newlines', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n A-->B\n%% Comment\n\n\n'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle no trailing newlines', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n A-->B'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle many trailing newlines', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n A-->B\n\n'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a comment with blank rows in-between', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n\n\n %% Comment\n A-->B;'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a comment with mermaid flowchart code in them', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
cleanupComments(
|
||||||
|
'graph TD;\n\n\n %% Test od>Odd shape]-->|Two line<br>edge comment|ro;\n A-->B;'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,92 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('when parsing directions', function () {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
flow.parser.yy.setGen('gen-2');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should use default direction from top level', function () {
|
||||||
|
const res = flow.parser.parse(`flowchart TB
|
||||||
|
subgraph A
|
||||||
|
a --> b
|
||||||
|
end`);
|
||||||
|
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(2);
|
||||||
|
expect(subgraph.nodes[0]).toBe('b');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a');
|
||||||
|
expect(subgraph.id).toBe('A');
|
||||||
|
expect(subgraph.dir).toBe(undefined);
|
||||||
|
});
|
||||||
|
it('should handle a subgraph with a direction', function () {
|
||||||
|
const res = flow.parser.parse(`flowchart TB
|
||||||
|
subgraph A
|
||||||
|
direction BT
|
||||||
|
a --> b
|
||||||
|
end`);
|
||||||
|
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(2);
|
||||||
|
expect(subgraph.nodes[0]).toBe('b');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a');
|
||||||
|
expect(subgraph.id).toBe('A');
|
||||||
|
expect(subgraph.dir).toBe('BT');
|
||||||
|
});
|
||||||
|
it('should use the last defined direction', function () {
|
||||||
|
const res = flow.parser.parse(`flowchart TB
|
||||||
|
subgraph A
|
||||||
|
direction BT
|
||||||
|
a --> b
|
||||||
|
direction RL
|
||||||
|
end`);
|
||||||
|
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(2);
|
||||||
|
expect(subgraph.nodes[0]).toBe('b');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a');
|
||||||
|
expect(subgraph.id).toBe('A');
|
||||||
|
expect(subgraph.dir).toBe('RL');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle nested subgraphs 1', function () {
|
||||||
|
const res = flow.parser.parse(`flowchart TB
|
||||||
|
subgraph A
|
||||||
|
direction RL
|
||||||
|
b-->B
|
||||||
|
a
|
||||||
|
end
|
||||||
|
a-->c
|
||||||
|
subgraph B
|
||||||
|
direction LR
|
||||||
|
c
|
||||||
|
end`);
|
||||||
|
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(2);
|
||||||
|
|
||||||
|
const subgraphA = subgraphs.find((o) => o.id === 'A');
|
||||||
|
const subgraphB = subgraphs.find((o) => o.id === 'B');
|
||||||
|
|
||||||
|
expect(subgraphB.nodes[0]).toBe('c');
|
||||||
|
expect(subgraphB.dir).toBe('LR');
|
||||||
|
expect(subgraphA.nodes).toContain('B');
|
||||||
|
expect(subgraphA.nodes).toContain('b');
|
||||||
|
expect(subgraphA.nodes).toContain('a');
|
||||||
|
expect(subgraphA.nodes).not.toContain('c');
|
||||||
|
expect(subgraphA.dir).toBe('RL');
|
||||||
|
});
|
||||||
|
});
|
496
packages/mermaid/src/diagrams/swimlane/parser/flow-edges.spec.js
Normal file
496
packages/mermaid/src/diagrams/swimlane/parser/flow-edges.spec.js
Normal file
@@ -0,0 +1,496 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
const keywords = [
|
||||||
|
'graph',
|
||||||
|
'flowchart',
|
||||||
|
'flowchart-elk',
|
||||||
|
'style',
|
||||||
|
'default',
|
||||||
|
'linkStyle',
|
||||||
|
'interpolate',
|
||||||
|
'classDef',
|
||||||
|
'class',
|
||||||
|
'href',
|
||||||
|
'call',
|
||||||
|
'click',
|
||||||
|
'_self',
|
||||||
|
'_blank',
|
||||||
|
'_parent',
|
||||||
|
'_top',
|
||||||
|
'end',
|
||||||
|
'subgraph',
|
||||||
|
'kitty',
|
||||||
|
];
|
||||||
|
|
||||||
|
const doubleEndedEdges = [
|
||||||
|
{ edgeStart: 'x--', edgeEnd: '--x', stroke: 'normal', type: 'double_arrow_cross' },
|
||||||
|
{ edgeStart: 'x==', edgeEnd: '==x', stroke: 'thick', type: 'double_arrow_cross' },
|
||||||
|
{ edgeStart: 'x-.', edgeEnd: '.-x', stroke: 'dotted', type: 'double_arrow_cross' },
|
||||||
|
{ edgeStart: 'o--', edgeEnd: '--o', stroke: 'normal', type: 'double_arrow_circle' },
|
||||||
|
{ edgeStart: 'o==', edgeEnd: '==o', stroke: 'thick', type: 'double_arrow_circle' },
|
||||||
|
{ edgeStart: 'o-.', edgeEnd: '.-o', stroke: 'dotted', type: 'double_arrow_circle' },
|
||||||
|
{ edgeStart: '<--', edgeEnd: '-->', stroke: 'normal', type: 'double_arrow_point' },
|
||||||
|
{ edgeStart: '<==', edgeEnd: '==>', stroke: 'thick', type: 'double_arrow_point' },
|
||||||
|
{ edgeStart: '<-.', edgeEnd: '.->', stroke: 'dotted', type: 'double_arrow_point' },
|
||||||
|
];
|
||||||
|
|
||||||
|
describe('[Edges] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle open ended edges', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A---B;');
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle cross ended edges', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--xB;');
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle open ended edges', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--oB;');
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_circle');
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('edges', function () {
|
||||||
|
doubleEndedEdges.forEach((edgeType) => {
|
||||||
|
it(`should handle ${edgeType.stroke} ${edgeType.type} with no text`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA ${edgeType.edgeStart}${edgeType.edgeEnd} B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe(`${edgeType.type}`);
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe(`${edgeType.stroke}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
it(`should handle ${edgeType.stroke} ${edgeType.type} with text`, function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
`graph TD;\nA ${edgeType.edgeStart} text ${edgeType.edgeEnd} B;`
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe(`${edgeType.type}`);
|
||||||
|
expect(edges[0].text).toBe('text');
|
||||||
|
expect(edges[0].stroke).toBe(`${edgeType.stroke}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
it.each(keywords)(
|
||||||
|
`should handle ${edgeType.stroke} ${edgeType.type} with %s text`,
|
||||||
|
function (keyword) {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
`graph TD;\nA ${edgeType.edgeStart} ${keyword} ${edgeType.edgeEnd} B;`
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe(`${edgeType.type}`);
|
||||||
|
expect(edges[0].text).toBe(`${keyword}`);
|
||||||
|
expect(edges[0].stroke).toBe(`${edgeType.stroke}`);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle multiple edges', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;A---|This is the 123 s text|B;\nA---|This is the second edge|B;'
|
||||||
|
);
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(2);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
expect(edges[0].text).toBe('This is the 123 s text');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(1);
|
||||||
|
expect(edges[1].start).toBe('A');
|
||||||
|
expect(edges[1].end).toBe('B');
|
||||||
|
expect(edges[1].type).toBe('arrow_open');
|
||||||
|
expect(edges[1].text).toBe('This is the second edge');
|
||||||
|
expect(edges[1].stroke).toBe('normal');
|
||||||
|
expect(edges[1].length).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('edge length', function () {
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle normal edges with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -${'-'.repeat(length)}- B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle normal labelled edges with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -- Label -${'-'.repeat(length)}- B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle normal edges with arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -${'-'.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle normal labelled edges with arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -- Label -${'-'.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle normal edges with double arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA <-${'-'.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle normal labelled edges with double arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA <-- Label -${'-'.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle thick edges with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA =${'='.repeat(length)}= B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle thick labelled edges with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA == Label =${'='.repeat(length)}= B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle thick edges with arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA =${'='.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle thick labelled edges with arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA == Label =${'='.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle thick edges with double arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA <=${'='.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle thick labelled edges with double arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA <== Label =${'='.repeat(length)}> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle dotted edges with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -${'.'.repeat(length)}- B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle dotted labelled edges with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -. Label ${'.'.repeat(length)}- B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_open');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle dotted edges with arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -${'.'.repeat(length)}-> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle dotted labelled edges with arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA -. Label ${'.'.repeat(length)}-> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle dotted edges with double arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA <-${'.'.repeat(length)}-> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let length = 1; length <= 3; ++length) {
|
||||||
|
it(`should handle dotted edges with double arrows with length ${length}`, function () {
|
||||||
|
const res = flow.parser.parse(`graph TD;\nA <-. Label ${'.'.repeat(length)}-> B;`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(1);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('double_arrow_point');
|
||||||
|
expect(edges[0].text).toBe('Label');
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
expect(edges[0].length).toBe(length);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,29 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[Text] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('it should handle huge files', function () {
|
||||||
|
// skipped because this test takes like 2 minutes or more!
|
||||||
|
it.skip('it should handlehuge diagrams', function () {
|
||||||
|
const nodes = ('A-->B;B-->A;'.repeat(415) + 'A-->B;').repeat(57) + 'A-->B;B-->A;'.repeat(275);
|
||||||
|
flow.parser.parse(`graph LR;${nodes}`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges.length).toBe(47917);
|
||||||
|
expect(vert.size).toBe(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,158 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
import { vi } from 'vitest';
|
||||||
|
const spyOn = vi.spyOn;
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[Interactions] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use click to a callback', function () {
|
||||||
|
spyOn(flowDb, 'setClickEvent');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A callback');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use click to a click and call callback', function () {
|
||||||
|
spyOn(flowDb, 'setClickEvent');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A call callback()');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use click to a callback with toolip', function () {
|
||||||
|
spyOn(flowDb, 'setClickEvent');
|
||||||
|
spyOn(flowDb, 'setTooltip');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A callback "tooltip"');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||||
|
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use click to a click and call callback with toolip', function () {
|
||||||
|
spyOn(flowDb, 'setClickEvent');
|
||||||
|
spyOn(flowDb, 'setTooltip');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A call callback() "tooltip"');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||||
|
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use click to a callback with an arbitrary number of args', function () {
|
||||||
|
spyOn(flowDb, 'setClickEvent');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A call callback("test0", test1, test2)');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback', '"test0", test1, test2');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a link', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html"');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a click and href link', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html"');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a link with tooltip', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
spyOn(flowDb, 'setTooltip');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html" "tooltip"');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||||
|
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a click and href link with tooltip', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
spyOn(flowDb, 'setTooltip');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html" "tooltip"');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||||
|
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a link with target', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html" _blank');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a click and href link with target', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html" _blank');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a link with tooltip and target', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
spyOn(flowDb, 'setTooltip');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html" "tooltip" _blank');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||||
|
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle interaction - click to a click and href link with tooltip and target', function () {
|
||||||
|
spyOn(flowDb, 'setLink');
|
||||||
|
spyOn(flowDb, 'setTooltip');
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html" "tooltip" _blank');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||||
|
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||||
|
});
|
||||||
|
});
|
119
packages/mermaid/src/diagrams/swimlane/parser/flow-lines.spec.js
Normal file
119
packages/mermaid/src/diagrams/swimlane/parser/flow-lines.spec.js
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[Lines] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle line interpolation default definitions', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\n' + 'A-->B\n' + 'linkStyle default interpolate basis');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges.defaultInterpolate).toBe('basis');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle line interpolation numbered definitions', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\n' +
|
||||||
|
'A-->B\n' +
|
||||||
|
'A-->C\n' +
|
||||||
|
'linkStyle 0 interpolate basis\n' +
|
||||||
|
'linkStyle 1 interpolate cardinal'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].interpolate).toBe('basis');
|
||||||
|
expect(edges[1].interpolate).toBe('cardinal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle line interpolation multi-numbered definitions', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\n' + 'A-->B\n' + 'A-->C\n' + 'linkStyle 0,1 interpolate basis'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].interpolate).toBe('basis');
|
||||||
|
expect(edges[1].interpolate).toBe('basis');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle line interpolation default with style', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\n' + 'A-->B\n' + 'linkStyle default interpolate basis stroke-width:1px;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges.defaultInterpolate).toBe('basis');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle line interpolation numbered with style', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\n' +
|
||||||
|
'A-->B\n' +
|
||||||
|
'A-->C\n' +
|
||||||
|
'linkStyle 0 interpolate basis stroke-width:1px;\n' +
|
||||||
|
'linkStyle 1 interpolate cardinal stroke-width:1px;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].interpolate).toBe('basis');
|
||||||
|
expect(edges[1].interpolate).toBe('cardinal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle line interpolation multi-numbered with style', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\n' + 'A-->B\n' + 'A-->C\n' + 'linkStyle 0,1 interpolate basis stroke-width:1px;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].interpolate).toBe('basis');
|
||||||
|
expect(edges[1].interpolate).toBe('basis');
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('it should handle new line type notation', function () {
|
||||||
|
it('should handle regular lines', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle dotted lines', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-.->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle dotted lines', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A==>B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,64 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('parsing a flow chart with markdown strings', function () {
  beforeEach(function () {
    flow.parser.yy = flowDb;
    flow.parser.yy.clear();
  });

  // Backtick-quoted labels ("`...`") are parsed as markdown (labelType 'markdown');
  // plain quoted labels stay labelType 'string'.
  it('markdown formatting in nodes and labels', function () {
    const res = flow.parser.parse(`flowchart
A["\`The cat in **the** hat\`"]-- "\`The *bat* in the chat\`" -->B["The dog in the hog"] -- "The rat in the mat" -->C;`);

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(vert.get('A').id).toBe('A');
    expect(vert.get('A').text).toBe('The cat in **the** hat');
    expect(vert.get('A').labelType).toBe('markdown');
    expect(vert.get('B').id).toBe('B');
    expect(vert.get('B').text).toBe('The dog in the hog');
    expect(vert.get('B').labelType).toBe('string');
    expect(edges.length).toBe(2);
    expect(edges[0].start).toBe('A');
    expect(edges[0].end).toBe('B');
    expect(edges[0].type).toBe('arrow_point');
    expect(edges[0].text).toBe('The *bat* in the chat');
    expect(edges[0].labelType).toBe('markdown');
    expect(edges[1].start).toBe('B');
    expect(edges[1].end).toBe('C');
    expect(edges[1].type).toBe('arrow_point');
    expect(edges[1].text).toBe('The rat in the mat');
    expect(edges[1].labelType).toBe('string');
  });

  // Subgraph titles follow the same rule: backticked titles become markdown.
  it('markdown formatting in subgraphs', function () {
    const res = flow.parser.parse(`flowchart LR
subgraph "One"
  a("\`The **cat**
  in the hat\`") -- "1o" --> b{{"\`The **dog** in the hog\`"}}
end
subgraph "\`**Two**\`"
  c("\`The **cat**
  in the hat\`") -- "\`1o **ipa**\`" --> d("The dog in the hog")
end`);

    const subgraphs = flow.parser.yy.getSubGraphs();
    expect(subgraphs.length).toBe(2);
    const subgraph = subgraphs[0];

    expect(subgraph.nodes.length).toBe(2);
    expect(subgraph.title).toBe('One');
    expect(subgraph.labelType).toBe('text');

    const subgraph2 = subgraphs[1];
    expect(subgraph2.nodes.length).toBe(2);
    expect(subgraph2.title).toBe('**Two**');
    expect(subgraph2.labelType).toBe('markdown');
  });
});
|
@@ -0,0 +1,293 @@
|
|||||||
|
import flowDb from '../swimlaneDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
// NOTE(review): the describe title said "directions" but every case here exercises
// the `node@{ ... }` shape-data syntax, so it is renamed accordingly.
describe('when parsing shape data', function () {
  beforeEach(function () {
    flow.parser.yy = flowDb;
    flow.parser.yy.clear();
    flow.parser.yy.setGen('gen-2');
  });

  it('should handle basic shape data statements', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded}`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
  });

  // Was a duplicate title; this variant has a space before the closing brace.
  it('should handle basic shape data statements with spaces inside the braces', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded }`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
  });

  it('should handle basic shape data statements with &', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded } & E`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(2);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
    expect(data4Layout.nodes[1].label).toEqual('E');
  });

  it('should handle shape data statements with edges', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded } --> E`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(2);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
    expect(data4Layout.nodes[1].label).toEqual('E');
  });

  it('should handle basic shape data statements with amp and edges 1', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded } & E --> F`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(3);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
    expect(data4Layout.nodes[1].label).toEqual('E');
  });

  it('should handle basic shape data statements with amp and edges 2', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded } & E@{ shape: rounded } --> F`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(3);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
    expect(data4Layout.nodes[1].label).toEqual('E');
  });

  it('should handle basic shape data statements with amp and edges 3', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded } & E@{ shape: rounded } --> F & G@{ shape: rounded }`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(4);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
    expect(data4Layout.nodes[1].label).toEqual('E');
  });

  it('should handle basic shape data statements with amp and edges 4', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded } & E@{ shape: rounded } --> F@{ shape: rounded } & G@{ shape: rounded }`);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(4);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
    expect(data4Layout.nodes[1].label).toEqual('E');
  });

  it('should handle basic shape data statements with amp and edges 5, trailing space', function () {
    // NOTE(review): F/G deliberately use plain `{ ... }` (diamond syntax), not `@{ ... }`.
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded } & E@{ shape: rounded } --> F{ shape: rounded } & G{ shape: rounded } `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(4);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
    expect(data4Layout.nodes[1].label).toEqual('E');
  });

  it('should not matter if there are no leading spaces', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{shape: rounded}`);

    const data4Layout = flow.parser.yy.getData();

    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
  });

  it('should not matter if there are many leading spaces', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{       shape: rounded}`);

    const data4Layout = flow.parser.yy.getData();

    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
  });

  it('should be forgiving with many spaces before the end', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded         }`);

    const data4Layout = flow.parser.yy.getData();

    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('D');
  });

  it('should be possible to add multiple properties on the same line', function () {
    const res = flow.parser.parse(`flowchart TB
      D@{ shape: rounded , label: "DD"}`);

    const data4Layout = flow.parser.yy.getData();

    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('rounded');
    expect(data4Layout.nodes[0].label).toEqual('DD');
  });

  it('should be possible to link to a node with more data', function () {
    const res = flow.parser.parse(`flowchart TB
      A --> D@{
        shape: circle
        other: "clock"
      }

      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(2);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('A');
    expect(data4Layout.nodes[1].label).toEqual('D');
    expect(data4Layout.nodes[1].shape).toEqual('circle');

    expect(data4Layout.edges.length).toBe(1);
  });

  it('should not disturb adding multiple nodes after each other', function () {
    const res = flow.parser.parse(`flowchart TB
      A[hello]
      B@{
        shape: circle
        other: "clock"
      }
      C[Hello]@{
        shape: circle
        other: "clock"
      }
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(3);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('hello');
    expect(data4Layout.nodes[1].shape).toEqual('circle');
    expect(data4Layout.nodes[1].label).toEqual('B');
    expect(data4Layout.nodes[2].shape).toEqual('circle');
    expect(data4Layout.nodes[2].label).toEqual('Hello');
  });

  it('should use handle bracket end (}) character inside the shape data', function () {
    const res = flow.parser.parse(`flowchart TB
      A@{
        label: "This is }"
        other: "clock"
      }
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('This is }');
  });

  it('should error on non-existent shape', function () {
    expect(() => {
      flow.parser.parse(`flowchart TB
      A@{ shape: this-shape-does-not-exist }
      `);
    }).toThrow('No such shape: this-shape-does-not-exist.');
  });

  it('should error on internal-only shape', function () {
    expect(() => {
      // this shape does exist, but it's only supposed to be for internal/backwards compatibility use
      flow.parser.parse(`flowchart TB
      A@{ shape: rect_left_inv_arrow }
      `);
    }).toThrow('No such shape: rect_left_inv_arrow. Shape names should be lowercase.');
  });

  it('Diamond shapes should work as usual', function () {
    const res = flow.parser.parse(`flowchart TB
      A{This is a label}
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('diamond');
    expect(data4Layout.nodes[0].label).toEqual('This is a label');
  });

  // YAML literal block scalar (`label: |`) keeps line breaks as '\n'.
  it('Multi line strings should be supported', function () {
    const res = flow.parser.parse(`flowchart TB
      A@{
        label: |
          This is a
          multiline string
        other: "clock"
      }
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('This is a\nmultiline string\n');
  });

  // Was a duplicate title; quoted multi-line strings fold the break into <br/>.
  it('Multi line strings in quotes should be supported', function () {
    const res = flow.parser.parse(`flowchart TB
      A@{
        label: "This is a
        multiline string"
        other: "clock"
      }
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('This is a<br/>multiline string');
  });

  it('should be possible to use } in strings', function () {
    const res = flow.parser.parse(`flowchart TB
      A@{
        label: "This is a string with }"
        other: "clock"
      }
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('This is a string with }');
  });

  it('should be possible to use @ in strings', function () {
    const res = flow.parser.parse(`flowchart TB
      A@{
        label: "This is a string with @"
        other: "clock"
      }
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('This is a string with @');
  });

  // Was mislabeled "use @ in strings"; this case covers `}` with no space before it.
  it('should be possible to use } in strings without a leading space', function () {
    const res = flow.parser.parse(`flowchart TB
      A@{
        label: "This is a string with}"
        other: "clock"
      }
      `);

    const data4Layout = flow.parser.yy.getData();
    expect(data4Layout.nodes.length).toBe(1);
    expect(data4Layout.nodes[0].shape).toEqual('squareRect');
    expect(data4Layout.nodes[0].label).toEqual('This is a string with}');
  });
});
|
@@ -0,0 +1,371 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Flowchart reserved words. The cases below verify these remain usable
// inside node ids (e.g. `a-end-node`) without confusing the lexer.
const keywords = [
  'graph',
  'flowchart',
  'flowchart-elk',
  'style',
  'default',
  'linkStyle',
  'interpolate',
  'classDef',
  'class',
  'href',
  'call',
  'click',
  '_self',
  '_blank',
  '_parent',
  '_top',
  'end',
  'subgraph',
];

// Characters that must be accepted as, or inside, node ids.
const specialChars = ['#', ':', '0', '&', ',', '*', '.', '\\', 'v', '-', '/', '_'];
|
||||||
|
|
||||||
|
describe('[Singlenodes] when parsing', () => {
  beforeEach(function () {
    flow.parser.yy = flowDb;
    flow.parser.yy.clear();
  });

  it('should handle a single node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;A;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('A').styles.length).toBe(0);
  });

  it('should handle a single node with white space after it (SN1)', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;A ;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('A').styles.length).toBe(0);
  });

  it('should handle a single square node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a[A];');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').styles.length).toBe(0);
    expect(vert.get('a').type).toBe('square');
  });

  it('should handle a single round square node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a[A];');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').styles.length).toBe(0);
    expect(vert.get('a').type).toBe('square');
  });

  it('should handle a single circle node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a((A));');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('circle');
  });

  it('should handle a single round node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a(A);');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('round');
  });

  it('should handle a single odd node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a>A];');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('odd');
  });

  it('should handle a single diamond node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a{A};');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('diamond');
  });

  it('should handle a single diamond node with whitespace after it', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a{A} ;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('diamond');
  });

  it('should handle a single diamond node with html in it (SN3)', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a{A <br> end};');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('diamond');
    expect(vert.get('a').text).toBe('A <br> end');
  });

  it('should handle a single hexagon node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a{{A}};');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('hexagon');
  });

  it('should handle a single hexagon node with html in it', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a{{A <br> end}};');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('hexagon');
    expect(vert.get('a').text).toBe('A <br> end');
  });

  it('should handle a single round node with html in it', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a(A <br> end);');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('round');
    expect(vert.get('a').text).toBe('A <br> end');
  });

  it('should handle a single double circle node', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a(((A)));');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('doublecircle');
  });

  it('should handle a single double circle node with whitespace after it', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a(((A))) ;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('doublecircle');
  });

  it('should handle a single double circle node with html in it (SN3)', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;a(((A <br> end)));');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('a').type).toBe('doublecircle');
    expect(vert.get('a').text).toBe('A <br> end');
  });

  it('should handle a single node with alphanumerics starting on a char', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;id1;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('id1').styles.length).toBe(0);
  });

  it('should handle a single node with a single digit', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;1;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('1').text).toBe('1');
  });

  it('should handle a single node with a single digit in a subgraph', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;subgraph "hello";1;end;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('1').text).toBe('1');
  });

  it('should handle a single node with alphanumerics starting on a num', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;1id;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('1id').styles.length).toBe(0);
  });

  it('should handle a single node with alphanumerics containing a minus sign', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;i-d;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('i-d').styles.length).toBe(0);
  });

  it('should handle a single node with alphanumerics containing a underscore sign', function () {
    // Silly but syntactically correct
    const res = flow.parser.parse('graph TD;i_d;');

    const vert = flow.parser.yy.getVertices();
    const edges = flow.parser.yy.getEdges();

    expect(edges.length).toBe(0);
    expect(vert.get('i_d').styles.length).toBe(0);
  });

  it.each(keywords)('should handle keywords between dashes "-"', function (keyword) {
    const res = flow.parser.parse(`graph TD;a-${keyword}-node;`);
    const vert = flow.parser.yy.getVertices();
    expect(vert.get(`a-${keyword}-node`).text).toBe(`a-${keyword}-node`);
  });

  it.each(keywords)('should handle keywords between periods "."', function (keyword) {
    const res = flow.parser.parse(`graph TD;a.${keyword}.node;`);
    const vert = flow.parser.yy.getVertices();
    expect(vert.get(`a.${keyword}.node`).text).toBe(`a.${keyword}.node`);
  });

  it.each(keywords)('should handle keywords between underscores "_"', function (keyword) {
    const res = flow.parser.parse(`graph TD;a_${keyword}_node;`);
    const vert = flow.parser.yy.getVertices();
    expect(vert.get(`a_${keyword}_node`).text).toBe(`a_${keyword}_node`);
  });

  it.each(keywords)('should handle nodes ending in %s', function (keyword) {
    const res = flow.parser.parse(`graph TD;node_${keyword};node.${keyword};node-${keyword};`);
    const vert = flow.parser.yy.getVertices();
    expect(vert.get(`node_${keyword}`).text).toBe(`node_${keyword}`);
    expect(vert.get(`node.${keyword}`).text).toBe(`node.${keyword}`);
    expect(vert.get(`node-${keyword}`).text).toBe(`node-${keyword}`);
  });

  // Keywords that may NOT start a node id (the lexer treats them as statements).
  const errorKeywords = [
    'graph',
    'flowchart',
    'flowchart-elk',
    'style',
    'linkStyle',
    'interpolate',
    'classDef',
    'class',
    '_self',
    '_blank',
    '_parent',
    '_top',
    'end',
    'subgraph',
  ];
  it.each(errorKeywords)('should throw error at nodes beginning with %s', function (keyword) {
    const str = `graph TD;${keyword}.node;${keyword}-node;${keyword}/node`;

    // (removed a stray getVertices() call that ran before anything was parsed)
    expect(() => flow.parser.parse(str)).toThrowError();
  });

  const workingKeywords = ['default', 'href', 'click', 'call'];

  it.each(workingKeywords)('should parse node beginning with %s', function (keyword) {
    flow.parser.parse(`graph TD; ${keyword}.node;${keyword}-node;${keyword}/node;`);
    const vert = flow.parser.yy.getVertices();
    expect(vert.get(`${keyword}.node`).text).toBe(`${keyword}.node`);
    expect(vert.get(`${keyword}-node`).text).toBe(`${keyword}-node`);
    expect(vert.get(`${keyword}/node`).text).toBe(`${keyword}/node`);
  });

  it.each(specialChars)(
    'should allow node ids of single special characters',
    function (specialChar) {
      flow.parser.parse(`graph TD; ${specialChar} --> A`);
      const vert = flow.parser.yy.getVertices();
      expect(vert.get(`${specialChar}`).text).toBe(`${specialChar}`);
    }
  );

  it.each(specialChars)(
    'should allow node ids with special characters at start of id',
    function (specialChar) {
      flow.parser.parse(`graph TD; ${specialChar}node --> A`);
      const vert = flow.parser.yy.getVertices();
      expect(vert.get(`${specialChar}node`).text).toBe(`${specialChar}node`);
    }
  );

  it.each(specialChars)(
    'should allow node ids with special characters at end of id',
    function (specialChar) {
      flow.parser.parse(`graph TD; node${specialChar} --> A`);
      const vert = flow.parser.yy.getVertices();
      expect(vert.get(`node${specialChar}`).text).toBe(`node${specialChar}`);
    }
  );
});
|
370
packages/mermaid/src/diagrams/swimlane/parser/flow-style.spec.js
Normal file
370
packages/mermaid/src/diagrams/swimlane/parser/flow-style.spec.js
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[Style] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
flow.parser.yy.setGen('gen-2');
|
||||||
|
});
|
||||||
|
|
||||||
|
// log.debug(flow.parser.parse('graph TD;style Q background:#fff;'));
|
||||||
|
it('should handle styles for vertices', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;style Q background:#fff;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('Q').styles.length).toBe(1);
|
||||||
|
expect(vert.get('Q').styles[0]).toBe('background:#fff');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle multiple styles for a vortex', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;style R background:#fff,border:1px solid red;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('R').styles.length).toBe(2);
|
||||||
|
expect(vert.get('R').styles[0]).toBe('background:#fff');
|
||||||
|
expect(vert.get('R').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle multiple styles in a graph', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;style S background:#aaa;\nstyle T background:#bbb,border:1px solid red;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('S').styles.length).toBe(1);
|
||||||
|
expect(vert.get('T').styles.length).toBe(2);
|
||||||
|
expect(vert.get('S').styles[0]).toBe('background:#aaa');
|
||||||
|
expect(vert.get('T').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle styles and graph definitions in a graph', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;S-->T;\nstyle S background:#aaa;\nstyle T background:#bbb,border:1px solid red;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('S').styles.length).toBe(1);
|
||||||
|
expect(vert.get('T').styles.length).toBe(2);
|
||||||
|
expect(vert.get('S').styles[0]).toBe('background:#aaa');
|
||||||
|
expect(vert.get('T').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle styles and graph definitions in a graph', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;style T background:#bbb,border:1px solid red;');
|
||||||
|
// const res = flow.parser.parse('graph TD;style T background: #bbb;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(vert.get('T').styles.length).toBe(2);
|
||||||
|
expect(vert.get('T').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should keep node label text (if already defined) when a style is applied', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;A(( ));B((Test));C;style A background:#fff;style D border:1px solid red;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(vert.get('A').text).toBe('');
|
||||||
|
expect(vert.get('B').text).toBe('Test');
|
||||||
|
expect(vert.get('C').text).toBe('C');
|
||||||
|
expect(vert.get('D').text).toBe('D');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to declare a class', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;classDef exClass background:#bbb,border:1px solid red;'
|
||||||
|
);
|
||||||
|
// const res = flow.parser.parse('graph TD;style T background: #bbb;');
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to declare multiple classes', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;classDef firstClass,secondClass background:#bbb,border:1px solid red;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('firstClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('firstClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('firstClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
|
||||||
|
expect(classes.get('secondClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('secondClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('secondClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to declare a class with a dot in the style', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;classDef exClass background:#bbb,border:1.5px solid red;'
|
||||||
|
);
|
||||||
|
// const res = flow.parser.parse('graph TD;style T background: #bbb;');
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
|
||||||
|
});
|
||||||
|
it('should be possible to declare a class with a space in the style', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD;classDef exClass background: #bbb,border:1.5px solid red;'
|
||||||
|
);
|
||||||
|
// const res = flow.parser.parse('graph TD;style T background : #bbb;');
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background: #bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
|
||||||
|
});
|
||||||
|
it('should be possible to apply a class to a vertex', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + 'classDef exClass background:#bbb,border:1px solid red;' + '\n';
|
||||||
|
statement = statement + 'a-->b;' + '\n';
|
||||||
|
statement = statement + 'class a exClass;';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
it('should be possible to apply a class to a vertex with an id containing _', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + 'classDef exClass background:#bbb,border:1px solid red;' + '\n';
|
||||||
|
statement = statement + 'a_a-->b_b;' + '\n';
|
||||||
|
statement = statement + 'class a_a exClass;';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
it('should be possible to apply a class to a vertex directly', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + 'classDef exClass background:#bbb,border:1px solid red;' + '\n';
|
||||||
|
statement = statement + 'a-->b[test]:::exClass;' + '\n';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to apply a class to a vertex directly : usecase A[text].class ', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + 'classDef exClass background:#bbb,border:1px solid red;' + '\n';
|
||||||
|
statement = statement + 'b[test]:::exClass;' + '\n';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to apply a class to a vertex directly : usecase A[text].class-->B[test2] ', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + 'classDef exClass background:#bbb,border:1px solid red;' + '\n';
|
||||||
|
statement = statement + 'A[test]:::exClass-->B[test2];' + '\n';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(vertices.get('A').classes[0]).toBe('exClass');
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to apply a class to a vertex directly 2', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + 'classDef exClass background:#bbb,border:1px solid red;' + '\n';
|
||||||
|
statement = statement + 'a-->b[1 a a text!.]:::exClass;' + '\n';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
});
|
||||||
|
it('should be possible to apply a class to a comma separated list of vertices', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + 'classDef exClass background:#bbb,border:1px solid red;' + '\n';
|
||||||
|
statement = statement + 'a-->b;' + '\n';
|
||||||
|
statement = statement + 'class a,b exClass;';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
expect(vertices.get('a').classes[0]).toBe('exClass');
|
||||||
|
expect(vertices.get('b').classes[0]).toBe('exClass');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle style definitions with more then 1 digit in a row', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\n' +
|
||||||
|
'A-->B1\n' +
|
||||||
|
'A-->B2\n' +
|
||||||
|
'A-->B3\n' +
|
||||||
|
'A-->B4\n' +
|
||||||
|
'A-->B5\n' +
|
||||||
|
'A-->B6\n' +
|
||||||
|
'A-->B7\n' +
|
||||||
|
'A-->B8\n' +
|
||||||
|
'A-->B9\n' +
|
||||||
|
'A-->B10\n' +
|
||||||
|
'A-->B11\n' +
|
||||||
|
'linkStyle 10 stroke-width:1px;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle style definitions within number of edges', function () {
|
||||||
|
expect(() =>
|
||||||
|
flow.parser
|
||||||
|
.parse(
|
||||||
|
`graph TD
|
||||||
|
A-->B
|
||||||
|
linkStyle 1 stroke-width:1px;`
|
||||||
|
)
|
||||||
|
.toThrow(
|
||||||
|
'The index 1 for linkStyle is out of bounds. Valid indices for linkStyle are between 0 and 0. (Help: Ensure that the index is within the range of existing edges.)'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle style definitions within number of edges', function () {
|
||||||
|
const res = flow.parser.parse(`graph TD
|
||||||
|
A-->B
|
||||||
|
linkStyle 0 stroke-width:1px;`);
|
||||||
|
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].style[0]).toBe('stroke-width:1px');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle multi-numbered style definitions with more then 1 digit in a row', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\n' +
|
||||||
|
'A-->B1\n' +
|
||||||
|
'A-->B2\n' +
|
||||||
|
'A-->B3\n' +
|
||||||
|
'A-->B4\n' +
|
||||||
|
'A-->B5\n' +
|
||||||
|
'A-->B6\n' +
|
||||||
|
'A-->B7\n' +
|
||||||
|
'A-->B8\n' +
|
||||||
|
'A-->B9\n' +
|
||||||
|
'A-->B10\n' +
|
||||||
|
'A-->B11\n' +
|
||||||
|
'A-->B12\n' +
|
||||||
|
'linkStyle 10,11 stroke-width:1px;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle classDefs with style in classes', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\nA-->B\nclassDef exClass font-style:bold;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle classDefs with % in classes', function () {
|
||||||
|
const res = flow.parser.parse(
|
||||||
|
'graph TD\nA-->B\nclassDef exClass fill:#f96,stroke:#333,stroke-width:4px,font-size:50%,font-style:bold;'
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle multiple vertices with style', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
classDef C1 stroke-dasharray:4
|
||||||
|
classDef C2 stroke-dasharray:6
|
||||||
|
A & B:::C1 & D:::C1 --> E:::C2
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(vert.get('A').classes.length).toBe(0);
|
||||||
|
expect(vert.get('B').classes[0]).toBe('C1');
|
||||||
|
expect(vert.get('D').classes[0]).toBe('C1');
|
||||||
|
expect(vert.get('E').classes[0]).toBe('C2');
|
||||||
|
});
|
||||||
|
});
|
609
packages/mermaid/src/diagrams/swimlane/parser/flow-text.spec.js
Normal file
609
packages/mermaid/src/diagrams/swimlane/parser/flow-text.spec.js
Normal file
@@ -0,0 +1,609 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('[Text] when parsing', () => {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('it should handle text on edges', function () {
|
||||||
|
it('should handle text without space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|textNoSpace|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle with space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|text including space|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text with /', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|text with / should work|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].text).toBe('text with / should work');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and space between vertices and link', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A --x|textNoSpace| B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and CAPS', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|text including CAPS space|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and dir', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|text including URL space|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(edges[0].text).toBe('text including URL space');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and send', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--text including URL space and send-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('text including URL space and send');
|
||||||
|
});
|
||||||
|
it('should handle space and send', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- text including URL space and send -->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('text including URL space and send');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and dir (TD)', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|text including R TD space|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(edges[0].text).toBe('text including R TD space');
|
||||||
|
});
|
||||||
|
it('should handle `', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|text including `|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(edges[0].text).toBe('text including `');
|
||||||
|
});
|
||||||
|
it('should handle v in node ids only v', function () {
|
||||||
|
// only v
|
||||||
|
const res = flow.parser.parse('graph TD;A--xv(my text);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(vert.get('v').text).toBe('my text');
|
||||||
|
});
|
||||||
|
it('should handle v in node ids v at end', function () {
|
||||||
|
// v at end
|
||||||
|
const res = flow.parser.parse('graph TD;A--xcsv(my text);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(vert.get('csv').text).toBe('my text');
|
||||||
|
});
|
||||||
|
it('should handle v in node ids v in middle', function () {
|
||||||
|
// v in middle
|
||||||
|
const res = flow.parser.parse('graph TD;A--xava(my text);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(vert.get('ava').text).toBe('my text');
|
||||||
|
});
|
||||||
|
it('should handle v in node ids, v at start', function () {
|
||||||
|
// v at start
|
||||||
|
const res = flow.parser.parse('graph TD;A--xva(my text);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(vert.get('va').text).toBe('my text');
|
||||||
|
});
|
||||||
|
it('should handle keywords', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--x|text including graph space|B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].text).toBe('text including graph space');
|
||||||
|
});
|
||||||
|
it('should handle keywords', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;V-->a[v]');
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
expect(vert.get('a').text).toBe('v');
|
||||||
|
});
|
||||||
|
it('should handle quoted text', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;V-- "test string()" -->a[v]');
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
expect(edges[0].text).toBe('test string()');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('it should handle text on lines', () => {
|
||||||
|
it('should handle normal text on lines', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- test text with == -->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].stroke).toBe('normal');
|
||||||
|
});
|
||||||
|
it('should handle dotted text on lines (TD3)', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-. test text with == .->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].stroke).toBe('dotted');
|
||||||
|
});
|
||||||
|
it('should handle thick text on lines', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A== test text with - ==>B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].stroke).toBe('thick');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('it should handle text on edges using the new notation', function () {
|
||||||
|
it('should handle text without space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- textNoSpace --xB;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text with multiple leading space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- textNoSpace --xB;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle with space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- text including space --xB;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text with /', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A -- text with / should work --x B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].text).toBe('text with / should work');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and space between vertices and link', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A -- textNoSpace --x B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and CAPS', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- text including CAPS space --xB;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and dir', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- text including URL space --xB;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(edges[0].text).toBe('text including URL space');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle space and dir (TD2)', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- text including R TD space --xB;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_cross');
|
||||||
|
expect(edges[0].text).toBe('text including R TD space');
|
||||||
|
});
|
||||||
|
it('should handle keywords', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- text including graph space and v --xB;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].text).toBe('text including graph space and v');
|
||||||
|
});
|
||||||
|
it('should handle keywords', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-- text including graph space and v --xB[blav]');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].text).toBe('text including graph space and v');
|
||||||
|
});
|
||||||
|
// it.skip('should handle text on open links',function(){
|
||||||
|
// const res = flow.parser.parse('graph TD;A-- text including graph space --B');
|
||||||
|
//
|
||||||
|
// const vert = flow.parser.yy.getVertices();
|
||||||
|
// const edges = flow.parser.yy.getEdges();
|
||||||
|
//
|
||||||
|
// expect(edges[0].text).toBe('text including graph space');
|
||||||
|
//
|
||||||
|
// });
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('it should handle text in vertices, ', function () {
|
||||||
|
it('should handle space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->C(Chimpansen hoppar);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('C').type).toBe('round');
|
||||||
|
expect(vert.get('C').text).toBe('Chimpansen hoppar');
|
||||||
|
});
|
||||||
|
|
||||||
|
const keywords = [
|
||||||
|
'graph',
|
||||||
|
'flowchart',
|
||||||
|
'flowchart-elk',
|
||||||
|
'style',
|
||||||
|
'default',
|
||||||
|
'linkStyle',
|
||||||
|
'interpolate',
|
||||||
|
'classDef',
|
||||||
|
'class',
|
||||||
|
'href',
|
||||||
|
'call',
|
||||||
|
'click',
|
||||||
|
'_self',
|
||||||
|
'_blank',
|
||||||
|
'_parent',
|
||||||
|
'_top',
|
||||||
|
'end',
|
||||||
|
'subgraph',
|
||||||
|
'kitty',
|
||||||
|
];
|
||||||
|
|
||||||
|
const shapes = [
|
||||||
|
{ start: '[', end: ']', name: 'square' },
|
||||||
|
{ start: '(', end: ')', name: 'round' },
|
||||||
|
{ start: '{', end: '}', name: 'diamond' },
|
||||||
|
{ start: '(-', end: '-)', name: 'ellipse' },
|
||||||
|
{ start: '([', end: '])', name: 'stadium' },
|
||||||
|
{ start: '>', end: ']', name: 'odd' },
|
||||||
|
{ start: '[(', end: ')]', name: 'cylinder' },
|
||||||
|
{ start: '(((', end: ')))', name: 'doublecircle' },
|
||||||
|
{ start: '[/', end: '\\]', name: 'trapezoid' },
|
||||||
|
{ start: '[\\', end: '/]', name: 'inv_trapezoid' },
|
||||||
|
{ start: '[/', end: '/]', name: 'lean_right' },
|
||||||
|
{ start: '[\\', end: '\\]', name: 'lean_left' },
|
||||||
|
{ start: '[[', end: ']]', name: 'subroutine' },
|
||||||
|
{ start: '{{', end: '}}', name: 'hexagon' },
|
||||||
|
];
|
||||||
|
|
||||||
|
shapes.forEach((shape) => {
|
||||||
|
it.each(keywords)(`should handle %s keyword in ${shape.name} vertex`, function (keyword) {
|
||||||
|
const rest = flow.parser.parse(
|
||||||
|
`graph TD;A_${keyword}_node-->B${shape.start}This node has a ${keyword} as text${shape.end};`
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
expect(vert.get('B').type).toBe(`${shape.name}`);
|
||||||
|
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it.each(keywords)('should handle %s keyword in rect vertex', function (keyword) {
|
||||||
|
const rest = flow.parser.parse(
|
||||||
|
`graph TD;A_${keyword}_node-->B[|borders:lt|This node has a ${keyword} as text];`
|
||||||
|
);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
expect(vert.get('B').type).toBe('rect');
|
||||||
|
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle edge case for odd vertex with node id ending with minus', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A_node-->odd->Vertex Text];');
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(vert.get('odd-').type).toBe('odd');
|
||||||
|
expect(vert.get('odd-').text).toBe('Vertex Text');
|
||||||
|
});
|
||||||
|
it('should allow forward slashes in lean_right vertices', function () {
|
||||||
|
const rest = flow.parser.parse(`graph TD;A_node-->B[/This node has a / as text/];`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
expect(vert.get('B').type).toBe('lean_right');
|
||||||
|
expect(vert.get('B').text).toBe(`This node has a / as text`);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should allow back slashes in lean_left vertices', function () {
|
||||||
|
const rest = flow.parser.parse(`graph TD;A_node-->B[\\This node has a \\ as text\\];`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
expect(vert.get('B').type).toBe('lean_left');
|
||||||
|
expect(vert.get('B').text).toBe(`This node has a \\ as text`);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle åäö and minus', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->C{Chimpansen hoppar åäö-ÅÄÖ};');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('C').type).toBe('diamond');
|
||||||
|
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle with åäö, minus and space and br', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->C(Chimpansen hoppar åäö <br> - ÅÄÖ);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('C').type).toBe('round');
|
||||||
|
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö <br> - ÅÄÖ');
|
||||||
|
});
|
||||||
|
// it.skip('should handle åäö, minus and space and br',function(){
|
||||||
|
// const res = flow.parser.parse('graph TD; A[Object(foo,bar)]-->B(Thing);');
|
||||||
|
//
|
||||||
|
// const vert = flow.parser.yy.getVertices();
|
||||||
|
// const edges = flow.parser.yy.getEdges();
|
||||||
|
//
|
||||||
|
// expect(vert.get('C').type).toBe('round');
|
||||||
|
// expect(vert.get('C').text).toBe(' A[Object(foo,bar)]-->B(Thing);');
|
||||||
|
// });
|
||||||
|
it('should handle unicode chars', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->C(Начало);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(vert.get('C').text).toBe('Начало');
|
||||||
|
});
|
||||||
|
it('should handle backslask', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->C(c:\\windows);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(vert.get('C').text).toBe('c:\\windows');
|
||||||
|
});
|
||||||
|
it('should handle CAPS', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->C(some CAPS);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('C').type).toBe('round');
|
||||||
|
expect(vert.get('C').text).toBe('some CAPS');
|
||||||
|
});
|
||||||
|
it('should handle directions', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->C(some URL);');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('C').type).toBe('round');
|
||||||
|
expect(vert.get('C').text).toBe('some URL');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle multi-line text', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A--o|text space|B;\n B-->|more text with space|C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_circle');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(edges.length).toBe(2);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
// expect(edges[0].text).toBe('text space');
|
||||||
|
expect(edges[1].start).toBe('B');
|
||||||
|
expect(edges[1].end).toBe('C');
|
||||||
|
expect(edges[1].text).toBe('more text with space');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text in vertices with space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A[chimpansen hoppar]-->C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').type).toBe('square');
|
||||||
|
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text in vertices with space with spaces between vertices and link', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A[chimpansen hoppar] --> C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').type).toBe('square');
|
||||||
|
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||||
|
});
|
||||||
|
it('should handle text including _ in vertices', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A[chimpansen_hoppar] --> C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').type).toBe('square');
|
||||||
|
expect(vert.get('A').text).toBe('chimpansen_hoppar');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle quoted text in vertices ', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A["chimpansen hoppar ()[]"] --> C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').type).toBe('square');
|
||||||
|
expect(vert.get('A').text).toBe('chimpansen hoppar ()[]');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text in circle vertices with space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A((chimpansen hoppar))-->C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').type).toBe('circle');
|
||||||
|
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text in ellipse vertices', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\nA(-this is an ellipse-)-->B');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').type).toBe('ellipse');
|
||||||
|
expect(vert.get('A').text).toBe('this is an ellipse');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not freeze when ellipse text has a `(`', function () {
|
||||||
|
expect(() => flow.parser.parse('graph\nX(- My Text (')).toThrowError();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text in diamond vertices with space', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A(chimpansen hoppar)-->C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').type).toBe('round');
|
||||||
|
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text in with ?', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A(?)-->|?|C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').text).toBe('?');
|
||||||
|
expect(edges[0].text).toBe('?');
|
||||||
|
});
|
||||||
|
it('should handle text in with éèêàçô', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A(éèêàçô)-->|éèêàçô|C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').text).toBe('éèêàçô');
|
||||||
|
expect(edges[0].text).toBe('éèêàçô');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text in with ,.?!+-*', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A(,.?!+-*)-->|,.?!+-*|C;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').text).toBe(',.?!+-*');
|
||||||
|
expect(edges[0].text).toBe(',.?!+-*');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw error at nested set of brackets', function () {
|
||||||
|
const str = 'graph TD; A[This is a () in text];';
|
||||||
|
expect(() => flow.parser.parse(str)).toThrowError("got 'PS'");
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw error for strings and text at the same time', function () {
|
||||||
|
const str = 'graph TD;A(this node has "string" and text)-->|this link has "string" and text|C;';
|
||||||
|
|
||||||
|
expect(() => flow.parser.parse(str)).toThrowError("got 'STR'");
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw error for escaping quotes in text state', function () {
|
||||||
|
//prettier-ignore
|
||||||
|
const str = 'graph TD; A[This is a \"()\" in text];'; //eslint-disable-line no-useless-escape
|
||||||
|
|
||||||
|
expect(() => flow.parser.parse(str)).toThrowError("got 'STR'");
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw error for nested quoatation marks', function () {
|
||||||
|
const str = 'graph TD; A["This is a "()" in text"];';
|
||||||
|
|
||||||
|
expect(() => flow.parser.parse(str)).toThrowError("Expecting 'SQE'");
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw error', function () {
|
||||||
|
const str = `graph TD; node[hello ) world] --> works`;
|
||||||
|
expect(() => flow.parser.parse(str)).toThrowError("got 'PE'");
|
||||||
|
});
|
||||||
|
});
|
@@ -0,0 +1,222 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('when parsing flowcharts', function () {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
flow.parser.yy.setGen('gen-2');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle chaining of vertices', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
A-->B-->C;
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(edges.length).toBe(2);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[1].start).toBe('B');
|
||||||
|
expect(edges[1].end).toBe('C');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(edges[1].text).toBe('');
|
||||||
|
});
|
||||||
|
it('should handle chaining of vertices', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
A & B --> C;
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(edges.length).toBe(2);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('C');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[1].start).toBe('B');
|
||||||
|
expect(edges[1].end).toBe('C');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(edges[1].text).toBe('');
|
||||||
|
});
|
||||||
|
it('should multiple vertices in link statement in the begining', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
A-->B & C;
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(edges.length).toBe(2);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[1].start).toBe('A');
|
||||||
|
expect(edges[1].end).toBe('C');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(edges[1].text).toBe('');
|
||||||
|
});
|
||||||
|
it('should multiple vertices in link statement at the end', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
A & B--> C & D;
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(vert.get('D').id).toBe('D');
|
||||||
|
expect(edges.length).toBe(4);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('C');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[1].start).toBe('A');
|
||||||
|
expect(edges[1].end).toBe('D');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(edges[1].text).toBe('');
|
||||||
|
expect(edges[2].start).toBe('B');
|
||||||
|
expect(edges[2].end).toBe('C');
|
||||||
|
expect(edges[2].type).toBe('arrow_point');
|
||||||
|
expect(edges[2].text).toBe('');
|
||||||
|
expect(edges[3].start).toBe('B');
|
||||||
|
expect(edges[3].end).toBe('D');
|
||||||
|
expect(edges[3].type).toBe('arrow_point');
|
||||||
|
expect(edges[3].text).toBe('');
|
||||||
|
});
|
||||||
|
it('should handle chaining of vertices at both ends at once', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
A & B--> C & D;
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(vert.get('D').id).toBe('D');
|
||||||
|
expect(edges.length).toBe(4);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('C');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[1].start).toBe('A');
|
||||||
|
expect(edges[1].end).toBe('D');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(edges[1].text).toBe('');
|
||||||
|
expect(edges[2].start).toBe('B');
|
||||||
|
expect(edges[2].end).toBe('C');
|
||||||
|
expect(edges[2].type).toBe('arrow_point');
|
||||||
|
expect(edges[2].text).toBe('');
|
||||||
|
expect(edges[3].start).toBe('B');
|
||||||
|
expect(edges[3].end).toBe('D');
|
||||||
|
expect(edges[3].type).toBe('arrow_point');
|
||||||
|
expect(edges[3].text).toBe('');
|
||||||
|
});
|
||||||
|
it('should handle chaining and multiple nodes in link statement FVC ', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
A --> B & B2 & C --> D2;
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('B2').id).toBe('B2');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(vert.get('D2').id).toBe('D2');
|
||||||
|
expect(edges.length).toBe(6);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
expect(edges[1].start).toBe('A');
|
||||||
|
expect(edges[1].end).toBe('B2');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(edges[1].text).toBe('');
|
||||||
|
expect(edges[2].start).toBe('A');
|
||||||
|
expect(edges[2].end).toBe('C');
|
||||||
|
expect(edges[2].type).toBe('arrow_point');
|
||||||
|
expect(edges[2].text).toBe('');
|
||||||
|
expect(edges[3].start).toBe('B');
|
||||||
|
expect(edges[3].end).toBe('D2');
|
||||||
|
expect(edges[3].type).toBe('arrow_point');
|
||||||
|
expect(edges[3].text).toBe('');
|
||||||
|
expect(edges[4].start).toBe('B2');
|
||||||
|
expect(edges[4].end).toBe('D2');
|
||||||
|
expect(edges[4].type).toBe('arrow_point');
|
||||||
|
expect(edges[4].text).toBe('');
|
||||||
|
expect(edges[5].start).toBe('C');
|
||||||
|
expect(edges[5].end).toBe('D2');
|
||||||
|
expect(edges[5].type).toBe('arrow_point');
|
||||||
|
expect(edges[5].text).toBe('');
|
||||||
|
});
|
||||||
|
it('should handle chaining and multiple nodes in link statement with extra info in statements', function () {
|
||||||
|
const res = flow.parser.parse(`
|
||||||
|
graph TD
|
||||||
|
A[ h ] -- hello --> B[" test "]:::exClass & C --> D;
|
||||||
|
classDef exClass background:#bbb,border:1px solid red;
|
||||||
|
`);
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
|
||||||
|
expect(classes.get('exClass').styles.length).toBe(2);
|
||||||
|
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||||
|
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(vert.get('B').classes[0]).toBe('exClass');
|
||||||
|
expect(vert.get('C').id).toBe('C');
|
||||||
|
expect(vert.get('D').id).toBe('D');
|
||||||
|
expect(edges.length).toBe(4);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('hello');
|
||||||
|
expect(edges[1].start).toBe('A');
|
||||||
|
expect(edges[1].end).toBe('C');
|
||||||
|
expect(edges[1].type).toBe('arrow_point');
|
||||||
|
expect(edges[1].text).toBe('hello');
|
||||||
|
expect(edges[2].start).toBe('B');
|
||||||
|
expect(edges[2].end).toBe('D');
|
||||||
|
expect(edges[2].type).toBe('arrow_point');
|
||||||
|
expect(edges[2].text).toBe('');
|
||||||
|
expect(edges[3].start).toBe('C');
|
||||||
|
expect(edges[3].end).toBe('D');
|
||||||
|
expect(edges[3].type).toBe('arrow_point');
|
||||||
|
expect(edges[3].text).toBe('');
|
||||||
|
});
|
||||||
|
});
|
240
packages/mermaid/src/diagrams/swimlane/parser/flow.spec.js
Normal file
240
packages/mermaid/src/diagrams/swimlane/parser/flow.spec.js
Normal file
@@ -0,0 +1,240 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { cleanupComments } from '../../../diagram-api/comments.js';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('parsing a flow chart', function () {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a trailing whitespaces after statements', function () {
|
||||||
|
const res = flow.parser.parse(cleanupComments('graph TD;\n\n\n %% Comment\n A-->B; \n B-->C;'));
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
expect(edges.length).toBe(2);
|
||||||
|
expect(edges[0].start).toBe('A');
|
||||||
|
expect(edges[0].end).toBe('B');
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
expect(edges[0].text).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle node names with "end" substring', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\nendpoint --> sender');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('endpoint').id).toBe('endpoint');
|
||||||
|
expect(vert.get('sender').id).toBe('sender');
|
||||||
|
expect(edges[0].start).toBe('endpoint');
|
||||||
|
expect(edges[0].end).toBe('sender');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle node names ending with keywords', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\nblend --> monograph');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('blend').id).toBe('blend');
|
||||||
|
expect(vert.get('monograph').id).toBe('monograph');
|
||||||
|
expect(edges[0].start).toBe('blend');
|
||||||
|
expect(edges[0].end).toBe('monograph');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should allow default in the node name/id', function () {
|
||||||
|
const res = flow.parser.parse('graph TD\ndefault --> monograph');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('default').id).toBe('default');
|
||||||
|
expect(vert.get('monograph').id).toBe('monograph');
|
||||||
|
expect(edges[0].start).toBe('default');
|
||||||
|
expect(edges[0].end).toBe('monograph');
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('special characters should be handled.', function () {
|
||||||
|
const charTest = function (char, result) {
|
||||||
|
const res = flow.parser.parse('graph TD;A(' + char + ')-->B;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(vert.get('A').id).toBe('A');
|
||||||
|
expect(vert.get('B').id).toBe('B');
|
||||||
|
if (result) {
|
||||||
|
expect(vert.get('A').text).toBe(result);
|
||||||
|
} else {
|
||||||
|
expect(vert.get('A').text).toBe(char);
|
||||||
|
}
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should be able to parse a '.'", function () {
|
||||||
|
charTest('.');
|
||||||
|
charTest('Start 103a.a1');
|
||||||
|
});
|
||||||
|
|
||||||
|
// it('should be able to parse text containing \'_\'', function () {
|
||||||
|
// charTest('_')
|
||||||
|
// })
|
||||||
|
|
||||||
|
it("should be able to parse a ':'", function () {
|
||||||
|
charTest(':');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be able to parse a ','", function () {
|
||||||
|
charTest(',');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be able to parse text containing '-'", function () {
|
||||||
|
charTest('a-b');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be able to parse a '+'", function () {
|
||||||
|
charTest('+');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be able to parse a '*'", function () {
|
||||||
|
charTest('*');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be able to parse a '<'", function () {
|
||||||
|
charTest('<', '<');
|
||||||
|
});
|
||||||
|
|
||||||
|
// it("should be able to parse a '>'", function() {
|
||||||
|
// charTest('>', '>');
|
||||||
|
// });
|
||||||
|
|
||||||
|
// it("should be able to parse a '='", function() {
|
||||||
|
// charTest('=', '=');
|
||||||
|
// });
|
||||||
|
it("should be able to parse a '&'", function () {
|
||||||
|
charTest('&');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use direction in node ids', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;' + '\n';
|
||||||
|
statement = statement + ' node1TB\n';
|
||||||
|
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
expect(vertices.get('node1TB').id).toBe('node1TB');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use direction in node ids', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TD;A--x|text including URL space|B;';
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
const classes = flow.parser.yy.getClasses();
|
||||||
|
expect(vertices.get('A').id).toBe('A');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be possible to use numbers as labels', function () {
|
||||||
|
let statement = '';
|
||||||
|
|
||||||
|
statement = statement + 'graph TB;subgraph "number as labels";1;end;';
|
||||||
|
const res = flow.parser.parse(statement);
|
||||||
|
const vertices = flow.parser.yy.getVertices();
|
||||||
|
|
||||||
|
expect(vertices.get('1').id).toBe('1');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should add accTitle and accDescr to flow chart', function () {
|
||||||
|
const flowChart = `graph LR
|
||||||
|
accTitle: Big decisions
|
||||||
|
accDescr: Flow chart of the decision making process
|
||||||
|
A[Hard] -->|Text| B(Round)
|
||||||
|
B --> C{Decision}
|
||||||
|
C -->|One| D[Result 1]
|
||||||
|
C -->|Two| E[Result 2]
|
||||||
|
`;
|
||||||
|
|
||||||
|
flow.parser.parse(flowChart);
|
||||||
|
expect(flow.parser.yy.getAccTitle()).toBe('Big decisions');
|
||||||
|
expect(flow.parser.yy.getAccDescription()).toBe('Flow chart of the decision making process');
|
||||||
|
});
|
||||||
|
it('should add accTitle and a multi line accDescr to flow chart', function () {
|
||||||
|
const flowChart = `graph LR
|
||||||
|
accTitle: Big decisions
|
||||||
|
|
||||||
|
accDescr {
|
||||||
|
Flow chart of the decision making process
|
||||||
|
with a second line
|
||||||
|
}
|
||||||
|
|
||||||
|
A[Hard] -->|Text| B(Round)
|
||||||
|
B --> C{Decision}
|
||||||
|
C -->|One| D[Result 1]
|
||||||
|
C -->|Two| E[Result 2]
|
||||||
|
`;
|
||||||
|
|
||||||
|
flow.parser.parse(flowChart);
|
||||||
|
expect(flow.parser.yy.getAccTitle()).toBe('Big decisions');
|
||||||
|
expect(flow.parser.yy.getAccDescription()).toBe(
|
||||||
|
`Flow chart of the decision making process
|
||||||
|
with a second line`
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
for (const unsafeProp of ['__proto__', 'constructor']) {
|
||||||
|
it(`should work with node id ${unsafeProp}`, function () {
|
||||||
|
const flowChart = `graph LR
|
||||||
|
${unsafeProp} --> A;`;
|
||||||
|
|
||||||
|
expect(() => {
|
||||||
|
flow.parser.parse(flowChart);
|
||||||
|
}).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it(`should work with tooltip id ${unsafeProp}`, function () {
|
||||||
|
const flowChart = `graph LR
|
||||||
|
click ${unsafeProp} callback "${unsafeProp}";`;
|
||||||
|
|
||||||
|
expect(() => {
|
||||||
|
flow.parser.parse(flowChart);
|
||||||
|
}).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it(`should work with class id ${unsafeProp}`, function () {
|
||||||
|
const flowChart = `graph LR
|
||||||
|
${unsafeProp} --> A;
|
||||||
|
classDef ${unsafeProp} color:#ffffff,fill:#000000;
|
||||||
|
class ${unsafeProp} ${unsafeProp};`;
|
||||||
|
|
||||||
|
expect(() => {
|
||||||
|
flow.parser.parse(flowChart);
|
||||||
|
}).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it(`should work with subgraph id ${unsafeProp}`, function () {
|
||||||
|
const flowChart = `graph LR
|
||||||
|
${unsafeProp} --> A;
|
||||||
|
subgraph ${unsafeProp}
|
||||||
|
C --> D;
|
||||||
|
end;`;
|
||||||
|
|
||||||
|
expect(() => {
|
||||||
|
flow.parser.parse(flowChart);
|
||||||
|
}).not.toThrow();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
312
packages/mermaid/src/diagrams/swimlane/parser/subgraph.spec.js
Normal file
312
packages/mermaid/src/diagrams/swimlane/parser/subgraph.spec.js
Normal file
@@ -0,0 +1,312 @@
|
|||||||
|
import flowDb from '../flowDb.js';
|
||||||
|
import flow from './flow.jison';
|
||||||
|
import { setConfig } from '../../../config.js';
|
||||||
|
|
||||||
|
setConfig({
|
||||||
|
securityLevel: 'strict',
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('when parsing subgraphs', function () {
|
||||||
|
beforeEach(function () {
|
||||||
|
flow.parser.yy = flowDb;
|
||||||
|
flow.parser.yy.clear();
|
||||||
|
flow.parser.yy.setGen('gen-2');
|
||||||
|
});
|
||||||
|
it('should handle subgraph with tab indentation', function () {
|
||||||
|
const res = flow.parser.parse('graph TB\nsubgraph One\n\ta1-->a2\nend');
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
|
||||||
|
expect(subgraph.nodes.length).toBe(2);
|
||||||
|
expect(subgraph.nodes[0]).toBe('a2');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a1');
|
||||||
|
expect(subgraph.title).toBe('One');
|
||||||
|
expect(subgraph.id).toBe('One');
|
||||||
|
});
|
||||||
|
it('should handle subgraph with chaining nodes indentation', function () {
|
||||||
|
const res = flow.parser.parse('graph TB\nsubgraph One\n\ta1-->a2-->a3\nend');
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(3);
|
||||||
|
expect(subgraph.nodes[0]).toBe('a3');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a2');
|
||||||
|
expect(subgraph.nodes[2]).toBe('a1');
|
||||||
|
expect(subgraph.title).toBe('One');
|
||||||
|
expect(subgraph.id).toBe('One');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle subgraph with multiple words in title', function () {
|
||||||
|
const res = flow.parser.parse('graph TB\nsubgraph "Some Title"\n\ta1-->a2\nend');
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(2);
|
||||||
|
expect(subgraph.nodes[0]).toBe('a2');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a1');
|
||||||
|
expect(subgraph.title).toBe('Some Title');
|
||||||
|
expect(subgraph.id).toBe('subGraph0');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle subgraph with id and title notation', function () {
|
||||||
|
const res = flow.parser.parse('graph TB\nsubgraph some-id[Some Title]\n\ta1-->a2\nend');
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(2);
|
||||||
|
expect(subgraph.nodes[0]).toBe('a2');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a1');
|
||||||
|
expect(subgraph.title).toBe('Some Title');
|
||||||
|
expect(subgraph.id).toBe('some-id');
|
||||||
|
});
|
||||||
|
|
||||||
|
it.skip('should handle subgraph without id and space in title', function () {
|
||||||
|
const res = flow.parser.parse('graph TB\nsubgraph Some Title\n\ta1-->a2\nend');
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(2);
|
||||||
|
expect(subgraph.nodes[0]).toBe('a1');
|
||||||
|
expect(subgraph.nodes[1]).toBe('a2');
|
||||||
|
expect(subgraph.title).toBe('Some Title');
|
||||||
|
expect(subgraph.id).toBe('some-id');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle subgraph id starting with a number', function () {
|
||||||
|
const res = flow.parser.parse(`graph TD
|
||||||
|
A[Christmas] -->|Get money| B(Go shopping)
|
||||||
|
subgraph 1test
|
||||||
|
A
|
||||||
|
end`);
|
||||||
|
|
||||||
|
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||||
|
expect(subgraphs.length).toBe(1);
|
||||||
|
const subgraph = subgraphs[0];
|
||||||
|
expect(subgraph.nodes.length).toBe(1);
|
||||||
|
expect(subgraph.nodes[0]).toBe('A');
|
||||||
|
expect(subgraph.id).toBe('1test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle subgraphs1', function () {
|
||||||
|
const res = flow.parser.parse('graph TD;A-->B;subgraph myTitle;c-->d;end;');
|
||||||
|
|
||||||
|
const vert = flow.parser.yy.getVertices();
|
||||||
|
const edges = flow.parser.yy.getEdges();
|
||||||
|
|
||||||
|
expect(edges[0].type).toBe('arrow_point');
|
||||||
|
});
|
||||||
|
it('should handle subgraphs with title in quotes', function () {
  // A quoted subgraph title may contain spaces.
  const parseResult = flow.parser.parse('graph TD;A-->B;subgraph "title in quotes";c-->d;end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();
  const allSubgraphs = flow.parser.yy.getSubGraphs();

  expect(allSubgraphs.length).toBe(1);
  const [firstSubgraph] = allSubgraphs;

  // The quotes are stripped; only their content becomes the title.
  expect(firstSubgraph.title).toBe('title in quotes');
  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
it('should handle subgraphs in old style that was broken', function () {
  // Unquoted multi-word titles (the legacy form) must parse again.
  const parseResult = flow.parser.parse('graph TD;A-->B;subgraph old style that is broken;c-->d;end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();
  const allSubgraphs = flow.parser.yy.getSubGraphs();

  expect(allSubgraphs.length).toBe(1);
  const [firstSubgraph] = allSubgraphs;

  // Every word after 'subgraph' up to the separator becomes the title.
  expect(firstSubgraph.title).toBe('old style that is broken');
  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
it('should handle subgraphs with dashes in the title', function () {
  // Dashes inside an unquoted title must not be mistaken for edge syntax.
  const parseResult = flow.parser.parse('graph TD;A-->B;subgraph a-b-c;c-->d;end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();
  const allSubgraphs = flow.parser.yy.getSubGraphs();

  expect(allSubgraphs.length).toBe(1);
  const [firstSubgraph] = allSubgraphs;

  expect(firstSubgraph.title).toBe('a-b-c');
  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
it('should handle subgraphs with id and title in brackets', function () {
  // 'uid1[text of doom]' — id outside the brackets, title inside.
  const parseResult = flow.parser.parse('graph TD;A-->B;subgraph uid1[text of doom];c-->d;end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();
  const allSubgraphs = flow.parser.yy.getSubGraphs();

  expect(allSubgraphs.length).toBe(1);
  const [firstSubgraph] = allSubgraphs;

  expect(firstSubgraph.title).toBe('text of doom');
  expect(firstSubgraph.id).toBe('uid1');
  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
it('should handle subgraphs with id and title in brackets and quotes', function () {
  // 'uid2["text of doom"]' — quoted title inside the id brackets.
  const parseResult = flow.parser.parse('graph TD;A-->B;subgraph uid2["text of doom"];c-->d;end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();
  const allSubgraphs = flow.parser.yy.getSubGraphs();

  expect(allSubgraphs.length).toBe(1);
  const [firstSubgraph] = allSubgraphs;

  // Quotes are stripped from the bracketed title.
  expect(firstSubgraph.title).toBe('text of doom');
  expect(firstSubgraph.id).toBe('uid2');
  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
it('should handle subgraphs with id and title in brackets without spaces', function () {
  // Single-word bracketed title: 'uid2[textofdoom]'.
  const parseResult = flow.parser.parse('graph TD;A-->B;subgraph uid2[textofdoom];c-->d;end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();
  const allSubgraphs = flow.parser.yy.getSubGraphs();

  expect(allSubgraphs.length).toBe(1);
  const [firstSubgraph] = allSubgraphs;

  expect(firstSubgraph.title).toBe('textofdoom');
  expect(firstSubgraph.id).toBe('uid2');
  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
|
||||||
|
it('should handle subgraphs2', function () {
  // Newline-separated form with a blank line inside the subgraph body.
  const parseResult = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\n\n c-->d \nend\n');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();

  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
|
||||||
|
it('should handle subgraphs3', function () {
  // Trailing whitespace after the subgraph title must be tolerated.
  const parseResult = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle \n\n c-->d \nend\n');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();

  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
|
||||||
|
it('should handle nested subgraphs', function () {
  // Two 'inner' subgraphs nested inside 'myTitle'; the test only asserts
  // that the definition parses without throwing.
  const definition = [
    'graph TD\n',
    'A-->B\n',
    'subgraph myTitle\n\n',
    ' c-->d \n\n',
    ' subgraph inner\n\n e-->f \n end \n\n',
    ' subgraph inner\n\n h-->i \n end \n\n',
    'end\n',
  ].join('');
  const parseResult = flow.parser.parse(definition);
});
|
||||||
|
|
||||||
|
it('should handle subgraphs4', function () {
  // Mixed separators: newlines in the body, a semicolon after 'end'.
  const parseResult = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\nc-->d\nend;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();

  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
|
||||||
|
it('should handle subgraphs5', function () {
  // Edge label ('-- text -->') and a chained edge inside the subgraph body.
  const parseResult = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\nc-- text -->d\nd-->e\n end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();

  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
it('should handle subgraphs with multi node statements in it', function () {
  // Ampersand-grouped endpoints (a & b --> c & e) inside a subgraph.
  const parseResult = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\na & b --> c & e\n end;');

  const vertices = flow.parser.yy.getVertices();
  const parsedEdges = flow.parser.yy.getEdges();

  expect(parsedEdges[0].type).toBe('arrow_point');
});
|
||||||
|
it('should handle nested subgraphs 1', function () {
  // Subgraph A is declared first and references node B (also a subgraph id
  // declared later); membership must resolve regardless of declaration order.
  const res = flow.parser.parse(`flowchart TB
subgraph A
b-->B
a
end
a-->c
subgraph B
c
end`);

  const subgraphs = flow.parser.yy.getSubGraphs();
  expect(subgraphs.length).toBe(2);

  const subgraphA = subgraphs.find((o) => o.id === 'A');
  const subgraphB = subgraphs.find((o) => o.id === 'B');

  // 'c' belongs only to B; A contains exactly the ids used in its body.
  expect(subgraphB.nodes[0]).toBe('c');
  expect(subgraphA.nodes).toContain('B');
  expect(subgraphA.nodes).toContain('b');
  expect(subgraphA.nodes).toContain('a');
  expect(subgraphA.nodes).not.toContain('c');
});
|
||||||
|
it('should handle nested subgraphs 2', function () {
  // Edges are declared before any subgraph; A lists its members (a, b, B)
  // explicitly after the fact. Membership must match regardless of order.
  const res = flow.parser.parse(`flowchart TB
b-->B
a-->c
subgraph B
c
end
subgraph A
a
b
B
end`);

  const subgraphs = flow.parser.yy.getSubGraphs();
  expect(subgraphs.length).toBe(2);

  const subgraphA = subgraphs.find((o) => o.id === 'A');
  const subgraphB = subgraphs.find((o) => o.id === 'B');

  // 'c' stays in B only; A contains exactly its listed members.
  expect(subgraphB.nodes[0]).toBe('c');
  expect(subgraphA.nodes).toContain('B');
  expect(subgraphA.nodes).toContain('b');
  expect(subgraphA.nodes).toContain('a');
  expect(subgraphA.nodes).not.toContain('c');
});
|
||||||
|
it('should handle nested subgraphs 3', function () {
  // B is declared first; A is declared last and references B via an edge.
  // Same membership expectations as the other ordering variants.
  const res = flow.parser.parse(`flowchart TB
subgraph B
c
end
a-->c
subgraph A
b-->B
a
end`);

  const subgraphs = flow.parser.yy.getSubGraphs();
  expect(subgraphs.length).toBe(2);

  const subgraphA = subgraphs.find((o) => o.id === 'A');
  const subgraphB = subgraphs.find((o) => o.id === 'B');

  expect(subgraphB.nodes[0]).toBe('c');
  expect(subgraphA.nodes).toContain('B');
  expect(subgraphA.nodes).toContain('b');
  expect(subgraphA.nodes).toContain('a');
  // 'c' must not leak from B into A.
  expect(subgraphA.nodes).not.toContain('c');
});
|
||||||
|
});
|
622
packages/mermaid/src/diagrams/swimlane/parser/swimlane.jison
Normal file
622
packages/mermaid/src/diagrams/swimlane/parser/swimlane.jison
Normal file
@@ -0,0 +1,622 @@
|
|||||||
|
/** mermaid
|
||||||
|
* https://mermaidjs.github.io/
|
||||||
|
* (c) 2015 Knut Sveidqvist
|
||||||
|
* MIT license.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/* lexical grammar */
|
||||||
|
%lex
|
||||||
|
%x string
|
||||||
|
%x md_string
|
||||||
|
%x acc_title
|
||||||
|
%x acc_descr
|
||||||
|
%x acc_descr_multiline
|
||||||
|
%x dir
|
||||||
|
%x vertex
|
||||||
|
%x text
|
||||||
|
%x ellipseText
|
||||||
|
%x trapText
|
||||||
|
%x edgeText
|
||||||
|
%x thickEdgeText
|
||||||
|
%x dottedEdgeText
|
||||||
|
%x click
|
||||||
|
%x href
|
||||||
|
%x callbackname
|
||||||
|
%x callbackargs
|
||||||
|
%x shapeData
|
||||||
|
%x shapeDataStr
|
||||||
|
%x shapeDataEndBracket
|
||||||
|
|
||||||
|
%%
|
||||||
|
accTitle\s*":"\s* { this.begin("acc_title");return 'acc_title'; }
|
||||||
|
<acc_title>(?!\n|;|#)*[^\n]* { this.popState(); return "acc_title_value"; }
|
||||||
|
accDescr\s*":"\s* { this.begin("acc_descr");return 'acc_descr'; }
|
||||||
|
<acc_descr>(?!\n|;|#)*[^\n]* { this.popState(); return "acc_descr_value"; }
|
||||||
|
accDescr\s*"{"\s* { this.begin("acc_descr_multiline");}
|
||||||
|
<acc_descr_multiline>[\}] { this.popState(); }
|
||||||
|
<acc_descr_multiline>[^\}]* return "acc_descr_multiline_value";
|
||||||
|
// <acc_descr_multiline>.*[^\n]* { return "acc_descr_line"}
|
||||||
|
|
||||||
|
|
||||||
|
\@\{ {
|
||||||
|
// console.log('=> shapeData', yytext);
|
||||||
|
this.pushState("shapeData"); yytext=""; return 'SHAPE_DATA' }
|
||||||
|
<shapeData>["] {
|
||||||
|
// console.log('=> shapeDataStr', yytext);
|
||||||
|
this.pushState("shapeDataStr");
|
||||||
|
return 'SHAPE_DATA';
|
||||||
|
}
|
||||||
|
<shapeDataStr>["] {
|
||||||
|
// console.log('shapeData <==', yytext);
|
||||||
|
this.popState(); return 'SHAPE_DATA'}
|
||||||
|
<shapeDataStr>[^\"]+ {
|
||||||
|
// console.log('shapeData', yytext);
|
||||||
|
const re = /\n\s*/g;
|
||||||
|
yytext = yytext.replace(re,"<br/>");
|
||||||
|
return 'SHAPE_DATA'}
|
||||||
|
<shapeData>[^}^"]+ {
|
||||||
|
// console.log('shapeData', yytext);
|
||||||
|
return 'SHAPE_DATA';
|
||||||
|
}
|
||||||
|
<shapeData>"}" {
|
||||||
|
// console.log('<== root', yytext)
|
||||||
|
this.popState();
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
---interactivity command---
|
||||||
|
'call' adds a callback to the specified node. 'call' can only be specified when
|
||||||
|
the line was introduced with 'click'.
|
||||||
|
'call <callbackname>(<args>)' attaches the function 'callbackname' with the specified
|
||||||
|
arguments to the node that was specified by 'click'.
|
||||||
|
Function arguments are optional: 'call <callbackname>()' simply executes 'callbackname' without any arguments.
|
||||||
|
*/
|
||||||
|
"call"[\s]+ this.begin("callbackname");
|
||||||
|
<callbackname>\([\s]*\) this.popState();
|
||||||
|
<callbackname>\( this.popState(); this.begin("callbackargs");
|
||||||
|
<callbackname>[^(]* return 'CALLBACKNAME';
|
||||||
|
<callbackargs>\) this.popState();
|
||||||
|
<callbackargs>[^)]* return 'CALLBACKARGS';
|
||||||
|
|
||||||
|
|
||||||
|
<md_string>[^`"]+ { return "MD_STR";}
|
||||||
|
<md_string>[`]["] { this.popState();}
|
||||||
|
<*>["][`] { this.begin("md_string");}
|
||||||
|
<string>[^"]+ { return "STR"; }
|
||||||
|
<string>["] this.popState();
|
||||||
|
<*>["] this.pushState("string");
|
||||||
|
"style" return 'STYLE';
|
||||||
|
"default" return 'DEFAULT';
|
||||||
|
"linkStyle" return 'LINKSTYLE';
|
||||||
|
"interpolate" return 'INTERPOLATE';
|
||||||
|
"classDef" return 'CLASSDEF';
|
||||||
|
"class" return 'CLASS';
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
---interactivity command---
|
||||||
|
'href' adds a link to the specified node. 'href' can only be specified when the
|
||||||
|
line was introduced with 'click'.
|
||||||
|
'href "<link>"' attaches the specified link to the node that was specified by 'click'.
|
||||||
|
*/
|
||||||
|
"href"[\s] return 'HREF';
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
'click' is the keyword to introduce a line that contains interactivity commands.
|
||||||
|
'click' must be followed by an existing node-id. All commands are attached to
|
||||||
|
that id.
|
||||||
|
'click <id>' can be followed by href or call commands in any desired order
|
||||||
|
*/
|
||||||
|
"click"[\s]+ this.begin("click");
|
||||||
|
<click>[\s\n] this.popState();
|
||||||
|
<click>[^\s\n]* return 'CLICK';
|
||||||
|
|
||||||
|
"swimlane" {if(yy.lex.firstGraph()){this.begin("dir");} return 'GRAPH';}
|
||||||
|
"lane" return 'subgraph';
|
||||||
|
"end"\b\s* return 'end';
|
||||||
|
|
||||||
|
"_self" return 'LINK_TARGET';
|
||||||
|
"_blank" return 'LINK_TARGET';
|
||||||
|
"_parent" return 'LINK_TARGET';
|
||||||
|
"_top" return 'LINK_TARGET';
|
||||||
|
|
||||||
|
<dir>(\r?\n)*\s*\n { this.popState(); return 'NODIR'; }
|
||||||
|
<dir>\s*"LR" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"RL" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"TB" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"BT" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"TD" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"BR" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"<" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*">" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"^" { this.popState(); return 'DIR'; }
|
||||||
|
<dir>\s*"v" { this.popState(); return 'DIR'; }
|
||||||
|
|
||||||
|
.*direction\s+TB[^\n]* return 'direction_tb';
|
||||||
|
.*direction\s+BT[^\n]* return 'direction_bt';
|
||||||
|
.*direction\s+RL[^\n]* return 'direction_rl';
|
||||||
|
.*direction\s+LR[^\n]* return 'direction_lr';
|
||||||
|
|
||||||
|
[0-9]+ return 'NUM';
|
||||||
|
\# return 'BRKT';
|
||||||
|
":::" return 'STYLE_SEPARATOR';
|
||||||
|
":" return 'COLON';
|
||||||
|
"&" return 'AMP';
|
||||||
|
";" return 'SEMI';
|
||||||
|
"," return 'COMMA';
|
||||||
|
"*" return 'MULT';
|
||||||
|
|
||||||
|
<INITIAL,edgeText>\s*[xo<]?\-\-+[-xo>]\s* { this.popState(); return 'LINK'; }
|
||||||
|
<INITIAL>\s*[xo<]?\-\-\s* { this.pushState("edgeText"); return 'START_LINK'; }
|
||||||
|
<edgeText>[^-]|\-(?!\-)+ return 'EDGE_TEXT';
|
||||||
|
|
||||||
|
<INITIAL,thickEdgeText>\s*[xo<]?\=\=+[=xo>]\s* { this.popState(); return 'LINK'; }
|
||||||
|
<INITIAL>\s*[xo<]?\=\=\s* { this.pushState("thickEdgeText"); return 'START_LINK'; }
|
||||||
|
<thickEdgeText>[^=]|\=(?!=) return 'EDGE_TEXT';
|
||||||
|
|
||||||
|
<INITIAL,dottedEdgeText>\s*[xo<]?\-?\.+\-[xo>]?\s* { this.popState(); return 'LINK'; }
|
||||||
|
<INITIAL>\s*[xo<]?\-\.\s* { this.pushState("dottedEdgeText"); return 'START_LINK'; }
|
||||||
|
<dottedEdgeText>[^\.]|\.(?!-) return 'EDGE_TEXT';
|
||||||
|
|
||||||
|
|
||||||
|
<*>\s*\~\~[\~]+\s* return 'LINK';
|
||||||
|
|
||||||
|
<ellipseText>[-/\)][\)] { this.popState(); return '-)'; }
|
||||||
|
<ellipseText>[^\(\)\[\]\{\}]|-\!\)+ return "TEXT"
|
||||||
|
<*>"(-" { this.pushState("ellipseText"); return '(-'; }
|
||||||
|
|
||||||
|
<text>"])" { this.popState(); return 'STADIUMEND'; }
|
||||||
|
<*>"([" { this.pushState("text"); return 'STADIUMSTART'; }
|
||||||
|
|
||||||
|
<text>"]]" { this.popState(); return 'SUBROUTINEEND'; }
|
||||||
|
<*>"[[" { this.pushState("text"); return 'SUBROUTINESTART'; }
|
||||||
|
|
||||||
|
"[|" { return 'VERTEX_WITH_PROPS_START'; }
|
||||||
|
|
||||||
|
\> { this.pushState("text"); return 'TAGEND'; }
|
||||||
|
|
||||||
|
<text>")]" { this.popState(); return 'CYLINDEREND'; }
|
||||||
|
<*>"[(" { this.pushState("text") ;return 'CYLINDERSTART'; }
|
||||||
|
|
||||||
|
<text>")))" { this.popState(); return 'DOUBLECIRCLEEND'; }
|
||||||
|
<*>"(((" { this.pushState("text"); return 'DOUBLECIRCLESTART'; }
|
||||||
|
|
||||||
|
<trapText>[\\(?=\])][\]] { this.popState(); return 'TRAPEND'; }
|
||||||
|
<trapText>\/(?=\])\] { this.popState(); return 'INVTRAPEND'; }
|
||||||
|
<trapText>\/(?!\])|\\(?!\])|[^\\\[\]\(\)\{\}\/]+ return 'TEXT';
|
||||||
|
<*>"[/" { this.pushState("trapText"); return 'TRAPSTART'; }
|
||||||
|
|
||||||
|
<*>"[\\" { this.pushState("trapText"); return 'INVTRAPSTART'; }
|
||||||
|
|
||||||
|
|
||||||
|
"<" return 'TAGSTART';
|
||||||
|
">" return 'TAGEND';
|
||||||
|
"^" return 'UP';
|
||||||
|
"\|" return 'SEP';
|
||||||
|
"v" return 'DOWN';
|
||||||
|
"*" return 'MULT';
|
||||||
|
"#" return 'BRKT';
|
||||||
|
"&" return 'AMP';
|
||||||
|
([A-Za-z0-9!"\#$%&'*+\.`?\\_\/]|\-(?=[^\>\-\.])|=(?!=))+ return 'NODE_STRING';
|
||||||
|
"-" return 'MINUS'
|
||||||
|
[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|
|
||||||
|
[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|
|
||||||
|
[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|
|
||||||
|
[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|
|
||||||
|
[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|
|
||||||
|
[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|
|
||||||
|
[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|
|
||||||
|
[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|
|
||||||
|
[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|
|
||||||
|
[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|
|
||||||
|
[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|
|
||||||
|
[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|
|
||||||
|
[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|
|
||||||
|
[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|
|
||||||
|
[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|
|
||||||
|
[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|
|
||||||
|
[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|
|
||||||
|
[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|
|
||||||
|
[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|
|
||||||
|
[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|
|
||||||
|
[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|
|
||||||
|
[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|
|
||||||
|
[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|
|
||||||
|
[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|
|
||||||
|
[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|
|
||||||
|
[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|
|
||||||
|
[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|
|
||||||
|
[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|
|
||||||
|
[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|
|
||||||
|
[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|
|
||||||
|
[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|
|
||||||
|
[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|
|
||||||
|
[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|
|
||||||
|
[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|
|
||||||
|
[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|
|
||||||
|
[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|
|
||||||
|
[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|
|
||||||
|
[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|
|
||||||
|
[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|
|
||||||
|
[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|
|
||||||
|
[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|
|
||||||
|
[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|
|
||||||
|
[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|
|
||||||
|
[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|
|
||||||
|
[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|
|
||||||
|
[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|
|
||||||
|
[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|
|
||||||
|
[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|
|
||||||
|
[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|
|
||||||
|
[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|
|
||||||
|
[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|
|
||||||
|
[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|
|
||||||
|
[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|
|
||||||
|
[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|
|
||||||
|
[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|
|
||||||
|
[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|
|
||||||
|
[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|
|
||||||
|
[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|
|
||||||
|
[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|
|
||||||
|
[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|
|
||||||
|
[\uFFD2-\uFFD7\uFFDA-\uFFDC]
|
||||||
|
return 'UNICODE_TEXT';
|
||||||
|
|
||||||
|
<text>"|" { this.popState(); return 'PIPE'; }
|
||||||
|
<*>"|" { this.pushState("text"); return 'PIPE'; }
|
||||||
|
|
||||||
|
<text>")" { this.popState(); return 'PE'; }
|
||||||
|
<*>"(" { this.pushState("text"); return 'PS'; }
|
||||||
|
|
||||||
|
<text>"]" { this.popState(); return 'SQE'; }
|
||||||
|
<*>"[" { this.pushState("text"); return 'SQS'; }
|
||||||
|
|
||||||
|
<text>(\}) { this.popState(); return 'DIAMOND_STOP' }
|
||||||
|
<*>"{" { this.pushState("text"); return 'DIAMOND_START' }
|
||||||
|
<text>[^\[\]\(\)\{\}\|\"]+ return "TEXT";
|
||||||
|
|
||||||
|
"\"" return 'QUOTE';
|
||||||
|
(\r?\n)+ return 'NEWLINE';
|
||||||
|
\s return 'SPACE';
|
||||||
|
<<EOF>> return 'EOF';
|
||||||
|
|
||||||
|
/lex
|
||||||
|
|
||||||
|
/* operator associations and precedence */
|
||||||
|
|
||||||
|
%left '^'
|
||||||
|
|
||||||
|
%start start
|
||||||
|
|
||||||
|
%% /* language grammar */
|
||||||
|
|
||||||
|
start
|
||||||
|
: graphConfig document
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
document
|
||||||
|
: /* empty */
|
||||||
|
{ $$ = [];}
|
||||||
|
| document line
|
||||||
|
{
|
||||||
|
if(!Array.isArray($line) || $line.length > 0){
|
||||||
|
$document.push($line);
|
||||||
|
}
|
||||||
|
$$=$document;}
|
||||||
|
;
|
||||||
|
|
||||||
|
line
|
||||||
|
: statement
|
||||||
|
{$$=$statement;}
|
||||||
|
| SEMI
|
||||||
|
| NEWLINE
|
||||||
|
| SPACE
|
||||||
|
| EOF
|
||||||
|
;
|
||||||
|
|
||||||
|
graphConfig
|
||||||
|
: SPACE graphConfig
|
||||||
|
| NEWLINE graphConfig
|
||||||
|
| GRAPH NODIR
|
||||||
|
{ yy.setDirection('TB');$$ = 'TB';}
|
||||||
|
| GRAPH DIR FirstStmtSeparator
|
||||||
|
{ yy.setDirection($DIR);$$ = $DIR;}
|
||||||
|
// | GRAPH SPACE TAGEND FirstStmtSeparator
|
||||||
|
// { yy.setDirection("LR");$$ = $TAGEND;}
|
||||||
|
// | GRAPH SPACE TAGSTART FirstStmtSeparator
|
||||||
|
// { yy.setDirection("RL");$$ = $TAGSTART;}
|
||||||
|
// | GRAPH SPACE UP FirstStmtSeparator
|
||||||
|
// { yy.setDirection("BT");$$ = $UP;}
|
||||||
|
// | GRAPH SPACE DOWN FirstStmtSeparator
|
||||||
|
// { yy.setDirection("TB");$$ = $DOWN;}
|
||||||
|
;
|
||||||
|
|
||||||
|
ending: endToken ending
|
||||||
|
| endToken
|
||||||
|
;
|
||||||
|
|
||||||
|
endToken: NEWLINE | SPACE | EOF;
|
||||||
|
|
||||||
|
FirstStmtSeparator
|
||||||
|
: SEMI | NEWLINE | spaceList NEWLINE ;
|
||||||
|
|
||||||
|
|
||||||
|
spaceListNewline
|
||||||
|
: SPACE spaceListNewline
|
||||||
|
| NEWLINE spaceListNewline
|
||||||
|
| NEWLINE
|
||||||
|
| SPACE
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
spaceList
|
||||||
|
: SPACE spaceList
|
||||||
|
| SPACE
|
||||||
|
;
|
||||||
|
|
||||||
|
statement
|
||||||
|
: vertexStatement separator
|
||||||
|
{ /* console.warn('finat vs', $vertexStatement.nodes); */ $$=$vertexStatement.nodes}
|
||||||
|
| styleStatement separator
|
||||||
|
{$$=[];}
|
||||||
|
| linkStyleStatement separator
|
||||||
|
{$$=[];}
|
||||||
|
| classDefStatement separator
|
||||||
|
{$$=[];}
|
||||||
|
| classStatement separator
|
||||||
|
{$$=[];}
|
||||||
|
| clickStatement separator
|
||||||
|
{$$=[];}
|
||||||
|
| subgraph SPACE textNoTags SQS text SQE separator document end
|
||||||
|
{$$=yy.addSubGraph($textNoTags,$document,$text);}
|
||||||
|
| subgraph SPACE textNoTags separator document end
|
||||||
|
{$$=yy.addSubGraph($textNoTags,$document,$textNoTags);}
|
||||||
|
// | subgraph SPACE textNoTags separator document end
|
||||||
|
// {$$=yy.addSubGraph($textNoTags,$document,$textNoTags);}
|
||||||
|
| subgraph separator document end
|
||||||
|
{$$=yy.addSubGraph(undefined,$document,undefined);}
|
||||||
|
| direction
|
||||||
|
| acc_title acc_title_value { $$=$acc_title_value.trim();yy.setAccTitle($$); }
|
||||||
|
| acc_descr acc_descr_value { $$=$acc_descr_value.trim();yy.setAccDescription($$); }
|
||||||
|
| acc_descr_multiline_value { $$=$acc_descr_multiline_value.trim();yy.setAccDescription($$); }
|
||||||
|
;
|
||||||
|
|
||||||
|
separator: NEWLINE | SEMI | EOF ;
|
||||||
|
|
||||||
|
shapeData:
|
||||||
|
shapeData SHAPE_DATA
|
||||||
|
{ $$ = $1 + $2; }
|
||||||
|
| SHAPE_DATA
|
||||||
|
{ $$ = $1; }
|
||||||
|
;
|
||||||
|
|
||||||
|
vertexStatement: vertexStatement link node shapeData
|
||||||
|
{ /* console.warn('vs shapeData',$vertexStatement.stmt,$node, $shapeData);*/ yy.addVertex($node[0],undefined,undefined,undefined, undefined,undefined, undefined,$shapeData); yy.addLink($vertexStatement.stmt,$node,$link); $$ = { stmt: $node, nodes: $node.concat($vertexStatement.nodes) } }
|
||||||
|
| vertexStatement link node
|
||||||
|
{ /*console.warn('vs',$vertexStatement.stmt,$node);*/ yy.addLink($vertexStatement.stmt,$node,$link); $$ = { stmt: $node, nodes: $node.concat($vertexStatement.nodes) } }
|
||||||
|
| vertexStatement link node spaceList
|
||||||
|
{ /* console.warn('vs',$vertexStatement.stmt,$node); */ yy.addLink($vertexStatement.stmt,$node,$link); $$ = { stmt: $node, nodes: $node.concat($vertexStatement.nodes) } }
|
||||||
|
|node spaceList { /*console.warn('vertexStatement: node spaceList', $node);*/ $$ = {stmt: $node, nodes:$node }}
|
||||||
|
|node shapeData {
|
||||||
|
/*console.warn('vertexStatement: node shapeData', $node[0], $shapeData);*/
|
||||||
|
yy.addVertex($node[0],undefined,undefined,undefined, undefined,undefined, undefined,$shapeData);
|
||||||
|
$$ = {stmt: $node, nodes:$node, shapeData: $shapeData}
|
||||||
|
}
|
||||||
|
|node { /* console.warn('vertexStatement: single node', $node); */ $$ = {stmt: $node, nodes:$node }}
|
||||||
|
;
|
||||||
|
|
||||||
|
node: styledVertex
|
||||||
|
{ /*console.warn('nod', $styledVertex);*/ $$ = [$styledVertex];}
|
||||||
|
| node shapeData spaceList AMP spaceList styledVertex
|
||||||
|
{ yy.addVertex($node[0],undefined,undefined,undefined, undefined,undefined, undefined,$shapeData); $$ = $node.concat($styledVertex); /*console.warn('pip2', $node[0], $styledVertex, $$);*/ }
|
||||||
|
| node spaceList AMP spaceList styledVertex
|
||||||
|
{ $$ = $node.concat($styledVertex); /*console.warn('pip', $node[0], $styledVertex, $$);*/ }
|
||||||
|
;
|
||||||
|
|
||||||
|
styledVertex: vertex
|
||||||
|
{ /* console.warn('nodc', $vertex);*/ $$ = $vertex;}
|
||||||
|
| vertex STYLE_SEPARATOR idString
|
||||||
|
{$$ = $vertex;yy.setClass($vertex,$idString)}
|
||||||
|
;
|
||||||
|
|
||||||
|
vertex: idString SQS text SQE
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'square');}
|
||||||
|
| idString DOUBLECIRCLESTART text DOUBLECIRCLEEND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'doublecircle');}
|
||||||
|
| idString PS PS text PE PE
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'circle');}
|
||||||
|
| idString '(-' text '-)'
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'ellipse');}
|
||||||
|
| idString STADIUMSTART text STADIUMEND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'stadium');}
|
||||||
|
| idString SUBROUTINESTART text SUBROUTINEEND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'subroutine');}
|
||||||
|
| idString VERTEX_WITH_PROPS_START NODE_STRING\[field] COLON NODE_STRING\[value] PIPE text SQE
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'rect',undefined,undefined,undefined, Object.fromEntries([[$field, $value]]));}
|
||||||
|
| idString CYLINDERSTART text CYLINDEREND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'cylinder');}
|
||||||
|
| idString PS text PE
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'round');}
|
||||||
|
| idString DIAMOND_START text DIAMOND_STOP
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'diamond');}
|
||||||
|
| idString DIAMOND_START DIAMOND_START text DIAMOND_STOP DIAMOND_STOP
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'hexagon');}
|
||||||
|
| idString TAGEND text SQE
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'odd');}
|
||||||
|
| idString TRAPSTART text TRAPEND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'trapezoid');}
|
||||||
|
| idString INVTRAPSTART text INVTRAPEND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'inv_trapezoid');}
|
||||||
|
| idString TRAPSTART text INVTRAPEND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'lean_right');}
|
||||||
|
| idString INVTRAPSTART text TRAPEND
|
||||||
|
{$$ = $idString;yy.addVertex($idString,$text,'lean_left');}
|
||||||
|
| idString
|
||||||
|
{ /*console.warn('h: ', $idString);*/$$ = $idString;yy.addVertex($idString);}
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
link: linkStatement arrowText
|
||||||
|
{$linkStatement.text = $arrowText;$$ = $linkStatement;}
|
||||||
|
| linkStatement TESTSTR SPACE
|
||||||
|
{$linkStatement.text = $TESTSTR;$$ = $linkStatement;}
|
||||||
|
| linkStatement arrowText SPACE
|
||||||
|
{$linkStatement.text = $arrowText;$$ = $linkStatement;}
|
||||||
|
| linkStatement
|
||||||
|
{$$ = $linkStatement;}
|
||||||
|
| START_LINK edgeText LINK
|
||||||
|
{var inf = yy.destructLink($LINK, $START_LINK); $$ = {"type":inf.type,"stroke":inf.stroke,"length":inf.length,"text":$edgeText};}
|
||||||
|
;
|
||||||
|
|
||||||
|
edgeText: edgeTextToken
|
||||||
|
{$$={text:$edgeTextToken, type:'text'};}
|
||||||
|
| edgeText edgeTextToken
|
||||||
|
{$$={text:$edgeText.text+''+$edgeTextToken, type:$edgeText.type};}
|
||||||
|
|STR
|
||||||
|
{$$={text: $STR, type: 'string'};}
|
||||||
|
| MD_STR
|
||||||
|
{$$={text:$MD_STR, type:'markdown'};}
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
linkStatement: LINK
|
||||||
|
{var inf = yy.destructLink($LINK);$$ = {"type":inf.type,"stroke":inf.stroke,"length":inf.length};}
|
||||||
|
;
|
||||||
|
|
||||||
|
arrowText:
|
||||||
|
PIPE text PIPE
|
||||||
|
{$$ = $text;}
|
||||||
|
;
|
||||||
|
|
||||||
|
text: textToken
|
||||||
|
{ $$={text:$textToken, type: 'text'};}
|
||||||
|
| text textToken
|
||||||
|
{ $$={text:$text.text+''+$textToken, type: $text.type};}
|
||||||
|
| STR
|
||||||
|
{ $$ = {text: $STR, type: 'string'};}
|
||||||
|
| MD_STR
|
||||||
|
{ $$={text: $MD_STR, type: 'markdown'};}
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
keywords
|
||||||
|
: STYLE | LINKSTYLE | CLASSDEF | CLASS | CLICK | GRAPH | DIR | subgraph | end | DOWN | UP;
|
||||||
|
|
||||||
|
|
||||||
|
textNoTags: textNoTagsToken
|
||||||
|
{$$={text:$textNoTagsToken, type: 'text'};}
|
||||||
|
| textNoTags textNoTagsToken
|
||||||
|
{$$={text:$textNoTags.text+''+$textNoTagsToken, type: $textNoTags.type};}
|
||||||
|
| STR
|
||||||
|
{ $$={text: $STR, type: 'text'};}
|
||||||
|
| MD_STR
|
||||||
|
{ $$={text: $MD_STR, type: 'markdown'};}
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
classDefStatement:CLASSDEF SPACE idString SPACE stylesOpt
|
||||||
|
{$$ = $CLASSDEF;yy.addClass($idString,$stylesOpt);}
|
||||||
|
;
|
||||||
|
|
||||||
|
classStatement:CLASS SPACE idString\[vertex] SPACE idString\[class]
|
||||||
|
{$$ = $CLASS;yy.setClass($vertex, $class);}
|
||||||
|
;
|
||||||
|
|
||||||
|
clickStatement
|
||||||
|
: CLICK CALLBACKNAME {$$ = $CLICK;yy.setClickEvent($CLICK, $CALLBACKNAME);}
|
||||||
|
| CLICK CALLBACKNAME SPACE STR {$$ = $CLICK;yy.setClickEvent($CLICK, $CALLBACKNAME);yy.setTooltip($CLICK, $STR);}
|
||||||
|
| CLICK CALLBACKNAME CALLBACKARGS {$$ = $CLICK;yy.setClickEvent($CLICK, $CALLBACKNAME, $CALLBACKARGS);}
|
||||||
|
| CLICK CALLBACKNAME CALLBACKARGS SPACE STR {$$ = $CLICK;yy.setClickEvent($CLICK, $CALLBACKNAME, $CALLBACKARGS);yy.setTooltip($CLICK, $STR);}
|
||||||
|
| CLICK HREF STR {$$ = $CLICK;yy.setLink($CLICK, $STR);}
|
||||||
|
| CLICK HREF STR SPACE STR {$$ = $CLICK;yy.setLink($CLICK, $STR1);yy.setTooltip($CLICK, $STR2);}
|
||||||
|
| CLICK HREF STR SPACE LINK_TARGET {$$ = $CLICK;yy.setLink($CLICK, $STR, $LINK_TARGET);}
|
||||||
|
| CLICK HREF STR\[link] SPACE STR\[tooltip] SPACE LINK_TARGET {$$ = $CLICK;yy.setLink($CLICK, $link, $LINK_TARGET);yy.setTooltip($CLICK, $tooltip);}
|
||||||
|
| CLICK alphaNum {$$ = $CLICK;yy.setClickEvent($CLICK, $alphaNum);}
|
||||||
|
| CLICK alphaNum SPACE STR {$$ = $CLICK;yy.setClickEvent($CLICK, $alphaNum);yy.setTooltip($CLICK, $STR);}
|
||||||
|
| CLICK STR {$$ = $CLICK;yy.setLink($CLICK, $STR);}
|
||||||
|
| CLICK STR\[link] SPACE STR\[tooltip] {$$ = $CLICK;yy.setLink($CLICK, $link);yy.setTooltip($CLICK, $tooltip);}
|
||||||
|
| CLICK STR SPACE LINK_TARGET {$$ = $CLICK;yy.setLink($CLICK, $STR, $LINK_TARGET);}
|
||||||
|
| CLICK STR\[link] SPACE STR\[tooltip] SPACE LINK_TARGET {$$ = $CLICK;yy.setLink($CLICK, $link, $LINK_TARGET);yy.setTooltip($CLICK, $tooltip);}
|
||||||
|
;
|
||||||
|
|
||||||
|
styleStatement:STYLE SPACE idString SPACE stylesOpt
|
||||||
|
{$$ = $STYLE;yy.addVertex($idString,undefined,undefined,$stylesOpt);}
|
||||||
|
;
|
||||||
|
|
||||||
|
linkStyleStatement
|
||||||
|
: LINKSTYLE SPACE DEFAULT SPACE stylesOpt
|
||||||
|
{$$ = $LINKSTYLE;yy.updateLink([$DEFAULT],$stylesOpt);}
|
||||||
|
| LINKSTYLE SPACE numList SPACE stylesOpt
|
||||||
|
{$$ = $LINKSTYLE;yy.updateLink($numList,$stylesOpt);}
|
||||||
|
| LINKSTYLE SPACE DEFAULT SPACE INTERPOLATE SPACE alphaNum SPACE stylesOpt
|
||||||
|
{$$ = $LINKSTYLE;yy.updateLinkInterpolate([$DEFAULT],$alphaNum);yy.updateLink([$DEFAULT],$stylesOpt);}
|
||||||
|
| LINKSTYLE SPACE numList SPACE INTERPOLATE SPACE alphaNum SPACE stylesOpt
|
||||||
|
{$$ = $LINKSTYLE;yy.updateLinkInterpolate($numList,$alphaNum);yy.updateLink($numList,$stylesOpt);}
|
||||||
|
| LINKSTYLE SPACE DEFAULT SPACE INTERPOLATE SPACE alphaNum
|
||||||
|
{$$ = $LINKSTYLE;yy.updateLinkInterpolate([$DEFAULT],$alphaNum);}
|
||||||
|
| LINKSTYLE SPACE numList SPACE INTERPOLATE SPACE alphaNum
|
||||||
|
{$$ = $LINKSTYLE;yy.updateLinkInterpolate($numList,$alphaNum);}
|
||||||
|
;
|
||||||
|
|
||||||
|
numList: NUM
|
||||||
|
{$$ = [$NUM]}
|
||||||
|
| numList COMMA NUM
|
||||||
|
{$numList.push($NUM);$$ = $numList;}
|
||||||
|
;
|
||||||
|
|
||||||
|
stylesOpt: style
|
||||||
|
{$$ = [$style]}
|
||||||
|
| stylesOpt COMMA style
|
||||||
|
{$stylesOpt.push($style);$$ = $stylesOpt;}
|
||||||
|
;
|
||||||
|
|
||||||
|
style: styleComponent
|
||||||
|
|style styleComponent
|
||||||
|
{$$ = $style + $styleComponent;}
|
||||||
|
;
|
||||||
|
|
||||||
|
styleComponent: NUM | NODE_STRING| COLON | UNIT | SPACE | BRKT | STYLE | PCT ;
|
||||||
|
|
||||||
|
/* Token lists */
|
||||||
|
idStringToken : NUM | NODE_STRING | DOWN | MINUS | DEFAULT | COMMA | COLON | AMP | BRKT | MULT | UNICODE_TEXT;
|
||||||
|
|
||||||
|
textToken : TEXT | TAGSTART | TAGEND | UNICODE_TEXT;
|
||||||
|
|
||||||
|
textNoTagsToken: NUM | NODE_STRING | SPACE | MINUS | AMP | UNICODE_TEXT | COLON | MULT | BRKT | keywords | START_LINK ;
|
||||||
|
|
||||||
|
edgeTextToken : EDGE_TEXT | UNICODE_TEXT ;
|
||||||
|
|
||||||
|
alphaNumToken : NUM | UNICODE_TEXT | NODE_STRING | DIR | DOWN | MINUS | COMMA | COLON | AMP | BRKT | MULT;
|
||||||
|
|
||||||
|
idString
|
||||||
|
:idStringToken
|
||||||
|
{$$=$idStringToken}
|
||||||
|
| idString idStringToken
|
||||||
|
{$$=$idString+''+$idStringToken}
|
||||||
|
;
|
||||||
|
|
||||||
|
alphaNum
|
||||||
|
: alphaNumToken
|
||||||
|
{$$=$alphaNumToken;}
|
||||||
|
| alphaNum alphaNumToken
|
||||||
|
{$$=$alphaNum+''+$alphaNumToken;}
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
direction
|
||||||
|
: direction_tb
|
||||||
|
{ $$={stmt:'dir', value:'TB'};}
|
||||||
|
| direction_bt
|
||||||
|
{ $$={stmt:'dir', value:'BT'};}
|
||||||
|
| direction_rl
|
||||||
|
{ $$={stmt:'dir', value:'RL'};}
|
||||||
|
| direction_lr
|
||||||
|
{ $$={stmt:'dir', value:'LR'};}
|
||||||
|
;
|
||||||
|
|
||||||
|
%%
|
38
packages/mermaid/src/diagrams/swimlane/styles.ts
Normal file
38
packages/mermaid/src/diagrams/swimlane/styles.ts
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
// import khroma from 'khroma';
|
||||||
|
import * as khroma from 'khroma';
|
||||||
|
import getStyleFlowchart from '../flowchart/styles.ts';
|
||||||
|
|
||||||
|
/** Returns the styles given options */
|
||||||
|
export interface FlowChartStyleOptions {
|
||||||
|
arrowheadColor: string;
|
||||||
|
border2: string;
|
||||||
|
clusterBkg: string;
|
||||||
|
clusterBorder: string;
|
||||||
|
edgeLabelBackground: string;
|
||||||
|
fontFamily: string;
|
||||||
|
lineColor: string;
|
||||||
|
mainBkg: string;
|
||||||
|
nodeBorder: string;
|
||||||
|
nodeTextColor: string;
|
||||||
|
tertiaryColor: string;
|
||||||
|
textColor: string;
|
||||||
|
titleColor: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const fade = (color: string, opacity: number) => {
|
||||||
|
// @ts-ignore TODO: incorrect types from khroma
|
||||||
|
const channel = khroma.channel;
|
||||||
|
|
||||||
|
const r = channel(color, 'r');
|
||||||
|
const g = channel(color, 'g');
|
||||||
|
const b = channel(color, 'b');
|
||||||
|
|
||||||
|
// @ts-ignore incorrect types from khroma
|
||||||
|
return khroma.rgba(r, g, b, opacity);
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStyles = (options: FlowChartStyleOptions) =>
|
||||||
|
`${getStyleFlowchart(options)}
|
||||||
|
`;
|
||||||
|
|
||||||
|
export default getStyles;
|
1063
packages/mermaid/src/diagrams/swimlane/swimlaneDb.ts
Normal file
1063
packages/mermaid/src/diagrams/swimlane/swimlaneDb.ts
Normal file
File diff suppressed because it is too large
Load Diff
26
packages/mermaid/src/diagrams/swimlane/swimlaneDiagram.ts
Normal file
26
packages/mermaid/src/diagrams/swimlane/swimlaneDiagram.ts
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import type { MermaidConfig } from '../../config.type.js';
|
||||||
|
import { setConfig } from '../../diagram-api/diagramAPI.js';
|
||||||
|
import swimlaneDb from './swimlaneDb.js';
|
||||||
|
import renderer from './swimlaneRenderer.js';
|
||||||
|
// @ts-ignore: JISON doesn't support types
|
||||||
|
import swimlaneParser from './parser/swimlane.jison';
|
||||||
|
import swimlaneStyles from './styles.js';
|
||||||
|
|
||||||
|
export const diagram = {
|
||||||
|
parser: swimlaneParser,
|
||||||
|
db: swimlaneDb,
|
||||||
|
renderer,
|
||||||
|
styles: swimlaneStyles,
|
||||||
|
init: (cnf: MermaidConfig) => {
|
||||||
|
if (!cnf.flowchart) {
|
||||||
|
cnf.flowchart = {};
|
||||||
|
}
|
||||||
|
if (cnf.layout) {
|
||||||
|
setConfig({ layout: cnf.layout });
|
||||||
|
}
|
||||||
|
cnf.flowchart.arrowMarkerAbsolute = cnf.arrowMarkerAbsolute;
|
||||||
|
setConfig({ flowchart: { arrowMarkerAbsolute: cnf.arrowMarkerAbsolute } });
|
||||||
|
swimlaneDb.clear();
|
||||||
|
swimlaneDb.setGen('gen-2');
|
||||||
|
},
|
||||||
|
};
|
104
packages/mermaid/src/diagrams/swimlane/swimlaneRenderer.ts
Normal file
104
packages/mermaid/src/diagrams/swimlane/swimlaneRenderer.ts
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
import { select } from 'd3';
|
||||||
|
import { getConfig } from '../../diagram-api/diagramAPI.js';
|
||||||
|
import type { DiagramStyleClassDef } from '../../diagram-api/types.js';
|
||||||
|
import { log } from '../../logger.js';
|
||||||
|
import { getDiagramElement } from '../../rendering-util/insertElementsForSize.js';
|
||||||
|
import { getRegisteredLayoutAlgorithm, render } from '../../rendering-util/render.js';
|
||||||
|
import { setupViewPortForSVG } from '../../rendering-util/setupViewPortForSVG.js';
|
||||||
|
import type { LayoutData } from '../../rendering-util/types.js';
|
||||||
|
import utils from '../../utils.js';
|
||||||
|
import { getDirection } from './swimlaneDb.js';
|
||||||
|
|
||||||
|
export const getClasses = function (
|
||||||
|
text: string,
|
||||||
|
diagramObj: any
|
||||||
|
): Map<string, DiagramStyleClassDef> {
|
||||||
|
return diagramObj.db.getClasses();
|
||||||
|
};
|
||||||
|
|
||||||
|
export const draw = async function (text: string, id: string, _version: string, diag: any) {
|
||||||
|
log.info('REF0:');
|
||||||
|
log.info('Drawing state diagram (v2)', id);
|
||||||
|
const { securityLevel, flowchart: conf, layout } = getConfig();
|
||||||
|
|
||||||
|
// Handle root and document for when rendering in sandbox mode
|
||||||
|
let sandboxElement;
|
||||||
|
if (securityLevel === 'sandbox') {
|
||||||
|
sandboxElement = select('#i' + id);
|
||||||
|
}
|
||||||
|
|
||||||
|
// @ts-ignore - document is always available
|
||||||
|
const doc = securityLevel === 'sandbox' ? sandboxElement.nodes()[0].contentDocument : document;
|
||||||
|
|
||||||
|
// The getData method provided in all supported diagrams is used to extract the data from the parsed structure
|
||||||
|
// into the Layout data format
|
||||||
|
log.debug('Before getData: ');
|
||||||
|
const data4Layout = diag.db.getData() as LayoutData;
|
||||||
|
log.debug('Data: ', data4Layout);
|
||||||
|
// Create the root SVG
|
||||||
|
const svg = getDiagramElement(id, securityLevel);
|
||||||
|
const direction = getDirection();
|
||||||
|
|
||||||
|
data4Layout.type = diag.type;
|
||||||
|
data4Layout.layoutAlgorithm = getRegisteredLayoutAlgorithm(layout);
|
||||||
|
if (data4Layout.layoutAlgorithm === 'dagre' && layout === 'elk') {
|
||||||
|
log.warn(
|
||||||
|
'flowchart-elk was moved to an external package in Mermaid v11. Please refer [release notes](https://github.com/mermaid-js/mermaid/releases/tag/v11.0.0) for more details. This diagram will be rendered using `dagre` layout as a fallback.'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
data4Layout.direction = direction;
|
||||||
|
data4Layout.nodeSpacing = conf?.nodeSpacing || 50;
|
||||||
|
data4Layout.rankSpacing = conf?.rankSpacing || 50;
|
||||||
|
data4Layout.markers = ['point', 'circle', 'cross'];
|
||||||
|
|
||||||
|
data4Layout.diagramId = id;
|
||||||
|
log.debug('REF1:', data4Layout);
|
||||||
|
await render(data4Layout, svg);
|
||||||
|
const padding = data4Layout.config.flowchart?.diagramPadding ?? 8;
|
||||||
|
utils.insertTitle(
|
||||||
|
svg,
|
||||||
|
'flowchartTitleText',
|
||||||
|
conf?.titleTopMargin || 0,
|
||||||
|
diag.db.getDiagramTitle()
|
||||||
|
);
|
||||||
|
setupViewPortForSVG(svg, padding, 'flowchart', conf?.useMaxWidth || false);
|
||||||
|
|
||||||
|
// If node has a link, wrap it in an anchor SVG object.
|
||||||
|
for (const vertex of data4Layout.nodes) {
|
||||||
|
const node = select(`#${id} [id="${vertex.id}"]`);
|
||||||
|
if (!node || !vertex.link) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const link = doc.createElementNS('http://www.w3.org/2000/svg', 'a');
|
||||||
|
link.setAttributeNS('http://www.w3.org/2000/svg', 'class', vertex.cssClasses);
|
||||||
|
link.setAttributeNS('http://www.w3.org/2000/svg', 'rel', 'noopener');
|
||||||
|
if (securityLevel === 'sandbox') {
|
||||||
|
link.setAttributeNS('http://www.w3.org/2000/svg', 'target', '_top');
|
||||||
|
} else if (vertex.linkTarget) {
|
||||||
|
link.setAttributeNS('http://www.w3.org/2000/svg', 'target', vertex.linkTarget);
|
||||||
|
}
|
||||||
|
|
||||||
|
const linkNode = node.insert(function () {
|
||||||
|
return link;
|
||||||
|
}, ':first-child');
|
||||||
|
|
||||||
|
const shape = node.select('.label-container');
|
||||||
|
if (shape) {
|
||||||
|
linkNode.append(function () {
|
||||||
|
return shape.node();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const label = node.select('.label');
|
||||||
|
if (label) {
|
||||||
|
linkNode.append(function () {
|
||||||
|
return label.node();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export default {
|
||||||
|
getClasses,
|
||||||
|
draw,
|
||||||
|
};
|
Reference in New Issue
Block a user