Mirror of https://github.com/mermaid-js/mermaid.git, synced 2025-12-09 22:14:10 +01:00

Compare commits: sidv/langi ... sidv/class (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 345b75cc0e |  |
|  | 0ebea7744b |  |
@@ -156,7 +156,7 @@ We know it can sometimes be hard to code _and_ write user documentation.

Our documentation is managed in `packages/mermaid/src/docs`. Details on how to edit is in the [Contributing Documentation](#contributing-documentation) section.

Create another issue specifically for the documentation.
Create another issue specifically for the documentation.\
You will need to help with the PR, but definitely ask for help if you feel stuck.
When it feels hard to write stuff out, explaining it to someone and having that person ask you clarifying questions can often be 80% of the work!

@@ -111,9 +111,9 @@ b. The importing of mermaid library through the `mermaid.esm.mjs` or `mermaid.es
<body>
Here is a mermaid diagram:
<pre class="mermaid">
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server01]
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server01]
B --> D[Server02]
</pre>
</body>
@@ -152,18 +152,18 @@ Rendering in Mermaid is initialized by `mermaid.initialize()` call. However, doi
<body>
Here is one mermaid diagram:
<pre class="mermaid">
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
B --> D[Server2]
</pre>

And here is another:
<pre class="mermaid">
graph TD
graph TD
A[Client] -->|tcp_123| B
B(Load Balancer)
B -->|tcp_456| C[Server1]
B(Load Balancer)
B -->|tcp_456| C[Server1]
B -->|tcp_456| D[Server2]
</pre>
@@ -185,15 +185,15 @@ In this example mermaid.js is referenced in `src` as a separate JavaScript file,
</head>
<body>
<pre class="mermaid">
graph LR
A --- B
B-->C[fa:fa-ban forbidden]
graph LR
A --- B
B-->C[fa:fa-ban forbidden]
B-->D(fa:fa-spinner);
</pre>
<pre class="mermaid">
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
B --> D[Server2]
</pre>
<script type="module">
@@ -7,9 +7,9 @@
base = ""

# Directory that contains the deploy-ready HTML files and
# assets generated by the build. This is an absolute path relative
# assets generated by the build. This is an absolute path relative
# to the base directory, which is the root by default (/).
# This sample publishes the directory located at the absolute
# This sample publishes the directory located at the absolute
# path "root/project/build-output"

publish = "mermaid-live-editor/docs"
@@ -368,7 +368,20 @@ const cutPathAtIntersect = (_points, boundryNode) => {
return points;
};

//(edgePaths, e, edge, clusterDb, diagramtype, graph)
/**
* Calculate the deltas and angle between two points
* @param {{x: number, y:number}} point1
* @param {{x: number, y:number}} point2
* @returns {{angle: number, deltaX: number, deltaY: number}}
*/
function calculateDeltaAndAngle(point1, point2) {
const [x1, y1] = [point1.x, point1.y];
const [x2, y2] = [point2.x, point2.y];
const deltaX = x2 - x1;
const deltaY = y2 - y1;
return { angle: Math.atan(deltaY / deltaX), deltaX, deltaY };
}

export const insertEdge = function (elem, e, edge, clusterDb, diagramType, graph) {
let points = edge.points;
let pointsHasChanged = false;
@@ -435,22 +448,52 @@ export const insertEdge = function (elem, e, edge, clusterDb, diagramType, graph
const lineData = points.filter((p) => !Number.isNaN(p.y));

// This is the accessor function we talked about above
let curve;
let curve = curveBasis;
// Currently only flowcharts get the curve from the settings, perhaps this should
// be expanded to a common setting? Restricting it for now in order not to cause side-effects that
// have not been thought through
if (diagramType === 'graph' || diagramType === 'flowchart') {
curve = edge.curve || curveBasis;
} else {
curve = curveBasis;
if (edge.curve && (diagramType === 'graph' || diagramType === 'flowchart')) {
curve = edge.curve;
}
// curve = curveLinear;

const markerOffsets = {
aggregation: 18,
extension: 18,
composition: 18,
dependency: 6,
lollipop: 13.5,
};

const lineFunction = line()
.x(function (d) {
return d.x;
.x(function (d, i, data) {
let offset = 0;
if (i === 0 && Object.hasOwn(markerOffsets, edge.arrowTypeStart)) {
const { angle, deltaX } = calculateDeltaAndAngle(data[0], data[1]);
offset = markerOffsets[edge.arrowTypeStart] * Math.cos(angle) * (deltaX >= 0 ? 1 : -1) || 0;
} else if (i === data.length - 1 && Object.hasOwn(markerOffsets, edge.arrowTypeEnd)) {
const { angle, deltaX } = calculateDeltaAndAngle(
data[data.length - 1],
data[data.length - 2]
);
offset = markerOffsets[edge.arrowTypeEnd] * Math.cos(angle) * (deltaX >= 0 ? 1 : -1) || 0;
}
return d.x + offset;
})
.y(function (d) {
return d.y;
.y(function (d, i, data) {
let offset = 0;
if (i === 0 && Object.hasOwn(markerOffsets, edge.arrowTypeStart)) {
const { angle, deltaY } = calculateDeltaAndAngle(data[0], data[1]);
offset =
markerOffsets[edge.arrowTypeStart] * Math.abs(Math.sin(angle)) * (deltaY >= 0 ? 1 : -1);
} else if (i === data.length - 1 && Object.hasOwn(markerOffsets, edge.arrowTypeEnd)) {
const { angle, deltaY } = calculateDeltaAndAngle(
data[data.length - 1],
data[data.length - 2]
);
offset =
markerOffsets[edge.arrowTypeEnd] * Math.abs(Math.sin(angle)) * (deltaY >= 0 ? 1 : -1);
}
return d.y + offset;
})
.curve(curve);
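The accessors above shorten the first and last edge points along the edge direction, so long class-diagram markers do not overlap the node they attach to. A minimal standalone sketch of that trimming idea follows; it is not the library's API, and the names `Point` and `offsetEndPoint` are mine.

```ts
// Sketch only: shift an end point towards its neighbour by the marker length,
// splitting the shift into x (cos) and y (sin) components as the diff does.
interface Point {
  x: number;
  y: number;
}

function calculateDeltaAndAngle(point1: Point, point2: Point) {
  const deltaX = point2.x - point1.x;
  const deltaY = point2.y - point1.y;
  return { angle: Math.atan(deltaY / deltaX), deltaX, deltaY };
}

function offsetEndPoint(end: Point, next: Point, markerLength: number): Point {
  const { angle, deltaX, deltaY } = calculateDeltaAndAngle(end, next);
  // "|| 0" guards against NaN when the two points coincide.
  const dx = markerLength * Math.cos(angle) * (deltaX >= 0 ? 1 : -1) || 0;
  const dy = markerLength * Math.abs(Math.sin(angle)) * (deltaY >= 0 ? 1 : -1) || 0;
  return { x: end.x + dx, y: end.y + dy };
}

// Example: an 18px marker on a mostly horizontal edge moves the start point
// roughly 18px into the edge: ~{ x: 17.9, y: 1.8 }.
console.log(offsetEndPoint({ x: 0, y: 0 }, { x: 100, y: 10 }, 18));
```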
@@ -489,15 +532,15 @@ export const insertEdge = function (elem, e, edge, clusterDb, diagramType, graph
.attr('style', edge.style);

// DEBUG code, adds a red circle at each edge coordinate
// edge.points.forEach((point) => {
//   elem
//     .append('circle')
//     .style('stroke', 'red')
//     .style('fill', 'red')
//     .attr('r', 1)
//     .attr('cx', point.x)
//     .attr('cy', point.y);
// });
edge.points.forEach((point) => {
elem
.append('circle')
.style('stroke', 'red')
.style('fill', 'red')
.attr('r', 1)
.attr('cx', point.x)
.attr('cy', point.y);
});

let url = '';
// // TODO: Can we load this config only from the rendered graph type?
@@ -155,7 +155,7 @@ export const render = async (elem, graph, markers, diagramtype, id) => {
clearClusters();
clearGraphlib();

log.warn('Graph at first:', graphlibJson.write(graph));
log.warn('Graph at first:', JSON.stringify(graphlibJson.write(graph)));
adjustClustersAndEdges(graph);
log.warn('Graph after:', graphlibJson.write(graph));
// log.warn('Graph ever after:', graphlibJson.write(graph.node('A').graph));
@@ -16,7 +16,7 @@ const extension = (elem, type, id) => {
.append('marker')
.attr('id', type + '-extensionStart')
.attr('class', 'marker extension ' + type)
.attr('refX', 0)
.attr('refX', 18)
.attr('refY', 7)
.attr('markerWidth', 190)
.attr('markerHeight', 240)
@@ -29,7 +29,7 @@ const extension = (elem, type, id) => {
.append('marker')
.attr('id', type + '-extensionEnd')
.attr('class', 'marker extension ' + type)
.attr('refX', 19)
.attr('refX', 1)
.attr('refY', 7)
.attr('markerWidth', 20)
.attr('markerHeight', 28)
@@ -44,7 +44,7 @@ const composition = (elem, type) => {
.append('marker')
.attr('id', type + '-compositionStart')
.attr('class', 'marker composition ' + type)
.attr('refX', 0)
.attr('refX', 18)
.attr('refY', 7)
.attr('markerWidth', 190)
.attr('markerHeight', 240)
@@ -57,7 +57,7 @@ const composition = (elem, type) => {
.append('marker')
.attr('id', type + '-compositionEnd')
.attr('class', 'marker composition ' + type)
.attr('refX', 19)
.attr('refX', 1)
.attr('refY', 7)
.attr('markerWidth', 20)
.attr('markerHeight', 28)
@@ -71,7 +71,7 @@ const aggregation = (elem, type) => {
.append('marker')
.attr('id', type + '-aggregationStart')
.attr('class', 'marker aggregation ' + type)
.attr('refX', 0)
.attr('refX', 18)
.attr('refY', 7)
.attr('markerWidth', 190)
.attr('markerHeight', 240)
@@ -84,7 +84,7 @@ const aggregation = (elem, type) => {
.append('marker')
.attr('id', type + '-aggregationEnd')
.attr('class', 'marker aggregation ' + type)
.attr('refX', 19)
.attr('refX', 1)
.attr('refY', 7)
.attr('markerWidth', 20)
.attr('markerHeight', 28)
@@ -98,7 +98,7 @@ const dependency = (elem, type) => {
.append('marker')
.attr('id', type + '-dependencyStart')
.attr('class', 'marker dependency ' + type)
.attr('refX', 0)
.attr('refX', 6)
.attr('refY', 7)
.attr('markerWidth', 190)
.attr('markerHeight', 240)
@@ -111,7 +111,7 @@ const dependency = (elem, type) => {
.append('marker')
.attr('id', type + '-dependencyEnd')
.attr('class', 'marker dependency ' + type)
.attr('refX', 19)
.attr('refX', 13)
.attr('refY', 7)
.attr('markerWidth', 20)
.attr('markerHeight', 28)
@@ -125,15 +125,31 @@ const lollipop = (elem, type) => {
.append('marker')
.attr('id', type + '-lollipopStart')
.attr('class', 'marker lollipop ' + type)
.attr('refX', 0)
.attr('refX', 13)
.attr('refY', 7)
.attr('markerWidth', 190)
.attr('markerHeight', 240)
.attr('orient', 'auto')
.append('circle')
.attr('stroke', 'black')
.attr('fill', 'white')
.attr('cx', 6)
.attr('fill', 'transparent')
.attr('cx', 7)
.attr('cy', 7)
.attr('r', 6);
elem
.append('defs')
.append('marker')
.attr('id', type + '-lollipopEnd')
.attr('class', 'marker lollipop ' + type)
.attr('refX', 1)
.attr('refY', 7)
.attr('markerWidth', 190)
.attr('markerHeight', 240)
.attr('orient', 'auto')
.append('circle')
.attr('stroke', 'black')
.attr('fill', 'transparent')
.attr('cx', 7)
.attr('cy', 7)
.attr('r', 6);
};
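Observation, not part of the diff: the new `refX` values of the start markers appear to track the `markerOffsets` table used by `insertEdge` earlier in this comparison, so the line is shortened by roughly the distance the marker protrudes. A rough correspondence, for orientation only:

```ts
// Approximate mapping between insertEdge's markerOffsets and the start-marker refX values above.
const markerOffsets = {
  aggregation: 18, // '-aggregationStart' refX 18
  extension: 18,   // '-extensionStart'   refX 18
  composition: 18, // '-compositionStart' refX 18
  dependency: 6,   // '-dependencyStart'  refX 6
  lollipop: 13.5,  // '-lollipopStart'    refX 13
} as const;
```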
@@ -291,8 +291,8 @@ export const adjustClustersAndEdges = (graph, depth) => {
shape: 'labelRect',
style: '',
});
const edge1 = JSON.parse(JSON.stringify(edge));
const edge2 = JSON.parse(JSON.stringify(edge));
const edge1 = structuredClone(edge);
const edge2 = structuredClone(edge);
edge1.label = '';
edge1.arrowTypeEnd = 'none';
edge2.label = '';
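This hunk swaps the JSON round-trip clone for `structuredClone`. A quick illustration of the difference (assuming a runtime that provides `structuredClone`, i.e. modern browsers or Node 17+):

```ts
const edge = { label: 'x', points: [{ x: 1, y: 2 }], seen: new Date() };

// JSON round-trip copies plain data but turns Dates into strings and drops
// unsupported values (undefined, functions, Maps, Sets, ...).
const viaJson = JSON.parse(JSON.stringify(edge));
console.log(typeof viaJson.seen); // "string"

// structuredClone performs a deep copy that preserves Dates, Maps, Sets,
// typed arrays, etc., without the stringify/parse detour.
const viaStructuredClone = structuredClone(edge);
console.log(viaStructuredClone.seen instanceof Date); // true
```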
@@ -91,7 +91,7 @@ g.classGroup line {
}

#compositionEnd, .composition {
fill: ${options.lineColor} !important;
fill: transparent !important;
stroke: ${options.lineColor} !important;
stroke-width: 1;
}
packages/mermaid/src/diagrams/pie/parser/pie.jison (new file, 106 lines)
@@ -0,0 +1,106 @@
/** mermaid
* https://knsv.github.io/mermaid
* (c) 2015 Knut Sveidqvist
* MIT license.
*/
%lex
%options case-insensitive

%x string
%x title
%x open_directive
%x type_directive
%x arg_directive
%x close_directive
%x acc_title
%x acc_descr
%x acc_descr_multiline
%%
\%\%\{ { this.begin('open_directive'); return 'open_directive'; }
<open_directive>((?:(?!\}\%\%)[^:.])*) { this.begin('type_directive'); return 'type_directive'; }
<type_directive>":" { this.popState(); this.begin('arg_directive'); return ':'; }
<type_directive,arg_directive>\}\%\% { this.popState(); this.popState(); return 'close_directive'; }
<arg_directive>((?:(?!\}\%\%).|\n)*) return 'arg_directive';
\%\%(?!\{)[^\n]* /* skip comments */
[^\}]\%\%[^\n]* /* skip comments */{ /*console.log('');*/ }
[\n\r]+ return 'NEWLINE';
\%\%[^\n]* /* do nothing */
[\s]+ /* ignore */
title { this.begin("title");return 'title'; }
<title>(?!\n|;|#)*[^\n]* { this.popState(); return "title_value"; }

accTitle\s*":"\s* { this.begin("acc_title");return 'acc_title'; }
<acc_title>(?!\n|;|#)*[^\n]* { this.popState(); return "acc_title_value"; }
accDescr\s*":"\s* { this.begin("acc_descr");return 'acc_descr'; }
<acc_descr>(?!\n|;|#)*[^\n]* { this.popState(); return "acc_descr_value"; }
accDescr\s*"{"\s* { this.begin("acc_descr_multiline");}
<acc_descr_multiline>[\}] { this.popState(); }
<acc_descr_multiline>[^\}]* return "acc_descr_multiline_value";
["] { this.begin("string"); }
<string>["] { this.popState(); }
<string>[^"]* { return "txt"; }
"pie" return 'PIE';
"showData" return 'showData';
":"[\s]*[\d]+(?:\.[\d]+)? return "value";
<<EOF>> return 'EOF';

/lex

%start start

%% /* language grammar */

start
: eol start
| directive start
| PIE document
| PIE showData document {yy.setShowData(true);}
;

document
: /* empty */
| document line
;

line
: statement eol { $$ = $1 }
;

statement
:
| txt value { yy.addSection($1,yy.cleanupValue($2)); }
| title title_value { $$=$2.trim();yy.setDiagramTitle($$); }
| acc_title acc_title_value { $$=$2.trim();yy.setAccTitle($$); }
| acc_descr acc_descr_value { $$=$2.trim();yy.setAccDescription($$); }
| acc_descr_multiline_value { $$=$1.trim();yy.setAccDescription($$); } | section {yy.addSection($1.substr(8));$$=$1.substr(8);}
| directive
;

directive
: openDirective typeDirective closeDirective
| openDirective typeDirective ':' argDirective closeDirective
;

eol
: NEWLINE
| ';'
| EOF
;

openDirective
: open_directive { yy.parseDirective('%%{', 'open_directive'); }
;

typeDirective
: type_directive { yy.parseDirective($1, 'type_directive'); }
;

argDirective
: arg_directive { $1 = $1.trim().replace(/'/g, '"'); yy.parseDirective($1, 'arg_directive'); }
;

closeDirective
: close_directive { yy.parseDirective('}%%', 'close_directive', 'pie'); }
;

%%
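For orientation, a sketch of how a generated jison parser like this one is typically driven, based on the spec file later in this diff (the `.jison` import relies on the project's jison loader and is untyped, hence the ts-ignore; the exact wiring is an assumption):

```ts
// @ts-ignore: JISON doesn't support types
import { parser } from './parser/pie.jison';
import { db } from './pieDb.js';

parser.yy = db; // grammar actions call yy.addSection, yy.setShowData, yy.setDiagramTitle, ...
parser.yy.clear(); // reset state between diagrams
parser.parse(`pie showData
  "GitHub": 100
  "GitLab": 50
`);
console.log(db.getSections()); // expected: { GitHub: 100, GitLab: 50 }
```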
@@ -1,4 +1,5 @@
import { parser } from './pieParser.js';
// @ts-ignore: JISON doesn't support types
import { parser } from './parser/pie.jison';
import { DEFAULT_PIE_DB, db } from './pieDb.js';
import { setConfig } from '../../config.js';

@@ -7,7 +8,13 @@ setConfig({
});

describe('pie', () => {
beforeEach(() => db.clear());
beforeAll(() => {
parser.yy = db;
});

beforeEach(() => {
parser.yy.clear();
});

describe('parse', () => {
it('should handle very simple pie', () => {
@@ -1,5 +1,7 @@
import { log } from '../../logger.js';
import { parseDirective as _parseDirective } from '../../directiveUtils.js';
import { getConfig as commonGetConfig } from '../../config.js';
import { sanitizeText } from '../common/common.js';
import {
setAccTitle,
getAccTitle,
@@ -10,7 +12,7 @@ import {
clear as commonClear,
} from '../../commonDb.js';
import type { ParseDirectiveDefinition } from '../../diagram-api/types.js';
import type { PieFields, PieDB, Sections, D3Section } from './pieTypes.js';
import type { PieFields, PieDB, Sections } from './pieTypes.js';
import type { RequiredDeep } from 'type-fest';
import type { PieDiagramConfig } from '../../config.type.js';
import DEFAULT_CONFIG from '../../defaultConfig.js';
@@ -39,7 +41,8 @@ const clear = (): void => {
commonClear();
};

const addSection = ({ label, value }: D3Section): void => {
const addSection = (label: string, value: number): void => {
label = sanitizeText(label, commonGetConfig());
if (sections[label] === undefined) {
sections[label] = value;
log.debug(`added new section: ${label}, with value: ${value}`);
@@ -48,6 +51,13 @@ const addSection = ({ label, value }: D3Section): void => {

const getSections = (): Sections => sections;

const cleanupValue = (value: string): number => {
if (value.substring(0, 1) === ':') {
value = value.substring(1).trim();
}
return Number(value.trim());
};

const setShowData = (toggle: boolean): void => {
showData = toggle;
};
@@ -68,6 +78,7 @@ export const db: PieDB = {

addSection,
getSections,
cleanupValue,
setShowData,
getShowData,
};
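A small hedged example of the db surface after this change: `addSection` now takes `(label, value)` rather than a `{ label, value }` object, and `cleanupValue` strips the leading `:` that the jison `value` token still carries.

```ts
import { db } from './pieDb.js';

db.clear();
db.addSection('GitHub', db.cleanupValue(': 100'));
db.addSection('GitLab', db.cleanupValue(':50'));
console.log(db.getSections()); // expected: { GitHub: 100, GitLab: 50 }
```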
@@ -1,5 +1,6 @@
import type { DiagramDefinition } from '../../diagram-api/types.js';
import { parser } from './pieParser.js';
// @ts-ignore: JISON doesn't support types
import parser from './parser/pie.jison';
import { db } from './pieDb.js';
import styles from './pieStyles.js';
import { renderer } from './pieRenderer.js';
@@ -1,24 +0,0 @@
import type { Pie, PieSection } from 'mermaid-parser';
import { parse } from 'mermaid-parser';

import { log } from '../../logger.js';
import type { ParserDefinition } from '../../diagram-api/types.js';
import { populateCommonDb } from '../common/populateCommonDb.js';
import type { PieDB } from './pieTypes.js';
import { db } from './pieDb.js';

function populateDb(ast: Pie, db: PieDB) {
populateCommonDb(ast, db);
db.setShowData(ast.showData);
ast.sections.map((section: PieSection) => {
db.addSection(section);
});
}

export const parser: ParserDefinition = {
parse: (input: string): void => {
const ast: Pie = parse('pie', input);
log.debug(ast);
populateDb(ast, db);
},
};
@@ -6,24 +6,24 @@ import { configureSvgSize } from '../../setupGraphViewbox.js';
import { getConfig } from '../../config.js';
import { cleanAndMerge, parseFontSize } from '../../utils.js';
import type { DrawDefinition, Group, SVG } from '../../diagram-api/types.js';
import type { D3Section, PieDB, Sections } from './pieTypes.js';
import type { D3Sections, PieDB, Sections } from './pieTypes.js';
import type { MermaidConfig, PieDiagramConfig } from '../../config.type.js';
import { selectSvgElement } from '../../rendering-util/selectSvgElement.js';

const createPieArcs = (sections: Sections): d3.PieArcDatum<D3Section>[] => {
const createPieArcs = (sections: Sections): d3.PieArcDatum<D3Sections>[] => {
// Compute the position of each group on the pie:
const pieData: D3Section[] = Object.entries(sections)
.map((element: [string, number]): D3Section => {
const pieData: D3Sections[] = Object.entries(sections)
.map((element: [string, number]): D3Sections => {
return {
label: element[0],
value: element[1],
};
})
.sort((a: D3Section, b: D3Section): number => {
.sort((a: D3Sections, b: D3Sections): number => {
return b.value - a.value;
});
const pie: d3.Pie<unknown, D3Section> = d3pie<D3Section>().value(
(d3Section: D3Section): number => d3Section.value
const pie: d3.Pie<unknown, D3Sections> = d3pie<D3Sections>().value(
(d3Section: D3Sections): number => d3Section.value
);
return pie(pieData);
};
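Aside from the `D3Section` to `D3Sections` rename, the pipeline is the usual d3 one: a sections record becomes a sorted array, `d3.pie()` turns it into arc data, and arc generators turn that into SVG paths. A minimal standalone sketch, assuming a d3 v7-style API:

```ts
import * as d3 from 'd3';

interface D3Sections {
  label: string;
  value: number;
}

const sections: Record<string, number> = { GitHub: 100, GitLab: 50 };
const pieData: D3Sections[] = Object.entries(sections)
  .map(([label, value]) => ({ label, value }))
  .sort((a, b) => b.value - a.value);

// Same shape of data the renderer feeds to its arc generators.
const arcs: d3.PieArcDatum<D3Sections>[] = d3.pie<D3Sections>().value((d) => d.value)(pieData);
const arcGenerator = d3.arc<d3.PieArcDatum<D3Sections>>().innerRadius(0).outerRadius(100);
console.log(arcs.map((a) => arcGenerator(a))); // one SVG path string per slice
```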
@@ -66,11 +66,13 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
const textPosition: number = pieConfig.textPosition;
const radius: number = Math.min(width, height) / 2 - MARGIN;
// Shape helper to build arcs:
const arcGenerator: d3.Arc<unknown, d3.PieArcDatum<D3Section>> = arc<d3.PieArcDatum<D3Section>>()
const arcGenerator: d3.Arc<unknown, d3.PieArcDatum<D3Sections>> = arc<
d3.PieArcDatum<D3Sections>
>()
.innerRadius(0)
.outerRadius(radius);
const labelArcGenerator: d3.Arc<unknown, d3.PieArcDatum<D3Section>> = arc<
d3.PieArcDatum<D3Section>
const labelArcGenerator: d3.Arc<unknown, d3.PieArcDatum<D3Sections>> = arc<
d3.PieArcDatum<D3Sections>
>()
.innerRadius(radius * textPosition)
.outerRadius(radius * textPosition);
@@ -83,7 +85,7 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
.attr('class', 'pieOuterCircle');

const sections: Sections = db.getSections();
const arcs: d3.PieArcDatum<D3Section>[] = createPieArcs(sections);
const arcs: d3.PieArcDatum<D3Sections>[] = createPieArcs(sections);

const myGeneratedColors = [
themeVariables.pie1,
@@ -109,7 +111,7 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
.enter()
.append('path')
.attr('d', arcGenerator)
.attr('fill', (datum: d3.PieArcDatum<D3Section>) => {
.attr('fill', (datum: d3.PieArcDatum<D3Sections>) => {
return color(datum.data.label);
})
.attr('class', 'pieCircle');
@@ -125,10 +127,10 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
.data(arcs)
.enter()
.append('text')
.text((datum: d3.PieArcDatum<D3Section>): string => {
.text((datum: d3.PieArcDatum<D3Sections>): string => {
return ((datum.data.value / sum) * 100).toFixed(0) + '%';
})
.attr('transform', (datum: d3.PieArcDatum<D3Section>): string => {
.attr('transform', (datum: d3.PieArcDatum<D3Sections>): string => {
return 'translate(' + labelArcGenerator.centroid(datum) + ')';
})
.style('text-anchor', 'middle')
@@ -168,7 +170,7 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
.append('text')
.attr('x', LEGEND_RECT_SIZE + LEGEND_SPACING)
.attr('y', LEGEND_RECT_SIZE - LEGEND_SPACING)
.text((datum: d3.PieArcDatum<D3Section>): string => {
.text((datum: d3.PieArcDatum<D3Sections>): string => {
const { label, value } = datum.data;
if (db.getShowData()) {
return `${label} [${value}]`;

@@ -36,7 +36,7 @@ export interface PieStyleOptions {

export type Sections = Record<string, number>;

export interface D3Section {
export interface D3Sections {
label: string;
value: number;
}
@@ -56,8 +56,9 @@ export interface PieDB extends DiagramDB {
getAccDescription: () => string;

// diagram db
addSection: ({ label, value }: D3Section) => void;
addSection: (label: string, value: number) => void;
getSections: () => Sections;
cleanupValue: (value: string) => number;
setShowData: (toggle: boolean) => void;
getShowData: () => boolean;
}
@@ -145,7 +145,7 @@ We know it can sometimes be hard to code _and_ write user documentation.

Our documentation is managed in `packages/mermaid/src/docs`. Details on how to edit is in the [Contributing Documentation](#contributing-documentation) section.

Create another issue specifically for the documentation.
Create another issue specifically for the documentation.
You will need to help with the PR, but definitely ask for help if you feel stuck.
When it feels hard to write stuff out, explaining it to someone and having that person ask you clarifying questions can often be 80% of the work!

@@ -94,9 +94,9 @@ b. The importing of mermaid library through the `mermaid.esm.mjs` or `mermaid.es
<body>
Here is a mermaid diagram:
<pre class="mermaid">
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server01]
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server01]
B --> D[Server02]
</pre>
</body>
@@ -135,18 +135,18 @@ Rendering in Mermaid is initialized by `mermaid.initialize()` call. However, doi
<body>
Here is one mermaid diagram:
<pre class="mermaid">
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
B --> D[Server2]
</pre>

And here is another:
<pre class="mermaid">
graph TD
graph TD
A[Client] -->|tcp_123| B
B(Load Balancer)
B -->|tcp_456| C[Server1]
B(Load Balancer)
B -->|tcp_456| C[Server1]
B -->|tcp_456| D[Server2]
</pre>

@@ -168,15 +168,15 @@ In this example mermaid.js is referenced in `src` as a separate JavaScript file,
</head>
<body>
<pre class="mermaid">
graph LR
A --- B
B-->C[fa:fa-ban forbidden]
graph LR
A --- B
B-->C[fa:fa-ban forbidden]
B-->D(fa:fa-spinner);
</pre>
<pre class="mermaid">
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
graph TD
A[Client] --> B[Load Balancer]
B --> C[Server1]
B --> D[Server2]
</pre>
<script type="module">
@@ -5,11 +5,6 @@
"id": "info",
"grammar": "src/language/info/info.langium",
"fileExtensions": [".mmd", ".mermaid"]
},
{
"id": "pie",
"grammar": "src/language/pie/pie.langium",
"fileExtensions": [".mmd", ".mermaid"]
}
],
"mode": "production",
@@ -12,7 +12,7 @@
"type": "module",
"exports": {
".": {
"import": "./dist/mermaid-parser.core.mjs",
"import": "./dist/mermaid-parser.esm.mjs",
"types": "./dist/src/index.d.ts"
}
},
@@ -1,3 +1,3 @@
export type { Info, Pie, PieSection } from './language/index.js';
export type { Info } from './language/index.js';
export type { DiagramAST } from './parse.js';
export { parse, MermaidParseError } from './parse.js';
@@ -1,45 +1,38 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import type { CstNode, GrammarAST, ValueType } from 'langium';
import { DefaultValueConverter } from 'langium';

import { accessibilityDescrRegex, accessibilityTitleRegex, titleRegex } from './commonMatcher.js';

export abstract class MermaidValueConverter extends DefaultValueConverter {
/**
* A method contains convert logic to be used by class.
*
* @param rule - Parsed rule.
* @param input - Matched string.
* @param cstNode - Node in the Concrete Syntax Tree (CST).
* @returns converted the value if it's available or `undefined` if it's not.
*/
protected abstract runCustomConverter(
rule: GrammarAST.AbstractRule,
input: string,
cstNode: CstNode
): ValueType | undefined;

export class CommonValueConverter extends DefaultValueConverter {
protected override runConverter(
rule: GrammarAST.AbstractRule,
input: string,
cstNode: CstNode
): ValueType {
let value: ValueType | undefined = this.runCommonConverter(rule, input, cstNode);

if (value === undefined) {
value = this.runCustomConverter(rule, input, cstNode);
}

const value: ValueType | undefined = CommonValueConverter.customRunConverter(
rule,
input,
cstNode
);
if (value === undefined) {
return super.runConverter(rule, input, cstNode);
} else {
return value;
}

return value;
}

private runCommonConverter(
/**
* A method contains convert logic to be used by class itself or `MermaidValueConverter`.
*
* @param rule - Parsed rule.
* @param input - Matched string.
* @param _cstNode - Node in the Concrete Syntax Tree (CST).
* @returns converted the value if it's common rule or `undefined` if it's not.
*/
public static customRunConverter(
rule: GrammarAST.AbstractRule,
input: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_cstNode: CstNode
): ValueType | undefined {
let regex: RegExp | undefined;
@@ -79,13 +72,3 @@ export abstract class MermaidValueConverter extends DefaultValueConverter {
return undefined;
}
}

export class CommonValueConverter extends MermaidValueConverter {
protected runCustomConverter(
_rule: GrammarAST.AbstractRule,
_input: string,
_cstNode: CstNode
): ValueType | undefined {
return undefined;
}
}
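One of the two variants in this hunk exposes an abstract `MermaidValueConverter` whose subclasses supply `runCustomConverter` and return `undefined` for rules they do not handle. As an illustration only (mirroring the `PieValueConverter` that appears later in this comparison; the class and rule names here are hypothetical):

```ts
import type { CstNode, GrammarAST, ValueType } from 'langium';
import { MermaidValueConverter } from '../common/valueConverter.js';

export class QuotedLabelValueConverter extends MermaidValueConverter {
  protected runCustomConverter(
    rule: GrammarAST.AbstractRule,
    input: string,
    _cstNode: CstNode
  ): ValueType | undefined {
    // Hypothetical rule name: strip surrounding quotes from label terminals.
    if (rule.name !== 'QUOTED_LABEL') {
      return undefined; // fall back to the common/default conversion
    }
    return input.replace(/"/g, '').trim();
  }
}
```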
@@ -1,2 +1,2 @@
export * from './commonLexer.js';
export * from './valueConverter.js';
export * from './commonValueConverters.js';
@@ -1,26 +0,0 @@
import type { GrammarAST, Stream, TokenBuilderOptions } from 'langium';
import type { TokenType } from '../chevrotainWrapper.js';

import { DefaultTokenBuilder } from 'langium';

export class MermaidTokenBuilder extends DefaultTokenBuilder {
private keywords: Set<string>;
constructor(public _keywords: string[]) {
super();
this.keywords = new Set<string>(_keywords);
}

protected override buildKeywordTokens(
rules: Stream<GrammarAST.AbstractRule>,
terminalTokens: TokenType[],
options?: TokenBuilderOptions
): TokenType[] {
const tokenTypes: TokenType[] = super.buildKeywordTokens(rules, terminalTokens, options);
tokenTypes.forEach((tokenType: TokenType): void => {
if (this.keywords.has(tokenType.name) && tokenType.PATTERN !== undefined) {
tokenType.PATTERN = new RegExp(tokenType.PATTERN.toString() + '(?!\\S)');
}
});
return tokenTypes;
}
}
@@ -4,4 +4,3 @@ export * from './generated/module.js';

export * from './common/index.js';
export * from './info/index.js';
export * from './pie/index.js';
@@ -9,7 +9,7 @@ import { EmptyFileSystem, createDefaultModule, createDefaultSharedModule, inject

import { MermaidGeneratedSharedModule, InfoGeneratedModule } from '../generated/module.js';
import { CommonLexer } from '../common/commonLexer.js';
import { CommonValueConverter } from '../common/valueConverter.js';
import { CommonValueConverter } from '../common/commonValueConverters.js';
import { InfoTokenBuilder } from './infoTokenBuilder.js';

/**
@@ -1,7 +1,24 @@
import { MermaidTokenBuilder } from '../common/tokenBuilder.js';
import type { GrammarAST, Stream, TokenBuilderOptions } from 'langium';
import { DefaultTokenBuilder } from 'langium';

export class InfoTokenBuilder extends MermaidTokenBuilder {
constructor() {
super(['info', 'showInfo']);
import type { TokenType } from '../chevrotainWrapper.js';

export class InfoTokenBuilder extends DefaultTokenBuilder {
protected override buildKeywordTokens(
rules: Stream<GrammarAST.AbstractRule>,
terminalTokens: TokenType[],
options?: TokenBuilderOptions
): TokenType[] {
const tokenTypes: TokenType[] = super.buildKeywordTokens(rules, terminalTokens, options);
// to restrict users, they mustn't have any non-whitespace characters after the keyword.
tokenTypes.forEach((tokenType: TokenType): void => {
if (
(tokenType.name === 'info' || tokenType.name === 'showInfo') &&
tokenType.PATTERN !== undefined
) {
tokenType.PATTERN = new RegExp(tokenType.PATTERN.toString() + '(?!\\S)');
}
});
return tokenTypes;
}
}
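A quick illustration, not taken from the diff, of what the appended `(?!\S)` negative lookahead buys the keyword tokens: the keyword only matches when followed by whitespace or end of input, so a longer identifier is not lexed as the keyword.

```ts
const bounded = new RegExp(/info/.source + '(?!\\S)');

console.log(bounded.test('information')); // false – "info" is glued to more letters
console.log(bounded.test('info'));        // true  – end of input follows
console.log(bounded.test('info showInfo')); // true – whitespace follows the keyword
```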
@@ -1 +0,0 @@
export * from './pieModule.js';
@@ -1,20 +0,0 @@
grammar Pie
import "../common/common";

entry Pie:
NEWLINE*
"pie" showData?="showData"?
(
NEWLINE* TitleAndAccessibilities sections+=PieSection*
| NEWLINE+ sections+=PieSection+
| NEWLINE*
)
;

PieSection:
label=PIE_SECTION_LABEL ":" value=PIE_SECTION_VALUE
NEWLINE+
;

terminal PIE_SECTION_LABEL: /"[^"]+"/;
terminal PIE_SECTION_VALUE returns number: /(0|[1-9][0-9]*)(\.[0-9]+)?/;
@@ -1,68 +0,0 @@
import type {
DefaultSharedModuleContext,
LangiumServices,
LangiumSharedServices,
Module,
PartialLangiumServices,
} from 'langium';
import { EmptyFileSystem, createDefaultModule, createDefaultSharedModule, inject } from 'langium';

import { MermaidGeneratedSharedModule, PieGeneratedModule } from '../generated/module.js';
import { CommonLexer } from '../common/commonLexer.js';
import { PieTokenBuilder } from './pieTokenBuilder.js';
import { PieValueConverter } from './pieValueConverter.js';

/**
* Declaration of `Pie` services.
*/
type PieAddedServices = {
parser: {
Lexer: CommonLexer;
TokenBuilder: PieTokenBuilder;
ValueConverter: PieValueConverter;
};
};

/**
* Union of Langium default services and `Pie` services.
*/
export type PieServices = LangiumServices & PieAddedServices;

/**
* Dependency injection module that overrides Langium default services and
* contributes the declared `Pie` services.
*/
const PieModule: Module<PieServices, PartialLangiumServices & PieAddedServices> = {
parser: {
Lexer: (services) => new CommonLexer(services),
TokenBuilder: () => new PieTokenBuilder(),
ValueConverter: () => new PieValueConverter(),
},
};

/**
* Create the full set of services required by Langium.
*
* First inject the shared services by merging two modules:
* - Langium default shared services
* - Services generated by langium-cli
*
* Then inject the language-specific services by merging three modules:
* - Langium default language-specific services
* - Services generated by langium-cli
* - Services specified in this file
* @param context - Optional module context with the LSP connection
* @returns An object wrapping the shared services and the language-specific services
*/
export function createPieServices(context: DefaultSharedModuleContext = EmptyFileSystem): {
shared: LangiumSharedServices;
Pie: PieServices;
} {
const shared: LangiumSharedServices = inject(
createDefaultSharedModule(context),
MermaidGeneratedSharedModule
);
const Pie: PieServices = inject(createDefaultModule({ shared }), PieGeneratedModule, PieModule);
shared.ServiceRegistry.register(Pie);
return { shared, Pie };
}
@@ -1,7 +0,0 @@
import { MermaidTokenBuilder } from '../common/tokenBuilder.js';

export class PieTokenBuilder extends MermaidTokenBuilder {
constructor() {
super(['pie', 'showData']);
}
}
@@ -1,17 +0,0 @@
import type { CstNode, GrammarAST, ValueType } from 'langium';
import { MermaidValueConverter } from '../common/valueConverter.js';

export class PieValueConverter extends MermaidValueConverter {
override runCustomConverter(
rule: GrammarAST.AbstractRule,
input: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_cstNode: CstNode
): ValueType | undefined {
if (rule.name !== 'PIE_SECTION_LABEL') {
return undefined;
}

return input.replace(/"/g, '').trim();
}
}
@@ -1,8 +1,8 @@
import type { LangiumParser, ParseResult } from 'langium';
import type { Info, Pie } from './index.js';
import { createInfoServices, createPieServices } from './language/index.js';
import type { Info } from './index.js';
import { createInfoServices } from './language/index.js';

export type DiagramAST = Info | Pie;
export type DiagramAST = Info;

const parsers: Record<string, LangiumParser> = {};

@@ -13,14 +13,8 @@ const initializers = {
const parser = createInfoServices().Info.parser.LangiumParser;
parsers['info'] = parser;
},
pie: () => {
const parser = createPieServices().Pie.parser.LangiumParser;
parsers['pie'] = parser;
},
} as const;

export function parse(diagramType: 'info', text: string): Info;
export function parse(diagramType: 'pie', text: string): Pie;
export function parse<T extends DiagramAST>(
diagramType: keyof typeof initializers,
text: string
@@ -1,294 +0,0 @@
import { describe, expect, it } from 'vitest';
import type { LangiumParser, ParseResult } from 'langium';

import type { PieServices } from '../src/language/index.js';
import { Pie, createPieServices } from '../src/language/index.js';

const services: PieServices = createPieServices().Pie;
const parser: LangiumParser = services.parser.LangiumParser;
export function createPieTestServices(): {
services: PieServices;
parse: (input: string) => ParseResult<Pie>;
} {
const parse = (input: string) => {
return parser.parse<Pie>(input);
};

return { services, parse };
}

describe('pie', () => {
const { parse } = createPieTestServices();

it.each([
`pie`,
` pie `,
`\tpie\t`,
`
\tpie
`,
])('should handle regular pie', (context: string) => {
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
});

it.each([
`pie showData`,
` pie showData `,
`\tpie\tshowData\t`,
`
pie\tshowData
`,
])('should handle regular showData', (context: string) => {
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.showData).toBeTruthy();
});

it.each([
`pie title sample title`,
` pie title sample title `,
`\tpie\ttitle sample title\t`,
`pie
\ttitle sample title
`,
])('should handle regular pie + title in same line', (context: string) => {
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.title).toBe('sample title');
});

it.each([
`pie
title sample title`,
`pie
title sample title
`,
`pie
title sample title`,
`pie
title sample title
`,
])('should handle regular pie + title in different line', (context: string) => {
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.title).toBe('sample title');
});

it.each([
`pie showData title sample title`,
`pie showData title sample title
`,
])('should handle regular pie + showData + title', (context: string) => {
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.showData).toBeTruthy();
expect(value.title).toBe('sample title');
});

it.each([
`pie showData
title sample title`,
`pie showData
title sample title
`,
`pie showData
title sample title`,
`pie showData
title sample title
`,
])('should handle regular showData + title in different line', (context: string) => {
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.showData).toBeTruthy();
expect(value.title).toBe('sample title');
});

describe('sections', () => {
describe('normal', () => {
it.each([
`pie
"GitHub":100
"GitLab":50`,
`pie
"GitHub" : 100
"GitLab" : 50`,
`pie
"GitHub"\t:\t100
"GitLab"\t:\t50`,
`pie
\t"GitHub" \t : \t 100
\t"GitLab" \t : \t 50
`,
])('should handle regular secions', (context: string) => {
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);

const section0 = value.sections[0];
expect(section0?.label).toBe('GitHub');
expect(section0?.value).toBe(100);

const section1 = value.sections[1];
expect(section1?.label).toBe('GitLab');
expect(section1?.value).toBe(50);
});

it('should handle sections with showData', () => {
const context = `pie showData
"GitHub": 100
"GitLab": 50`;
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.showData).toBeTruthy();

const section0 = value.sections[0];
expect(section0?.label).toBe('GitHub');
expect(section0?.value).toBe(100);

const section1 = value.sections[1];
expect(section1?.label).toBe('GitLab');
expect(section1?.value).toBe(50);
});

it('should handle sections with title', () => {
const context = `pie title sample wow
"GitHub": 100
"GitLab": 50`;
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.title).toBe('sample wow');

const section0 = value.sections[0];
expect(section0?.label).toBe('GitHub');
expect(section0?.value).toBe(100);

const section1 = value.sections[1];
expect(section1?.label).toBe('GitLab');
expect(section1?.value).toBe(50);
});

it('should handle sections with accTitle', () => {
const context = `pie accTitle: sample wow
"GitHub": 100
"GitLab": 50`;
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.accTitle).toBe('sample wow');

const section0 = value.sections[0];
expect(section0?.label).toBe('GitHub');
expect(section0?.value).toBe(100);

const section1 = value.sections[1];
expect(section1?.label).toBe('GitLab');
expect(section1?.value).toBe(50);
});

it('should handle sections with single line accDescr', () => {
const context = `pie accDescr: sample wow
"GitHub": 100
"GitLab": 50`;
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.accDescr).toBe('sample wow');

const section0 = value.sections[0];
expect(section0?.label).toBe('GitHub');
expect(section0?.value).toBe(100);

const section1 = value.sections[1];
expect(section1?.label).toBe('GitLab');
expect(section1?.value).toBe(50);
});

it('should handle sections with multi line accDescr', () => {
const context = `pie accDescr {
sample wow
}
"GitHub": 100
"GitLab": 50`;
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);
expect(value.accDescr).toBe('sample wow');

const section0 = value.sections[0];
expect(section0?.label).toBe('GitHub');
expect(section0?.value).toBe(100);

const section1 = value.sections[1];
expect(section1?.label).toBe('GitLab');
expect(section1?.value).toBe(50);
});
});

describe('duplicate', () => {
it('should handle duplicate sections', () => {
const context = `pie
"GitHub": 100
"GitHub": 50`;
const result = parse(context);
expect(result.parserErrors).toHaveLength(0);
expect(result.lexerErrors).toHaveLength(0);

const value = result.value;
expect(value.$type).toBe(Pie);

const section0 = value.sections[0];
expect(section0?.label).toBe('GitHub');
expect(section0?.value).toBe(100);

const section1 = value.sections[1];
expect(section1?.label).toBe('GitHub');
expect(section1?.value).toBe(50);
});
});
});
});
@@ -20,7 +20,8 @@ cd mermaid-live-editor
npm install

# Link local mermaid to live editor
npm link ../packages/mermaid
npm link ../packages/mermaid

# Force Build the site
npm run build -- --force