Mirror of https://github.com/mermaid-js/mermaid.git (synced 2025-11-12 16:54:10 +01:00)

Compare commits: add-blog-p ... 5237-unifi (147 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 8fb9b21f59 |  |
|  | b8dd4b9048 |  |
|  | 4f6586873f |  |
|  | a56aa56cbc |  |
|  | 48e313bf18 |  |
|  | fa6bcd8b30 |  |
|  | eed5ba6c1e |  |
|  | 8e40f31ed3 |  |
|  | af05df937e |  |
|  | f01a9d50fb |  |
|  | f14b65cf43 |  |
|  | e5069083c9 |  |
|  | 8a079299a6 |  |
|  | dfe223c3b8 |  |
|  | e09410448c |  |
|  | 269ddf4a34 |  |
|  | e41bccad64 |  |
|  | 7da85b9005 |  |
|  | 8d23b684f5 |  |
|  | 47776e3ca1 |  |
|  | 52e0dc6464 |  |
|  | 5413668c1a |  |
|  | 3e60270193 |  |
|  | ddaa7afbb5 |  |
|  | 2377a0d753 |  |
|  | a4de1b4944 |  |
|  | 5b66810646 |  |
|  | aa63f320f8 |  |
|  | ca2c112c9a |  |
|  | 75b0ee3daf |  |
|  | 60c79dfb2f |  |
|  | c10ab23387 |  |
|  | ec7adb7e00 |  |
|  | 0d2c7c3b2c |  |
|  | c8f2abad18 |  |
|  | b6ef7367c2 |  |
|  | 4463e9d7a3 |  |
|  | 991721e955 |  |
|  | 47d4929bc6 |  |
|  | ff36301cc5 |  |
|  | 7fb079ef3c |  |
|  | 7bcba29493 |  |
|  | 8744008630 |  |
|  | 418d34cbee |  |
|  | fcd5c2c21d |  |
|  | e3c6281b26 |  |
|  | ae5b886148 |  |
|  | c5badf8341 |  |
|  | da882de0d4 |  |
|  | 633febaeab |  |
|  | bf9485c231 |  |
|  | a722bdcd37 |  |
|  | 8a5fe53706 |  |
|  | 362e49aa2c |  |
|  | 6d2939c9b9 |  |
|  | 9a11cb7bb3 |  |
|  | 45cd2c005b |  |
|  | 2d241808e9 |  |
|  | 9fc17f67c6 |  |
|  | 287e903385 |  |
|  | b1b27f7a2b |  |
|  | 745ec0daae |  |
|  | 64f1d2f005 |  |
|  | 55afd8cdb8 |  |
|  | 42a12a62ac |  |
|  | e8018ed779 |  |
|  | 00eaebeac8 |  |
|  | dd92aecdca |  |
|  | 20f9e503cb |  |
|  | 2df26ca323 |  |
|  | b7cb9673b0 |  |
|  | 8d815f878c |  |
|  | dab26df9c4 |  |
|  | c7fe9a6465 |  |
|  | 4f26f3ae2e |  |
|  | 730fa89e4c |  |
|  | 0326d899c4 |  |
|  | c993adfcdb |  |
|  | 4885b311f8 |  |
|  | 3274f673ab |  |
|  | f5fefc0499 |  |
|  | ab077992f5 |  |
|  | 2f80c3a2c4 |  |
|  | 4c0b8f6f40 |  |
|  | ce6f2739b5 |  |
|  | 7ecb772c6c |  |
|  | f480fb7c85 |  |
|  | 94bb43907d |  |
|  | 32a62bede8 |  |
|  | 7372d9055a |  |
|  | 3ff4a6910d |  |
|  | 683695bec2 |  |
|  | fbe13f4e9e |  |
|  | 7529db8254 |  |
|  | b9a73c88b5 |  |
|  | d497a5c4ac |  |
|  | 323737f3a6 |  |
|  | 39bffcb3a0 |  |
|  | 16cd7bf649 |  |
|  | 50c9ede69d |  |
|  | ca4fa11859 |  |
|  | 853948a93d |  |
|  | 58af76384a |  |
|  | e2ca8e820e |  |
|  | 1cbd730b8d |  |
|  | 6f1a553234 |  |
|  | b2c286ff1d |  |
|  | 024bffd683 |  |
|  | 5e53d6cfee |  |
|  | 913b29dbcb |  |
|  | e68ba3f1df |  |
|  | 92f1941fb0 |  |
|  | a536484408 |  |
|  | 703c25a6e2 |  |
|  | 0c36248073 |  |
|  | 08e4489ede |  |
|  | 76801ff564 |  |
|  | 7e837dfee2 |  |
|  | 21185227ad |  |
|  | b3e29dc11b |  |
|  | 9f6fc5a074 |  |
|  | f2d3ac1e7b |  |
|  | edad740e15 |  |
|  | b22ae106b2 |  |
|  | 7fe4a2ce6c |  |
|  | 8102ba4d52 |  |
|  | dd5bfb3b24 |  |
|  | a2702e4058 |  |
|  | e6e463092c |  |
|  | 128c69aa76 |  |
|  | 3e4193e3d5 |  |
|  | da40dbf1dd |  |
|  | 01350467bd |  |
|  | 6b1d568172 |  |
|  | d220720dde |  |
|  | b7f864cdb0 |  |
|  | 1f8accd6e0 |  |
|  | 8205e3619a |  |
|  | 357da0ca28 |  |
|  | 9b411fa8d4 |  |
|  | 087a5748c8 |  |
|  | 6105185d05 |  |
|  | c993a817d1 |  |
|  | 002aa29f19 |  |
|  | 07de090723 |  |
|  | 6b7e1225dd |  |
|  | fcda3dc8c6 |  |
@@ -22,9 +22,9 @@ export const packageOptions = {
    packageName: 'mermaid-zenuml',
    file: 'detector.ts',
  },
  'mermaid-flowchart-elk': {
    name: 'mermaid-flowchart-elk',
    packageName: 'mermaid-flowchart-elk',
    file: 'detector.ts',
  'mermaid-layout-elk': {
    name: 'mermaid-layout-elk',
    packageName: 'mermaid-layout-elk',
    file: 'layouts.ts',
  },
} as const;

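The hunk above appears to replace the `mermaid-flowchart-elk` detector entry with a `mermaid-layout-elk` entry whose entry file is `layouts.ts` instead of `detector.ts`. A minimal sketch of how such an entry could be resolved to a build input, assuming the `packages/<packageName>/src/<file>` layout; the import path and `entryPoints` name are illustrative, not part of the diff:

```ts
// Illustrative sketch (paths assumed, not from the diff): derive build entry
// points from packageOptions, e.g. 'mermaid-layout-elk' -> layouts.ts.
import { packageOptions } from './common.js'; // assumed location of packageOptions

const entryPoints = Object.values(packageOptions).map(({ packageName, file }) => {
  // e.g. packages/mermaid-layout-elk/src/layouts.ts
  return `packages/${packageName}/src/${file}`;
});

console.info(entryPoints);
```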
@@ -1,6 +1,7 @@
import jison from 'jison';

export const transformJison = (src: string): string => {
  // @ts-ignore - Jison is not typed properly
  const parser = new jison.Generator(src, {
    moduleType: 'js',
    'token-stack': true,

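For context, `transformJison` compiles a Jison grammar source string into a JavaScript parser module at build time. A sketch of how a transform like this is typically hooked into esbuild via an `onLoad` plugin; the module path and exact plugin shape here are assumptions rather than content from the diff:

```ts
// Illustrative esbuild plugin wiring for transformJison (details assumed).
import type { Plugin } from 'esbuild';
import { readFile } from 'node:fs/promises';
import { transformJison } from './jisonTransformer.js'; // assumed module name

export const jisonPlugin: Plugin = {
  name: 'jison',
  setup(build) {
    build.onLoad({ filter: /\.jison$/ }, async ({ path }) => {
      const source = await readFile(path, 'utf8');
      // Compile the Jison grammar to a JS parser module during bundling.
      return { contents: transformJison(source), loader: 'js' };
    });
  },
};
```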
@@ -27,6 +27,7 @@ controly
CSSCLASS
CYLINDEREND
CYLINDERSTART
DAGA
datakey
DEND
descr
@@ -89,6 +90,7 @@ reqs
rewritelinks
rgba
RIGHTOF
roughjs
sankey
sequencenumber
shrc

@@ -54,6 +54,7 @@ presetAttributify
pyplot
redmine
rehype
roughjs
rscratch
sparkline
sphinxcontrib

@@ -9,6 +9,7 @@ elems
gantt
gitgraph
gzipped
handdrawn
knsv
Knut
marginx
@@ -17,6 +18,7 @@ Markdownish
mermaidjs
mindmap
mindmaps
mrtree
multigraph
nodesep
NOTEGROUP

@@ -1 +1,4 @@
circo
handdrawnSeed
neato
newbranch

@@ -7,8 +7,8 @@ import { MermaidBuildOptions, defaultOptions, getBuildConfig } from './util.js';
const shouldVisualize = process.argv.includes('--visualize');

const buildPackage = async (entryName: keyof typeof packageOptions) => {
  const commonOptions = { ...defaultOptions, entryName } as const;
  const buildConfigs = [
  const commonOptions: MermaidBuildOptions = { ...defaultOptions, entryName } as const;
  const buildConfigs: MermaidBuildOptions[] = [
    // package.mjs
    { ...commonOptions },
    // package.min.mjs

@@ -8,7 +8,7 @@ import { jisonPlugin } from './jisonPlugin.js';

const __dirname = fileURLToPath(new URL('.', import.meta.url));

export interface MermaidBuildOptions {
export interface MermaidBuildOptions extends BuildOptions {
  minify: boolean;
  core: boolean;
  metafile: boolean;

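The change above makes `MermaidBuildOptions` extend esbuild's `BuildOptions`, so one object can carry the mermaid-specific flags alongside anything esbuild understands, and `commonOptions`/`buildConfigs` in `build.ts` can be typed directly. A rough sketch of the pattern under that assumption; fields beyond those shown in the hunks are illustrative:

```ts
// Sketch only: mermaid-specific fields ride along with plain esbuild options.
import { build, type BuildOptions } from 'esbuild';

interface MermaidBuildOptions extends BuildOptions {
  minify: boolean;
  core: boolean;
  metafile: boolean;
  entryName: string; // assumed, based on the build.ts hunk above
}

async function buildEntry(options: MermaidBuildOptions) {
  // The mermaid-specific fields drive config generation elsewhere; strip them
  // before handing the remainder to esbuild.
  const { core, entryName, ...esbuildOptions } = options;
  console.info(`building ${entryName} (core: ${core})`);
  await build(esbuildOptions);
}
```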
5 .gitignore (vendored)
@@ -35,7 +35,7 @@ cypress/snapshots/
.tsbuildinfo
tsconfig.tsbuildinfo

knsv*.html
#knsv*.html
local*.html
stats/

@@ -48,6 +48,7 @@ demos/dev/**
!/demos/dev/example.html
!/demos/dev/reload.js
tsx-0/**
vite.config.ts.timestamp-*

# autogenereated by langium-cli
generated/
generated/

@@ -16,3 +16,5 @@ generated/
# Ignore the files creates in /demos/dev except for example.html
demos/dev/**
!/demos/dev/example.html
# TODO: Lots of errors to fix
cypress/platform/state-refactor.html

@@ -137,4 +137,9 @@ describe('XSS', () => {
    cy.wait(1000);
    cy.get('#the-malware').should('not.exist');
  });
  it('should sanitize backticks block diagram labels properly', () => {
    cy.visit('http://localhost:9000/xss25.html');
    cy.wait(1000);
    cy.get('#the-malware').should('not.exist');
  });
});

775 cypress/platform/flowchart-refactor.html (new file)
@@ -0,0 +1,775 @@
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<link href="https://fonts.googleapis.com/css?family=Montserrat&display=swap" rel="stylesheet" />
|
||||
<link href="https://unpkg.com/tailwindcss@^1.0/dist/tailwind.min.css" rel="stylesheet" />
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css" />
|
||||
<link href="https://cdn.jsdelivr.net/npm/@mdi/font@6.9.96/css/materialdesignicons.min.css" rel="stylesheet" />
|
||||
<link href="https://fonts.googleapis.com/css?family=Noto+Sans+SC&display=swap" rel="stylesheet" />
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
||||
<link href="https://fonts.googleapis.com/css2?family=Kalam:wght@300;400;700&display=swap" rel="stylesheet" />
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Caveat:wght@400..700&family=Kalam:wght@300;400;700&family=Rubik+Mono+One&display=swap"
|
||||
rel="stylesheet" />
|
||||
<link href="https://fonts.googleapis.com/css2?family=Kalam:wght@300;400;700&family=Rubik+Mono+One&display=swap"
|
||||
rel="stylesheet" />
|
||||
|
||||
<style>
|
||||
body {
|
||||
font-family: 'Arial';
|
||||
}
|
||||
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
table-layout: fixed;
|
||||
}
|
||||
|
||||
th,
|
||||
td {
|
||||
border: 1px solid black;
|
||||
padding: 10px;
|
||||
text-align: center;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.separator {
|
||||
height: 20px;
|
||||
background-color: #f0f0f0;
|
||||
}
|
||||
|
||||
.vertical-header {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.collapsible {
|
||||
background-color: #f9f9f9;
|
||||
color: #444;
|
||||
cursor: pointer;
|
||||
padding: 18px;
|
||||
width: 100%;
|
||||
border: none;
|
||||
text-align: left;
|
||||
outline: none;
|
||||
font-size: 15px;
|
||||
}
|
||||
|
||||
.active,
|
||||
.collapsible:hover {
|
||||
background-color: #ccc;
|
||||
}
|
||||
|
||||
.collapsible:after {
|
||||
content: '\002B';
|
||||
color: #777;
|
||||
font-weight: bold;
|
||||
float: right;
|
||||
margin-left: 2px;
|
||||
}
|
||||
|
||||
.active:after {
|
||||
content: "\2212";
|
||||
}
|
||||
|
||||
.content {
|
||||
padding: 0 5px;
|
||||
max-height: 0;
|
||||
overflow: hidden;
|
||||
transition: max-height 0.2s ease-out;
|
||||
background-color: #f1f1f1;
|
||||
}
|
||||
|
||||
.content .pre-scrollable {
|
||||
max-height: 200px;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<table>
|
||||
<tr>
|
||||
<th></th> <!-- Placeholder for the top left corner -->
|
||||
<th>Dagre</th>
|
||||
<th>Dagre with rough</th>
|
||||
<th>ELK</th>
|
||||
<th>ELK with rough</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Stadium shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1([This is the text in the box])
|
||||
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram1" class="mermaid">
|
||||
flowchart LR
|
||||
id1([This is the text in the box])
|
||||
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram2" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1([This is the text in the box])
|
||||
|
||||
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram3" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1([This is the text in the box])
|
||||
|
||||
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram4" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1([This is the text in the box])
|
||||
|
||||
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Sub-Routine shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1[[This is the text in the box]]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram5" class="mermaid">
|
||||
flowchart LR
|
||||
id1[[This is the text in the box]]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram6" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1[[This is the text in the box]]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram7" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1[[This is the text in the box]]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram8" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1[[This is the text in the box]]
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Cylindrical shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1[(Database)]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram9" class="mermaid">
|
||||
flowchart LR
|
||||
id1[(Database)]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram10" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1[(Database)]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram11" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1[(Database)]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram12" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1[(Database)]
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Circle shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1((This is the text in the circle))
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram13" class="mermaid">
|
||||
flowchart LR
|
||||
id1((This is the text in the circle))
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram14" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1((This is the text in the circle))
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram15" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1((This is the text in the circle))
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram16" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1((This is the text in the circle))
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Double Circle shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart TD
|
||||
id1(((This is the text in the circle)))
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram17" class="mermaid">
|
||||
flowchart TD
|
||||
id1(((This is the text in the circle)))
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram18" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart TD
|
||||
id1(((This is the text in the circle)))
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram19" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
id1(((This is the text in the circle)))
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram20" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
id1(((This is the text in the circle)))
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Asymmetric shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1>This is the text in the box]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram21" class="mermaid">
|
||||
flowchart LR
|
||||
id1>This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram22" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1>This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram23" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1>This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram24" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1>This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Rhombus/Diamond/Question shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1{This is the text in the box}
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram25" class="mermaid">
|
||||
flowchart LR
|
||||
id1{This is the text in the box}
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram26" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1{This is the text in the box}
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram27" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1{This is the text in the box}
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram28" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1{This is the text in the box}
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Hexagon shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1{{This is the text in the box}}
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram29" class="mermaid">
|
||||
flowchart LR
|
||||
id1{{This is the text in the box}}
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram30" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1{{This is the text in the box}}
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram31" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1{{This is the text in the box}}
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram32" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1{{This is the text in the box}}
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Parallelogram shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart TD
|
||||
id1[/This is the text in the box/]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram33" class="mermaid">
|
||||
flowchart TD
|
||||
id1[/This is the text in the box/]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram34" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart TD
|
||||
id1[/This is the text in the box/]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram35" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
id1[/This is the text in the box/]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram36" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
id1[/This is the text in the box/]
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Parallelogram Alt shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart TD
|
||||
id1[\This is the text in the box\]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram37" class="mermaid">
|
||||
flowchart TD
|
||||
id1[\This is the text in the box\]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram38" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart TD
|
||||
id1[\This is the text in the box\]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram39" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
id1[\This is the text in the box\]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram40" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
id1[\This is the text in the box\]
|
||||
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Trapezoid shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart TD
|
||||
A[/Christmas\]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram41" class="mermaid">
|
||||
flowchart TD
|
||||
A[/Christmas\]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram42" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart TD
|
||||
A[/Christmas\]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram43" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
A[/Christmas\]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram44" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
A[/Christmas\]
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Trapezoid Alt shape</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart TD
|
||||
A[\Christmas/]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram45" class="mermaid">
|
||||
flowchart TD
|
||||
A[\Christmas/]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram46" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart TD
|
||||
A[\Christmas/]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram47" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
A[\Christmas/]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram48" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart TD
|
||||
A[\Christmas/]
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Rect with rounded corner</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1(This is the text in the box)
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram49" class="mermaid">
|
||||
flowchart LR
|
||||
id1(This is the text in the box)
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram50" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1(This is the text in the box)
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram51" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1(This is the text in the box)
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram52" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1(This is the text in the box)
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th class="vertical-header">
|
||||
<button class="collapsible">Rect with sharp corner</button>
|
||||
<div class="content">
|
||||
<div class="pre-scrollable">
|
||||
<pre>
|
||||
flowchart LR
|
||||
id1[This is the text in the box]
|
||||
</pre>
|
||||
</div>
|
||||
</th>
|
||||
<td>
|
||||
<pre id="diagram53" class="mermaid">
|
||||
flowchart LR
|
||||
id1[This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram54" class="mermaid">
|
||||
%%{init: {"look": "handdrawn"} }%%
|
||||
flowchart LR
|
||||
id1[This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram55" class="mermaid">
|
||||
%%{init: {"handdrawn": false, "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1[This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<pre id="diagram56" class="mermaid">
|
||||
%%{init: {"look": "handdrawn", "layout": "elk"} }%%
|
||||
flowchart LR
|
||||
id1[This is the text in the box]
|
||||
</pre>
|
||||
</td>
|
||||
</tr>
|
||||
<!-- Separator row -->
|
||||
<tr class="separator">
|
||||
<td colspan="5"></td> <!-- This cell spans all columns including the vertical header -->
|
||||
</tr>
|
||||
|
||||
  </table>

  <script type="module">
    import mermaid from './mermaid.esm.mjs';
    import { layouts } from './mermaid-layout-elk.esm.mjs';
    mermaid.registerLayoutLoaders(layouts);
    mermaid.parseError = function (err, hash) {

    };

    mermaid.initialize({
      handdrawn: false,
      mergeEdges: true,
      layout: 'dagre',
      flowchart: { titleTopMargin: 10 },
      // fontFamily: 'Caveat',
      fontFamily: 'Kalam',
      sequence: {
        actorFontFamily: 'courier',
        noteFontFamily: 'courier',
        messageFontFamily: 'courier',
      },
      fontSize: 16,
      logLevel: 0,
    });
    function callback() {
      alert('It worked');
    }
    mermaid.parseError = function (err, hash) {
      console.error('In parse error:');
      console.error(err);
    };

    let coll = document.getElementsByClassName("collapsible");
    for (const element of coll) {
      element.addEventListener("click", function () {
        this.classList.toggle("active");
        let content = this.nextElementSibling;
        if (content.style.maxHeight) {
          content.style.maxHeight = null;
        } else {
          content.style.maxHeight = content.scrollHeight + "px";
        }
      });
    }
  </script>
</body>

</html>
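Outside these Cypress platform pages, the same layout-loader API exercised above can be sketched roughly as follows; the `@mermaid-js/layout-elk` specifier is an assumption (the test pages import a local `mermaid-layout-elk.esm.mjs` build instead), and the pages opt into ELK or the hand-drawn look per diagram via `%%{init: ...}%%` directives rather than globally:

```ts
// Rough sketch, not taken from the diff: register the ELK layout loaders once,
// then select the layout globally or per diagram via %%{init: {"layout": "elk"}}%%.
import mermaid from 'mermaid';
import elkLayouts from '@mermaid-js/layout-elk'; // assumed package specifier

mermaid.registerLayoutLoaders(elkLayouts);
mermaid.initialize({ startOnLoad: true, layout: 'elk' });
```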
181 cypress/platform/flowchart-sate.html (new file)
@@ -0,0 +1,181 @@
<html>

<head>
  <link href="https://fonts.googleapis.com/css?family=Montserrat&display=swap" rel="stylesheet" />
  <link href="https://unpkg.com/tailwindcss@^1.0/dist/tailwind.min.css" rel="stylesheet" />
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css" />
  <link href="https://cdn.jsdelivr.net/npm/@mdi/font@6.9.96/css/materialdesignicons.min.css" rel="stylesheet" />
  <link href="https://fonts.googleapis.com/css?family=Noto+Sans+SC&display=swap" rel="stylesheet" />
  <link rel="preconnect" href="https://fonts.googleapis.com" />
  <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
  <link href="https://fonts.googleapis.com/css2?family=Kalam:wght@300;400;700&display=swap" rel="stylesheet" />
  <link
    href="https://fonts.googleapis.com/css2?family=Caveat:wght@400..700&family=Kalam:wght@300;400;700&family=Rubik+Mono+One&display=swap"
    rel="stylesheet" />
  <link href="https://fonts.googleapis.com/css2?family=Kalam:wght@300;400;700&family=Rubik+Mono+One&display=swap"
    rel="stylesheet" />

  <style>
    body {
      font-family: 'Arial';
    }

    table {
      width: 100%;
      border-collapse: collapse;
      table-layout: fixed;
    }

    th,
    td {
      border: 1px solid black;
      padding: 10px;
      text-align: center;
      vertical-align: middle;
    }

    .separator {
      height: 20px;
      background-color: #f0f0f0;
    }

    .vertical-header {
      text-align: center;
    }

    .collapsible {
      background-color: #f9f9f9;
      color: #444;
      cursor: pointer;
      padding: 18px;
      width: 100%;
      border: none;
      text-align: left;
      outline: none;
      font-size: 15px;
    }

    .active,
    .collapsible:hover {
      background-color: #ccc;
    }

    .collapsible:after {
      content: '\002B';
      color: #777;
      font-weight: bold;
      float: right;
      margin-left: 2px;
    }

    .active:after {
      content: "\2212";
    }

    .content {
      padding: 0 5px;
      max-height: 0;
      overflow: hidden;
      transition: max-height 0.2s ease-out;
      background-color: #f1f1f1;
    }

    .content .pre-scrollable {
      max-height: 200px;
      overflow-y: scroll;
    }
  </style>
</head>

<body>
  <table>
    <tr>
      <th></th> <!-- Placeholder for the top left corner -->
      <th>State rough</th>
      <th>Flowchart rough</th>
    </tr>
    <tr>
      <th class="vertical-header">
        <button class="collapsible">Stadium shape</button>
        <div class="content">
          <div class="pre-scrollable">
            <pre>
              flowchart LR
              id1([This is the text in the box])

            </pre>
          </div>
      </th>
      <td>
        <pre id="diagram1" class="mermaid">
          %%{init: {"look": "handdrawn"} }%%
          stateDiagram-v2
          stateA

        </pre>
      </td>
      <td>
        <pre id="diagram2" class="mermaid">
          %%{init: {"look": "handdrawn"} }%%
          flowchart LR
          id1[[This is the text in the box]]

        </pre>
      </td>

    </tr>

  </table>

  <script type="module">
    import mermaid from './mermaid.esm.mjs';
    import { layouts } from './mermaid-layout-elk.esm.mjs';
    mermaid.registerLayoutLoaders(layouts);
    mermaid.parseError = function (err, hash) {

    };

    mermaid.initialize({
      handdrawn: false,
      mergeEdges: true,
      layout: 'dagre',
      flowchart: { titleTopMargin: 10 },
      // fontFamily: 'Caveat',
      fontFamily: 'Kalam',
      sequence: {
        actorFontFamily: 'courier',
        noteFontFamily: 'courier',
        messageFontFamily: 'courier',
      },
      fontSize: 16,
      logLevel: 0,
    });
    function callback() {
      alert('It worked');
    }
    mermaid.parseError = function (err, hash) {
      console.error('In parse error:');
      console.error(err);
    };

    let coll = document.getElementsByClassName("collapsible");
    for (const element of coll) {
      element.addEventListener("click", function () {
        this.classList.toggle("active");
        let content = this.nextElementSibling;
        if (content.style.maxHeight) {
          content.style.maxHeight = null;
        } else {
          content.style.maxHeight = content.scrollHeight + "px";
        }
      });
    }
  </script>
</body>

</html>
433 cypress/platform/knsv-4442.html (new file)
@@ -0,0 +1,433 @@
|
||||
<html>
|
||||
<head>
|
||||
<link href="https://fonts.googleapis.com/css?family=Montserrat&display=swap" rel="stylesheet" />
|
||||
<link href="https://unpkg.com/tailwindcss@^1.0/dist/tailwind.min.css" rel="stylesheet" />
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css"
|
||||
/>
|
||||
<link
|
||||
href="https://cdn.jsdelivr.net/npm/@mdi/font@6.9.96/css/materialdesignicons.min.css"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css?family=Noto+Sans+SC&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<style>
|
||||
body {
|
||||
/* background: rgb(221, 208, 208); */
|
||||
/* background:#333; */
|
||||
font-family: 'Arial';
|
||||
/* font-size: 18px !important; */
|
||||
}
|
||||
h1 {
|
||||
color: grey;
|
||||
}
|
||||
.mermaid2 {
|
||||
display: none;
|
||||
}
|
||||
.mermaid svg {
|
||||
/* font-size: 18px !important; */
|
||||
background-color: #efefef;
|
||||
background-image: radial-gradient(#fff 51%, transparent 91%),
|
||||
radial-gradient(#fff 51%, transparent 91%);
|
||||
background-size: 20px 20px;
|
||||
background-position:
|
||||
0 0,
|
||||
10px 10px;
|
||||
background-repeat: repeat;
|
||||
}
|
||||
.malware {
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 150px;
|
||||
background: red;
|
||||
color: black;
|
||||
display: flex;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
font-family: monospace;
|
||||
font-size: 72px;
|
||||
}
|
||||
/* tspan {
|
||||
font-size: 6px !important;
|
||||
} */
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<pre id="diagram" class="mermaid">
|
||||
stateDiagram-v2
|
||||
[*] --> Still
|
||||
Still --> [*]
|
||||
Still --> Moving
|
||||
Moving --> Still
|
||||
Moving --> Crash
|
||||
Crash --> [*] </pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart RL
|
||||
subgraph "`one`"
|
||||
a1 -- l1 --> a2
|
||||
a1 -- l2 --> a2
|
||||
end
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
flowchart RL
|
||||
subgraph "`one`"
|
||||
a1 -- l1 --> a2
|
||||
a1 -- l2 --> a2
|
||||
end
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart
|
||||
id["`A root with a long text that wraps to keep the node size in check. A root with a long text that wraps to keep the node size in check`"]</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart LR
|
||||
A[A text that needs to be wrapped wraps to another line]
|
||||
B[A text that needs to be<br/>wrapped wraps to another line]
|
||||
C["`A text that needs to be wrapped to another line`"]</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart LR
|
||||
C["`A text
|
||||
that needs
|
||||
to be wrapped
|
||||
in another
|
||||
way`"]
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid">
|
||||
classDiagram-v2
|
||||
note "I love this diagram!\nDo you love it?"
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
stateDiagram-v2
|
||||
State1: The state with a note with minus - and plus + in it
|
||||
note left of State1
|
||||
Important information! You can write
|
||||
notes with . and in them.
|
||||
end note </pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
mindmap
|
||||
root
|
||||
Child3(A node with an icon and with a long text that wraps to keep the node size in check)
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
%%{init: {"theme": "forest"} }%%
|
||||
mindmap
|
||||
id1[**Start2**<br/>end]
|
||||
id2[**Start2**<br />end]
|
||||
%% Another comment
|
||||
id3[**Start2**<br>end] %% Comment
|
||||
id4[**Start2**<br >end<br >the very end]
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
mindmap
|
||||
id1["`**Start2**
|
||||
second line 😎 with long text that is wrapping to the next line`"]
|
||||
id2["`Child **with bold** text`"]
|
||||
id3["`Children of which some
|
||||
is using *italic type of* text`"]
|
||||
id4[Child]
|
||||
id5["`Child
|
||||
Row
|
||||
and another
|
||||
`"]
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
mindmap
|
||||
id1("`**Root**`"]
|
||||
id2["`A formatted text... with **bold** and *italics*`"]
|
||||
id3[Regular labels works as usual]
|
||||
id4["`Emojis and unicode works too: 🤓
|
||||
शान्तिः سلام 和平 `"]
|
||||
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
%%{init: {"flowchart": {"defaultRenderer": "elk"}} }%%
|
||||
flowchart TB
|
||||
%% I could not figure out how to use double quotes in labels in Mermaid
|
||||
subgraph ibm[IBM Espresso CPU]
|
||||
core0[IBM PowerPC Broadway Core 0]
|
||||
core1[IBM PowerPC Broadway Core 1]
|
||||
core2[IBM PowerPC Broadway Core 2]
|
||||
|
||||
rom[16 KB ROM]
|
||||
|
||||
core0 --- core2
|
||||
|
||||
rom --> core2
|
||||
end
|
||||
|
||||
subgraph amd["`**AMD** Latte GPU`"]
|
||||
mem[Memory & I/O Bridge]
|
||||
dram[DRAM Controller]
|
||||
edram[32 MB EDRAM MEM1]
|
||||
rom[512 B SEEPROM]
|
||||
|
||||
sata[SATA IF]
|
||||
exi[EXI]
|
||||
|
||||
subgraph gx[GX]
|
||||
sram[3 MB 1T-SRAM]
|
||||
end
|
||||
|
||||
radeon[AMD Radeon R7xx GX2]
|
||||
|
||||
mem --- gx
|
||||
mem --- radeon
|
||||
|
||||
rom --- mem
|
||||
|
||||
mem --- sata
|
||||
mem --- exi
|
||||
|
||||
dram --- sata
|
||||
dram --- exi
|
||||
end
|
||||
|
||||
ddr3[2 GB DDR3 RAM MEM2]
|
||||
|
||||
mem --- ddr3
|
||||
dram --- ddr3
|
||||
edram --- ddr3
|
||||
|
||||
core1 --- mem
|
||||
|
||||
exi --- rtc
|
||||
rtc{{rtc}}
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
%%{init: {"flowchart": {"defaultRenderer": "elk", "htmlLabels": false}} }%%
|
||||
flowchart TB
|
||||
%% I could not figure out how to use double quotes in labels in Mermaid
|
||||
subgraph ibm[IBM Espresso CPU]
|
||||
core0[IBM PowerPC Broadway Core 0]
|
||||
core1[IBM PowerPC Broadway Core 1]
|
||||
core2[IBM PowerPC Broadway Core 2]
|
||||
|
||||
rom[16 KB ROM]
|
||||
|
||||
core0 --- core2
|
||||
|
||||
rom --> core2
|
||||
end
|
||||
|
||||
subgraph amd["`**AMD** Latte GPU`"]
|
||||
mem[Memory & I/O Bridge]
|
||||
dram[DRAM Controller]
|
||||
edram[32 MB EDRAM MEM1]
|
||||
rom[512 B SEEPROM]
|
||||
|
||||
sata[SATA IF]
|
||||
exi[EXI]
|
||||
|
||||
subgraph gx[GX]
|
||||
sram[3 MB 1T-SRAM]
|
||||
end
|
||||
|
||||
radeon[AMD Radeon R7xx GX2]
|
||||
|
||||
mem --- gx
|
||||
mem --- radeon
|
||||
|
||||
rom --- mem
|
||||
|
||||
mem --- sata
|
||||
mem --- exi
|
||||
|
||||
dram --- sata
|
||||
dram --- exi
|
||||
end
|
||||
|
||||
ddr3[2 GB DDR3 RAM MEM2]
|
||||
|
||||
mem --- ddr3
|
||||
dram --- ddr3
|
||||
edram --- ddr3
|
||||
|
||||
core1 --- mem
|
||||
|
||||
exi --- rtc
|
||||
rtc{{rtc}}
|
||||
</pre
|
||||
>
|
||||
|
||||
<br />
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart TB
|
||||
%% I could not figure out how to use double quotes in labels in Mermaid
|
||||
subgraph ibm[IBM Espresso CPU]
|
||||
core0[IBM PowerPC Broadway Core 0]
|
||||
core1[IBM PowerPC Broadway Core 1]
|
||||
core2[IBM PowerPC Broadway Core 2]
|
||||
|
||||
rom[16 KB ROM]
|
||||
|
||||
core0 --- core2
|
||||
|
||||
rom --> core2
|
||||
end
|
||||
|
||||
subgraph amd[AMD Latte GPU]
|
||||
mem[Memory & I/O Bridge]
|
||||
dram[DRAM Controller]
|
||||
edram[32 MB EDRAM MEM1]
|
||||
rom[512 B SEEPROM]
|
||||
|
||||
sata[SATA IF]
|
||||
exi[EXI]
|
||||
|
||||
subgraph gx[GX]
|
||||
sram[3 MB 1T-SRAM]
|
||||
end
|
||||
|
||||
radeon[AMD Radeon R7xx GX2]
|
||||
|
||||
mem --- gx
|
||||
mem --- radeon
|
||||
|
||||
rom --- mem
|
||||
|
||||
mem --- sata
|
||||
mem --- exi
|
||||
|
||||
dram --- sata
|
||||
dram --- exi
|
||||
end
|
||||
|
||||
ddr3[2 GB DDR3 RAM MEM2]
|
||||
|
||||
mem --- ddr3
|
||||
dram --- ddr3
|
||||
edram --- ddr3
|
||||
|
||||
core1 --- mem
|
||||
|
||||
exi --- rtc
|
||||
rtc{{rtc}}
|
||||
</pre
|
||||
>
|
||||
<br />
|
||||
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart LR
|
||||
B1 --be be--x B2
|
||||
B1 --bo bo--o B3
|
||||
subgraph Ugge
|
||||
B2
|
||||
B3
|
||||
subgraph inner
|
||||
B4
|
||||
B5
|
||||
end
|
||||
subgraph inner2
|
||||
subgraph deeper
|
||||
C4
|
||||
C5
|
||||
end
|
||||
C6
|
||||
end
|
||||
|
||||
B4 --> C4
|
||||
|
||||
B3 -- X --> B4
|
||||
B2 --> inner
|
||||
|
||||
C4 --> C5
|
||||
end
|
||||
|
||||
subgraph outer
|
||||
B6
|
||||
end
|
||||
B6 --> B5
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
sequenceDiagram
|
||||
Customer->>+Stripe: Makes a payment request
|
||||
Stripe->>+Bank: Forwards the payment request to the bank
|
||||
Bank->>+Customer: Asks for authorization
|
||||
Customer->>+Bank: Provides authorization
|
||||
Bank->>+Stripe: Sends a response with payment details
|
||||
Stripe->>+Merchant: Sends a notification of payment receipt
|
||||
Merchant->>+Stripe: Confirms the payment
|
||||
Stripe->>+Customer: Sends a confirmation of payment
|
||||
Customer->>+Merchant: Receives goods or services
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
mindmap
|
||||
root((mindmap))
|
||||
Origins
|
||||
Long history
|
||||
::icon(fa fa-book)
|
||||
Popularisation
|
||||
British popular psychology author Tony Buzan
|
||||
Research
|
||||
On effectiveness<br/>and features
|
||||
On Automatic creation
|
||||
Uses
|
||||
Creative techniques
|
||||
Strategic planning
|
||||
Argument mapping
|
||||
Tools
|
||||
Pen and paper
|
||||
Mermaid
|
||||
</pre>
|
||||
<br />
|
||||
<pre id="diagram" class="mermaid2">
|
||||
example-diagram
|
||||
</pre>
|
||||
|
||||
<!-- <div id="cy"></div> -->
|
||||
<!-- <script src="http://localhost:9000/packages/mermaid-mindmap/dist/mermaid-mindmap-detector.js"></script> -->
|
||||
<!-- <script src="./mermaid-example-diagram-detector.js"></script> -->
|
||||
<!-- <script src="//cdn.jsdelivr.net/npm/mermaid@9.1.7/dist/mermaid.min.js"></script> -->
|
||||
<!-- <script src="./mermaid.js"></script> -->
|
||||
|
||||
<scrpt>
|
||||
// import mindmap from '../../packages/mermaid-mindmap/src/detector'; // import example from
|
||||
'../../packages/mermaid-example-diagram/src/mermaid-example-diagram.core.mjs'; import mermaid
|
||||
from './mermaid.esm.mjs'; // await mermaid.registerExternalDiagrams([example]);
|
||||
mermaid.parseError = function (err, hash) { // console.error('Mermaid error: ', err); };
|
||||
mermaid.initialize({ // theme: 'forest', startOnLoad: true, logLevel: 0, flowchart: { //
|
||||
defaultRenderer: 'elk', useMaxWidth: false, // htmlLabels: false, htmlLabels: true, }, //
|
||||
htmlLabels: false, gantt: { useMaxWidth: false, }, useMaxWidth: false, }); function callback()
|
||||
{ alert('It worked'); } mermaid.parseError = function (err, hash) { console.error('In parse
|
||||
error:'); console.error(err); }; // mermaid.test1('first_slow', 1200).then((r) =>
|
||||
console.info(r)); // mermaid.test1('second_fast', 200).then((r) => console.info(r)); //
|
||||
mermaid.test1('third_fast', 200).then((r) => console.info(r)); // mermaid.test1('forth_slow',
|
||||
1200).then((r) => console.info(r));
|
||||
</scrpt>
|
||||
<script
|
||||
type="text/javascript"
|
||||
src="https://cdn.jsdelivr.net/npm/mermaid@10.2.0/dist/mermaid.min.js"
|
||||
></script>
|
||||
<script type="module">
|
||||
import mermaid from 'https://cdn.jsdelivr.net/npm/mermaid@10.2.0/dist/mermaid.min.js';
|
||||
(function () {
|
||||
mermaid.initialize({ startOnLoad: false });
|
||||
const elements = document.getElementsByClassName('mermaid');
|
||||
console.log(elements);
|
||||
let id = 0;
|
||||
[...elements].forEach((elem) => {
|
||||
const insertSvg = function (svgCode) {
|
||||
elem.innerHTML = svgCode;
|
||||
};
|
||||
|
||||
console.log(atob(elem.innerText));
|
||||
|
||||
mermaid.render(`graphDiv-${id++}`, atob(elem.innerText), insertSvg);
|
||||
});
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -4,7 +4,7 @@
    <link href="https://unpkg.com/tailwindcss@^1.0/dist/tailwind.min.css" rel="stylesheet" />
    <link
      rel="stylesheet"
      href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.2/css/font-awesome.min.css"
      href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css"
    />
    <link
      href="https://cdn.jsdelivr.net/npm/@mdi/font@6.9.96/css/materialdesignicons.min.css"
@@ -14,33 +14,45 @@
|
||||
href="https://fonts.googleapis.com/css?family=Noto+Sans+SC&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Kalam:wght@300;400;700&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Caveat:wght@400..700&family=Kalam:wght@300;400;700&family=Rubik+Mono+One&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Kalam:wght@300;400;700&family=Rubik+Mono+One&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
|
||||
<style>
|
||||
body {
|
||||
/* background: rgb(221, 208, 208); */
|
||||
background: #333;
|
||||
/* background: #333; */
|
||||
font-family: 'Arial';
|
||||
/* font-size: 18px !important; */
|
||||
}
|
||||
h1 {
|
||||
color: grey;
|
||||
}
|
||||
.mermaid {
|
||||
border: 1px solid #ddd;
|
||||
margin: 10px;
|
||||
}
|
||||
.mermaid2 {
|
||||
display: none;
|
||||
}
|
||||
.mermaid svg {
|
||||
/* font-size: 18px !important; */
|
||||
/* background-color: #efefef; */
|
||||
background-color: #333;
|
||||
background-image: radial-gradient(#333 51%, transparent 91%),
|
||||
radial-gradient(#333 51%, transparent 91%);
|
||||
|
||||
/* background-color: #efefef;
|
||||
background-image: radial-gradient(#fff 51%, transparent 91%),
|
||||
radial-gradient(#fff 51%, transparent 91%);
|
||||
background-size: 20px 20px;
|
||||
background-position: 0 0, 10px 10px;
|
||||
background-repeat: repeat;
|
||||
border: 2px solid rgb(131, 142, 205);
|
||||
background-position:
|
||||
0 0,
|
||||
10px 10px;
|
||||
background-repeat: repeat; */
|
||||
}
|
||||
.malware {
|
||||
position: fixed;
|
||||
@@ -58,544 +70,202 @@
|
||||
font-size: 72px;
|
||||
}
|
||||
/* tspan {
|
||||
font-size: 6px !important;
|
||||
} */
|
||||
font-size: 6px !important;
|
||||
} */
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<pre id="diagram" class="mermaid">
|
||||
block-beta
|
||||
blockArrowId<["Label"]>(right)
|
||||
blockArrowId2<["Label"]>(left)
|
||||
blockArrowId3<["Label"]>(up)
|
||||
blockArrowId4<["Label"]>(down)
|
||||
blockArrowId5<["Label"]>(x)
|
||||
blockArrowId6<["Label"]>(y)
|
||||
blockArrowId6<["Label"]>(x, down)
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
block-beta
|
||||
block:e:4
|
||||
columns 2
|
||||
f
|
||||
g
|
||||
end
|
||||
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
block-beta
|
||||
block:e:4
|
||||
columns 2
|
||||
f
|
||||
g
|
||||
h
|
||||
end
|
||||
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
block-beta
|
||||
columns 4
|
||||
a b c d
|
||||
block:e:4
|
||||
columns 2
|
||||
f
|
||||
g
|
||||
h
|
||||
end
|
||||
i:4
|
||||
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart LR
|
||||
X-- "y" -->z
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
columns 5
|
||||
A space B
|
||||
A --x B
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
columns 3
|
||||
a["A wide one"] b:2 c:2 d
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
block:e
|
||||
f
|
||||
end
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
columns 3
|
||||
a:3
|
||||
block:e:3
|
||||
f
|
||||
end
|
||||
g
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
columns 3
|
||||
a:3
|
||||
block:e:3
|
||||
f
|
||||
g
|
||||
end
|
||||
h
|
||||
i
|
||||
j
|
||||
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
columns 3
|
||||
a b:2
|
||||
block:e:3
|
||||
f
|
||||
end
|
||||
g h i
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
block-beta
|
||||
columns 3
|
||||
a b c
|
||||
e:3
|
||||
f g h
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
block-beta
|
||||
columns 1
|
||||
db(("DB"))
|
||||
blockArrowId6<[" "]>(down)
|
||||
block:ID
|
||||
A
|
||||
B["A wide one in the middle"]
|
||||
C
|
||||
end
|
||||
space
|
||||
D
|
||||
ID --> D
|
||||
C --> D
|
||||
style B fill:#f9F,stroke:#333,stroke-width:4px
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid">
|
||||
block-beta
|
||||
columns 5
|
||||
A1:3
|
||||
A2:1
|
||||
A3
|
||||
B1 B2 B3:3
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
block
|
||||
D
|
||||
E
|
||||
end
|
||||
db("This is the text in the box")
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
|
||||
block
|
||||
D
|
||||
end
|
||||
A["A: I am a wide one"]
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
A["square"]
|
||||
B("rounded")
|
||||
C(("circle"))
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
A>"rect_left_inv_arrow"]
|
||||
B{"diamond"}
|
||||
C{{"hexagon"}}
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
A(["stadium"])
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
%% A[["subroutine"]]
|
||||
%% B[("cylinder")]
|
||||
C>"surprise"]
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
block-beta
|
||||
A[/"lean right"/]
|
||||
B[\"lean left"\]
|
||||
C[/"trapezoid"\]
|
||||
D[\"trapezoid"/]
|
||||
</pre>
|
||||
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart
|
||||
B
|
||||
style B fill:#f9F,stroke:#333,stroke-width:4px
|
||||
</pre>
|
||||
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart LR
|
||||
a1 -- apa --> b1
|
||||
</pre>
|
||||
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart RL
|
||||
subgraph "`one`"
|
||||
id
|
||||
end
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart RL
|
||||
subgraph "`one`"
|
||||
a1 -- l1 --> a2
|
||||
a1 -- l2 --> a2
|
||||
end
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart
|
||||
id["`A root with a long text that wraps to keep the node size in check. A root with a long text that wraps to keep the node size in check`"]</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart LR
|
||||
A[A text that needs to be wrapped wraps to another line]
|
||||
B[A text that needs to be<br/>wrapped wraps to another line]
|
||||
C["`A text that needs to be wrapped to another line`"]</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
flowchart LR
|
||||
C["`A text
|
||||
that needs
|
||||
to be wrapped
|
||||
in another
|
||||
way`"]
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
classDiagram-v2
|
||||
note "I love this diagram!\nDo you love it?"
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
stateDiagram-v2
|
||||
State1: The state with a note with minus - and plus + in it
|
||||
note left of State1
|
||||
Important information! You can write
|
||||
notes with . and in them.
|
||||
end note </pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
mindmap
|
||||
root
|
||||
Child3(A node with an icon and with a long text that wraps to keep the node size in check)
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
%%{init: {"theme": "forest"} }%%
|
||||
mindmap
|
||||
id1[**Start2**<br/>end]
|
||||
id2[**Start2**<br />end]
|
||||
%% Another comment
|
||||
id3[**Start2**<br>end] %% Comment
|
||||
id4[**Start2**<br >end<br >the very end]
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
mindmap
|
||||
id1["`**Start2**
|
||||
second line 😎 with long text that is wrapping to the next line`"]
|
||||
id2["`Child **with bold** text`"]
|
||||
id3["`Children of which some
|
||||
is using *italic type of* text`"]
|
||||
id4[Child]
|
||||
id5["`Child
|
||||
Row
|
||||
and another
|
||||
`"]
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
mindmap
|
||||
id1("`**Root**`"]
|
||||
id2["`A formatted text... with **bold** and *italics*`"]
|
||||
id3[Regular labels works as usual]
|
||||
id4["`Emojis and unicode works too: 🤓
|
||||
शान्तिः سلام 和平 `"]
|
||||
|
||||
</pre>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
%%{init: {"flowchart": {"defaultRenderer": "elk"}} }%%
|
||||
flowchart TB
|
||||
%% I could not figure out how to use double quotes in labels in Mermaid
|
||||
subgraph ibm[IBM Espresso CPU]
|
||||
core0[IBM PowerPC Broadway Core 0]
|
||||
core1[IBM PowerPC Broadway Core 1]
|
||||
core2[IBM PowerPC Broadway Core 2]
|
||||
|
||||
rom[16 KB ROM]
|
||||
|
||||
core0 --- core2
|
||||
|
||||
rom --> core2
|
||||
end
|
||||
|
||||
subgraph amd["`**AMD** Latte GPU`"]
|
||||
mem[Memory & I/O Bridge]
|
||||
dram[DRAM Controller]
|
||||
edram[32 MB EDRAM MEM1]
|
||||
rom[512 B SEEPROM]
|
||||
|
||||
sata[SATA IF]
|
||||
exi[EXI]
|
||||
|
||||
subgraph gx[GX]
|
||||
sram[3 MB 1T-SRAM]
|
||||
end
|
||||
|
||||
radeon[AMD Radeon R7xx GX2]
|
||||
|
||||
mem --- gx
|
||||
mem --- radeon
|
||||
|
||||
rom --- mem
|
||||
|
||||
mem --- sata
|
||||
mem --- exi
|
||||
|
||||
dram --- sata
|
||||
dram --- exi
|
||||
end
|
||||
|
||||
ddr3[2 GB DDR3 RAM MEM2]
|
||||
|
||||
mem --- ddr3
|
||||
dram --- ddr3
|
||||
edram --- ddr3
|
||||
|
||||
core1 --- mem
|
||||
|
||||
exi --- rtc
|
||||
rtc{{rtc}}
|
||||
</pre
|
||||
>
|
||||
<pre id="diagram" class="mermaid2">
|
||||
%%{init: {"flowchart": {"defaultRenderer": "elk", "htmlLabels": false}} }%%
|
||||
flowchart TB
|
||||
%% I could not figure out how to use double quotes in labels in Mermaid
|
||||
subgraph ibm[IBM Espresso CPU]
|
||||
core0[IBM PowerPC Broadway Core 0]
|
||||
core1[IBM PowerPC Broadway Core 1]
|
||||
core2[IBM PowerPC Broadway Core 2]
|
||||
|
||||
rom[16 KB ROM]
|
||||
|
||||
core0 --- core2
|
||||
|
||||
rom --> core2
|
||||
end
|
||||
|
||||
subgraph amd["`**AMD** Latte GPU`"]
|
||||
mem[Memory & I/O Bridge]
|
||||
dram[DRAM Controller]
|
||||
edram[32 MB EDRAM MEM1]
|
||||
rom[512 B SEEPROM]
|
||||
|
||||
sata[SATA IF]
|
||||
exi[EXI]
|
||||
|
||||
subgraph gx[GX]
|
||||
sram[3 MB 1T-SRAM]
|
||||
end
|
||||
|
||||
radeon[AMD Radeon R7xx GX2]
|
||||
|
||||
mem --- gx
|
||||
mem --- radeon
|
||||
|
||||
rom --- mem
|
||||
|
||||
mem --- sata
|
||||
mem --- exi
|
||||
|
||||
dram --- sata
|
||||
dram --- exi
|
||||
end
|
||||
|
||||
ddr3[2 GB DDR3 RAM MEM2]
|
||||
|
||||
mem --- ddr3
|
||||
dram --- ddr3
|
||||
edram --- ddr3
|
||||
|
||||
core1 --- mem
|
||||
|
||||
exi --- rtc
|
||||
rtc{{rtc}}
|
||||
</pre
|
||||
>
|
||||
|
||||
<br />
<pre id="diagram" class="mermaid2">
flowchart TB
%% I could not figure out how to use double quotes in labels in Mermaid
subgraph ibm[IBM Espresso CPU]
core0[IBM PowerPC Broadway Core 0]
core1[IBM PowerPC Broadway Core 1]
core2[IBM PowerPC Broadway Core 2]

rom[16 KB ROM]

core0 --- core2

rom --> core2
end

subgraph amd[AMD Latte GPU]
mem[Memory & I/O Bridge]
dram[DRAM Controller]
edram[32 MB EDRAM MEM1]
rom[512 B SEEPROM]

sata[SATA IF]
exi[EXI]

subgraph gx[GX]
sram[3 MB 1T-SRAM]
end

radeon[AMD Radeon R7xx GX2]

mem --- gx
mem --- radeon

rom --- mem

mem --- sata
mem --- exi

dram --- sata
dram --- exi
end

ddr3[2 GB DDR3 RAM MEM2]

mem --- ddr3
dram --- ddr3
edram --- ddr3

core1 --- mem

exi --- rtc
rtc{{rtc}}
</pre>
<br />

<pre id="diagram" class="mermaid2">
flowchart LR
B1 --be be--x B2
B1 --bo bo--o B3
subgraph Ugge
B2
B3
subgraph inner
B4
B5
end
subgraph inner2
subgraph deeper
C4
C5
end
C6
end

B4 --> C4

B3 -- X --> B4
B2 --> inner

C4 --> C5
end

subgraph outer
B6
end
B6 --> B5
</pre>
<pre id="diagram" class="mermaid2">
%%{
init: {
"theme":"base",
"fontFamily": "Kalam",
"themeVariables": {
"background": "#FFFFFF",
"primaryColor": "#7bdfa7",
"primaryTextColor": "#3c3c3b",
"secondaryColor": "#642470",
"secondaryTextColor": "#3c3c3b",
"tertiaryColor": "#1c736D",
"tertiaryTextColor": "#3c3c3b",
"noteBkgColor": "#9fd8ef",
"loopTextColor": "#636362",
"labelBoxBkgColor": "#642470",
"labelBoxBorderColor": "#642470",
"labelTextColor": "#d4d4d4",
"signalTextColor": "#636362",
"signalColor": "#642470"
}
}
}%%
sequenceDiagram
Customer->>+Stripe: Makes a payment request
Stripe->>+Bank: Forwards the payment request to the bank
Bank->>+Customer: Asks for authorization
Customer->>+Bank: Provides authorization
Bank->>+Stripe: Sends a response with payment details
Stripe->>+Merchant: Sends a notification of payment receipt
Merchant->>+Stripe: Confirms the payment
Stripe->>+Customer: Sends a confirmation of payment
Customer->>+Merchant: Receives goods or services
</pre
Alice->>+John: Hello John, how are you?
Alice->>+John: John, can you hear me?
John-->>-Alice: Hi Alice, I can hear you!
John-->>-Alice: I feel great!
</pre>
<pre id="diagram" class="mermaid2">
mindmap
  root((mindmap))
    Origins
      Long history
      ::icon(fa fa-book)
      Popularisation
        British popular psychology author Tony Buzan
    Research
      On effectiveness<br/>and features
      On Automatic creation
    Uses
      Creative techniques
      Strategic planning
      Argument mapping
    Tools
      Pen and paper
      Mermaid
</pre>
<br />
<pre id="diagram" class="mermaid2">
example-diagram
</pre>
%%{
init: {
"theme":"base",
"fontFamily": "Forth Bold",
"themeVariables": {
"background": "#FFFFFF",
"primaryColor": "#7bdfa7",
"primaryTextColor": "#3c3c3b",
"secondaryColor": "#642470",
"secondaryTextColor": "#3c3c3b",
"tertiaryColor": "#1c736D",
"tertiaryTextColor": "#3c3c3b",
"noteBkgColor": "#9fd8ef",
"loopTextColor": "#636362",
"labelBoxBkgColor": "#642470",
"labelBoxBorderColor": "#642470",
"labelTextColor": "#d4d4d4",
"signalTextColor": "#636362",
"signalColor": "#642470"
}
}
}%%
sequenceDiagram
Alice->>+John: Hello John, how are you?
Alice->>+John: John, can you hear me?
John-->>-Alice: Hi Alice, I can hear you!
John-->>-Alice: I feel great!
</pre>

<!-- <div id="cy"></div> -->
<!-- <script src="http://localhost:9000/packages/mermaid-mindmap/dist/mermaid-mindmap-detector.js"></script> -->
<!-- <script src="./mermaid-example-diagram-detector.js"></script> -->
<!-- <script src="//cdn.jsdelivr.net/npm/mermaid@9.1.7/dist/mermaid.min.js"></script> -->
<!-- <script src="./mermaid.js"></script> -->
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "elk", "mergeEdges": true} }%%
stateDiagram
A --> B
</pre>
<pre id="diagram" class="mermaid">
%%{init: {"layout": "elk", "mergeEdges": true} }%%
flowchart
A --> B(This is B)
</pre>
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "elk", "mergeEdges": false, "elk.nodePlacement.strategy": "NETWORK_SIMPLEX"} }%%
stateDiagram
State T0 {
direction LR
A --> B
}
State T1 {
[*] --> NumLockOff
NumLockOff --> NumLockOn : EvNumLockPressed
NumLockOn --> NumLockOff : EvNumLockPressed
}
</pre>
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "dagre", "mergeEdges": true} }%%
stateDiagram
direction TB
State T1 {
T11
}
</pre>
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "dagre", "mergeEdges": true} }%%
stateDiagram
State T1 {
T21
--
T22
}
</pre>
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "elk", "mergeEdges": true} }%%
stateDiagram
direction TB
State T1 {
T11
}
</pre>
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "elk", "mergeEdges": true} }%%
stateDiagram
State T1 {
T21
--
T22
}
</pre>
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "elk", "mergeEdges": true} }%%
stateDiagram
[*] --> T1
T1 --> T2
T1 --> T3
T1 --> T4
</pre>
<pre id="diagram" class="mermaid2">
%%{init: {"layout": "elk"} }%%
stateDiagram
[*] --> T1
T1 --> T2
T2 --> T3
T3 --> T1
T1 --> T3
</pre>
<pre id="diagram" class="mermaid2">
stateDiagram
State1: The state with a note
note right of State1
Important information! You can write
notes.
end note
</pre>
<pre id="diagram" class="mermaid2">
stateDiagram-v2
direction LR
[*] --> Active

state Active {
direction BT
[*] --> Inner
Inner --> NumLockOn : EvNumLockPressed
}
%% Outer --> Inner
</pre>

<script type="module">
// import mindmap from '../../packages/mermaid-mindmap/src/detector';
// import example from '../../packages/mermaid-example-diagram/src/mermaid-example-diagram.core.mjs';
import mermaid from './mermaid.esm.mjs';
// await mermaid.registerExternalDiagrams([example]);
import { layouts } from './mermaid-layout-elk.esm.mjs';
mermaid.registerLayoutLoaders(layouts);
mermaid.parseError = function (err, hash) {
// console.error('Mermaid error: ', err);
console.error('Mermaid error: ', err);
};
// mermaid.initialize({
//   // theme: 'forest',
//   startOnLoad: true,
//   logLevel: 0,
//   flowchart: {
//     // defaultRenderer: 'elk',
//     useMaxWidth: false,
//     // htmlLabels: false,
//     htmlLabels: true,
//   },
//   // htmlLabels: false,
//   gantt: {
//     useMaxWidth: false,
//   },
//   useMaxWidth: false,
// });
mermaid.initialize({
theme: 'dark',
startOnLoad: true,
theme: 'base',
handdrawnSeed: 12,
look: 'handdrawn',
'elk.nodePlacement.strategy': 'NETWORK_SIMPLEX',
// layout: 'dagre',
layout: 'elk',
flowchart: { titleTopMargin: 10 },
// fontFamily: 'Caveat',
// fontFamily: 'Kalam',
fontFamily: 'courier',
sequence: {
actorFontFamily: 'courier',
noteFontFamily: 'courier',
messageFontFamily: 'courier',
},
fontSize: 12,
logLevel: 0,
});
function callback() {

@@ -1,4 +1,4 @@
<!DOCTYPE html>
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />

cypress/platform/state-refactor.html (new file, 1270 lines; file diff suppressed because it is too large)

cypress/platform/xss25.html (new file, 108 lines)
@@ -0,0 +1,108 @@
<html>
<head>
<link href="https://fonts.googleapis.com/css?family=Montserrat&display=swap" rel="stylesheet" />
<link href="https://unpkg.com/tailwindcss@^1.0/dist/tailwind.min.css" rel="stylesheet" />
<link
rel="stylesheet"
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css"
/>
<link
href="https://fonts.googleapis.com/css?family=Noto+Sans+SC&display=swap"
rel="stylesheet"
/>
<style>
body {
/* background: rgb(221, 208, 208); */
/* background:#333; */
font-family: 'Arial';
/* font-size: 18px !important; */
}
h1 {
color: grey;
}
.mermaid2 {
display: none;
}
.mermaid svg {
/* font-size: 18px !important; */
}
.malware {
position: fixed;
bottom: 0;
left: 0;
right: 0;
height: 150px;
background: red;
color: black;
display: flex;
display: flex;
justify-content: center;
align-items: center;
font-family: monospace;
font-size: 72px;
}
</style>
<script>
function xssAttack() {
const div = document.createElement('div');
div.id = 'the-malware';
div.className = 'malware';
div.innerHTML = 'XSS Succeeded';
document.getElementsByTagName('body')[0].appendChild(div);
throw new Error('XSS Succeeded');
}
</script>
</head>
<body>
<div>Security check</div>
<div class="flex">
<div id="diagram" class="mermaid"></div>
<div id="res" class=""></div>
</div>
<script type="module">
import mermaid from './mermaid.esm.mjs';
mermaid.parseError = function (err, hash) {
// console.error('Mermaid error: ', err);
};
mermaid.initialize({
theme: 'forest',
arrowMarkerAbsolute: true,
// themeCSS: '.edgePath .path {stroke: red;} .arrowheadPath {fill: red;}',
logLevel: 0,
state: {
defaultRenderer: 'dagre-wrapper',
},
flowchart: {
// defaultRenderer: 'dagre-wrapper',
nodeSpacing: 10,
curve: 'cardinal',
htmlLabels: true,
},
htmlLabels: false,
// gantt: { axisFormat: '%m/%d/%Y' },
sequence: { actorFontFamily: 'courier', actorMargin: 50, showSequenceNumbers: false },
// sequenceDiagram: { actorMargin: 300 } // deprecated
// fontFamily: '"times", sans-serif',
// fontFamily: 'courier',
fontSize: 18,
curve: 'basis',
securityLevel: 'strict',
startOnLoad: false,
secure: ['secure', 'securityLevel', 'startOnLoad', 'maxTextSize'],
// themeVariables: {relationLabelColor: 'red'}
});
function callback() {
alert('It worked');
}

let diagram = 'block-beta\n';
diagram += '`A-- "X<img src=x on';
diagram += 'error=xssAttack()>" -->B';

console.log(diagram);
// document.querySelector('#diagram').innerHTML = diagram;
const { svg } = await mermaid.render('diagram', diagram);
document.querySelector('#res').innerHTML = svg;
</script>
</body>
</html>
@@ -118,7 +118,7 @@ The siteConfig

#### Defined in

[config.ts:218](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/config.ts#L218)
[config.ts:221](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/config.ts#L221)

---

@@ -98,7 +98,7 @@ mermaid.initialize(config);

#### Defined in

[mermaidAPI.ts:635](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L635)
[mermaidAPI.ts:634](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L634)

## Functions

@@ -129,7 +129,7 @@ Return the last node appended

#### Defined in

[mermaidAPI.ts:277](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L277)
[mermaidAPI.ts:276](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L276)

---

@@ -155,7 +155,7 @@ the cleaned up svgCode

#### Defined in

[mermaidAPI.ts:223](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L223)
[mermaidAPI.ts:222](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L222)

---

@@ -167,10 +167,10 @@ Create the user styles

#### Parameters

| Name | Type | Description |
| :-- | :-- | :-- |
| `config` | `MermaidConfig` | configuration that has style and theme settings to use |
| `classDefs` | `undefined` \| `null` \| `Record`<`string`, `DiagramStyleClassDef`> | the classDefs in the diagram text. Might be null if none were defined. Usually is the result of a call to getClasses(...) |
| Name | Type | Description |
| :-- | :-- | :-- |
| `config` | `MermaidConfig` | configuration that has style and theme settings to use |
| `classDefs` | `undefined` \| `null` \| `Map`<`string`, `DiagramStyleClassDef`> | the classDefs in the diagram text. Might be null if none were defined. Usually is the result of a call to getClasses(...) |

#### Returns

@@ -190,12 +190,12 @@ the string with all the user styles

#### Parameters

| Name | Type |
| :-- | :-- |
| `config` | `MermaidConfig` |
| `graphType` | `string` |
| `classDefs` | `undefined` \| `Record`<`string`, `DiagramStyleClassDef`> |
| `svgId` | `string` |
| Name | Type |
| :-- | :-- |
| `config` | `MermaidConfig` |
| `graphType` | `string` |
| `classDefs` | `undefined` \| `Map`<`string`, `DiagramStyleClassDef`> |
| `svgId` | `string` |

#### Returns

@@ -203,7 +203,7 @@ the string with all the user styles

#### Defined in

[mermaidAPI.ts:200](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L200)
[mermaidAPI.ts:199](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L199)

---

@@ -256,7 +256,7 @@ Put the svgCode into an iFrame. Return the iFrame code

#### Defined in

[mermaidAPI.ts:254](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L254)
[mermaidAPI.ts:253](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L253)

---

@@ -281,4 +281,4 @@ Remove any existing elements from the given document

#### Defined in

[mermaidAPI.ts:327](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L327)
[mermaidAPI.ts:326](https://github.com/mermaid-js/mermaid/blob/master/packages/mermaid/src/mermaidAPI.ts#L326)
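The parameter tables above document the same migration in two places: the `classDefs` argument moves from a plain `Record` to a `Map`. A minimal sketch of building the new shape; the `DiagramStyleClassDef` fields used here are an assumption based on the classDef handling elsewhere in this diff, not on the table itself.

```ts
// Illustrative only: the shape of the classDefs argument after this change.
interface DiagramStyleClassDef {
  id: string;
  styles?: string[];
  textStyles?: string[];
}

// Before: a plain object keyed by class name.
const classDefsAsRecord: Record<string, DiagramStyleClassDef> = {
  important: { id: 'important', styles: ['fill:#f96'] },
};

// After: the same data as a Map, which is what the style helpers now expect.
const classDefsAsMap = new Map<string, DiagramStyleClassDef>(Object.entries(classDefsAsRecord));
```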
@@ -6,6 +6,10 @@

# Mermaid Chart

The Future of Diagramming & Visual Collaboration

Try the Ultimate AI, Mermaid, and Visual Diagramming Suite by creating an account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

<br />

<a href="https://www.producthunt.com/posts/mermaid-chart?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-mermaid-chart" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=416671&theme=light" alt="Mermaid Chart - A smarter way to create diagrams | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
@@ -18,22 +22,26 @@

- **Editor** - A web based editor for creating and editing Mermaid diagrams.

- **Presentation** - A presentation mode for viewing Mermaid diagrams in a slideshow format.
- **Visual Editor** - The Visual Editor enables users of all skill levels to create diagrams easily and efficiently, with both GUI and code-based editing options.

- **Collaboration** - A web based collaboration feature for multi-user editing on Mermaid diagrams in real-time (Pro plan).
- **AI Chat** - Use our embedded AI Chat to generate diagrams from natural language descriptions.

- **Plugins** - A plugin system for extending the functionality of Mermaid.

Plugins are available for:
Official Mermaid Chart plugins:

- [ChatGPT](https://docs.mermaidchart.com/plugins/chatgpt)
- [Mermaid Chart GPT](https://chat.openai.com/g/g-1IRFKwq4G-mermaid-chart)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)
- [JetBrains IDE](https://plugins.jetbrains.com/plugin/23043-mermaid-chart)
- [Microsoft PowerPoint and Word](https://appsource.microsoft.com/en-us/product/office/WA200006214?tab=Overview)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)

- **AI diagramming** - A feature for generating Mermaid diagrams from text using AI (Pro plan).
Visit our [Plugins](https://www.mermaidchart.com/plugins) page for more information.

- **More** - To learn more, visit our [Product](https://www.mermaidchart.com/product) page.
- **Collaboration** - A web based collaboration feature for multi-user editing on Mermaid diagrams in real-time (Pro and Enterprise plans).

- **Comments** - Enhance collaboration by adding comments to diagrams.

- **Presentations** - A presentation mode for viewing Mermaid diagrams in a slideshow format.

## Plans

@@ -43,11 +51,9 @@

- **Enterprise** - A paid plan for enterprise use that includes all Pro features, and more.

## Access
To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.

Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

Mermaid Chart is currently offering a 14-day free trial of our newly-launched Pro tier. To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.
Mermaid Chart is currently offering a 14-day free trial on our Pro and Enterprise tiers. Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

## Mermaid JS contributions

@@ -157,7 +157,7 @@ For a list of Mermaid Plugins and Integrations, visit the [Integrations page](..

Mermaid Chart plugins are available for:

- [ChatGPT](https://docs.mermaidchart.com/plugins/chatgpt)
- [ChatGPT](https://docs.mermaidchart.com/plugins/mermaid-chart-gpt)
- [JetBrains IDE](https://docs.mermaidchart.com/plugins/jetbrains-ide)
- [Microsoft PowerPoint](https://docs.mermaidchart.com/plugins/microsoft-powerpoint)
- [Microsoft Word](https://docs.mermaidchart.com/plugins/microsoft-word)
@@ -10,7 +10,13 @@

7 May 2024 · 5 mins

How to Choose the Right Documentation Software # Reliable and efficient documentation software is crucial in the fast-paced world of software development.
How to Choose the Right Documentation Software. Reliable and efficient documentation software is crucial in the fast-paced world of software development.

## [AI in software diagramming: What trends will define the future?](https://www.mermaidchart.com/blog/posts/ai-in-software-diagramming/)

24 April 2024 · 5 mins

Artificial intelligence (AI) tools are changing the way developers work.

## [Mermaid Chart Unveils Visual Editor for Sequence Diagrams](https://www.mermaidchart.com/blog/posts/mermaid-chart-unveils-visual-editor-for-sequence-diagrams/)

@@ -881,7 +881,7 @@ Examples of tooltip usage below:

```html
<script>
const callback = function () {
window.callback = function () {
alert('A callback was triggered');
};
</script>
@@ -913,7 +913,7 @@ flowchart LR

> **Success** The tooltip functionality and the ability to link to urls are available from version 0.5.2.

?> Due to limitations with how Docsify handles JavaScript callback functions, an alternate working demo for the above code can be viewed at [this jsfiddle](https://jsfiddle.net/Ogglas/2o73vdez/7).
?> Due to limitations with how Docsify handles JavaScript callback functions, an alternate working demo for the above code can be viewed at [this jsfiddle](https://jsfiddle.net/yk4h7qou/2/).

Links are opened in the same browser tab/window by default. It is possible to change this by adding a link target to the click definition (`_self`, `_blank`, `_parent` and `_top` are supported):

@@ -957,7 +957,7 @@ Beginner's tip—a full example using interactive links in a html context:
</pre>

<script>
const callback = function () {
window.callback = function () {
alert('A callback was triggered');
};
const config = {
@@ -4,7 +4,7 @@
"version": "10.2.4",
"description": "Markdownish syntax for generating flowcharts, sequence diagrams, class diagrams, gantt charts and git graphs.",
"type": "module",
"packageManager": "pnpm@8.15.8",
"packageManager": "pnpm@9.1.2+sha512.127dc83b9ea10c32be65d22a8efb4a65fb952e8fefbdfded39bdc3c97efc32d31b48b00420df2c1187ace28c921c902f0cb5a134a4d032b8b5295cbfa2c681e2",
"keywords": [
"diagram",
"markdown",
@@ -19,7 +19,7 @@
"build:esbuild": "pnpm run -r clean && tsx .esbuild/build.ts",
"build:mermaid": "pnpm build:esbuild --mermaid",
"build:viz": "pnpm build:esbuild --visualize",
"build:types": "tsx .build/types.ts",
"build:types": "pnpm --filter mermaid types:build-config && tsx .build/types.ts",
"build:types:watch": "tsc -p ./packages/mermaid/tsconfig.json --emitDeclarationOnly --watch",
"dev": "tsx .esbuild/server.ts",
"dev:vite": "tsx .vite/server.ts",
@@ -41,12 +41,12 @@ let nodeDb = {};
export const addVertices = async function (vert, svgId, root, doc, diagObj, parentLookupDb, graph) {
const svg = root.select(`[id="${svgId}"]`);
const nodes = svg.insert('g').attr('class', 'nodes');
const keys = Object.keys(vert);
const keys = [...vert.keys()];

// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
await Promise.all(
keys.map(async function (id) {
const vertex = vert[id];
const vertex = vert.get(id);

/**
* Variable for storing the classes for the vertex
@@ -595,7 +595,7 @@ const addMarkersToEdge = function (svgPath, edgeData, diagramType, arrowMarkerAb
*
* @param text
* @param diagObj
* @returns {Record<string, import('../../mermaid/src/diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
* @returns {Map<string, import('../../mermaid/src/diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
*/
export const getClasses = function (text, diagObj) {
log.info('Extracting classes');
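The hunks above swap the plain-object vertex lookup for a `Map`. A condensed sketch of the access-pattern change; the `vert` data and `Vertex` type below are stand-ins for illustration, not the real flowchart types.

```ts
// Illustrative sketch of the Record -> Map change in addVertices.
type Vertex = { id: string; labelType?: string };

const vert = new Map<string, Vertex>([
  ['A', { id: 'A' }],
  ['B', { id: 'B' }],
]);

// Before: const keys = Object.keys(vert); const vertex = vert[id];
// After: keys come from the Map, and lookups use get().
const keys = [...vert.keys()];
for (const id of keys) {
  const vertex = vert.get(id);
  console.log(id, vertex);
}
```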
packages/mermaid-layout-elk/package.json (new file, 43 lines)
@@ -0,0 +1,43 @@
{
  "name": "@mermaid-js/layout-elk",
  "version": "0.0.1",
  "description": "ELK layout engine for mermaid",
  "module": "dist/mermaid-layout-elk.core.mjs",
  "types": "dist/packages/mermaid-layout-elk/src/index.d.ts",
  "type": "module",
  "exports": {
    ".": {
      "import": "./dist/mermaid-layout-elk.core.mjs",
      "types": "./dist/packages/mermaid-layout-elk/src/index.d.ts"
    },
    "./*": "./*"
  },
  "keywords": [
    "diagram",
    "markdown",
    "elk",
    "mermaid"
  ],
  "scripts": {
    "prepublishOnly": "pnpm -w run build"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/mermaid-js/mermaid"
  },
  "contributors": [
    "Knut Sveidqvist",
    "Sidharth Vinod"
  ],
  "license": "MIT",
  "dependencies": {
    "elkjs": "^0.9.3",
    "d3": "^7.9.0"
  },
  "peerDependencies": {
    "mermaid": "workspace:^"
  },
  "files": [
    "dist"
  ]
}
packages/mermaid-layout-elk/src/find-common-ancestor.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
export interface TreeData {
  parentById: Record<string, string>;
  childrenById: Record<string, string[]>;
}

export const findCommonAncestor = (id1: string, id2: string, treeData: TreeData) => {
  const { parentById } = treeData;
  const visited = new Set();
  let currentId = id1;
  while (currentId) {
    visited.add(currentId);
    if (currentId === id2) {
      return currentId;
    }
    currentId = parentById[currentId];
  }
  currentId = id2;
  while (currentId) {
    if (visited.has(currentId)) {
      return currentId;
    }
    currentId = parentById[currentId];
  }
  return 'root';
};
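A small usage sketch for `findCommonAncestor`: it walks up from the first id, then up from the second until it meets a visited node, falling back to `'root'`. The tree below is invented for illustration; ids mirror the subgraph structure used in the test fixtures earlier on this page.

```ts
import { findCommonAncestor, type TreeData } from './find-common-ancestor.js';

// Hypothetical parent/child lookup of the kind addSubGraphs builds.
const treeData: TreeData = {
  parentById: { core0: 'ibm', core1: 'ibm', mem: 'amd' },
  childrenById: { ibm: ['core0', 'core1'], amd: ['mem'] },
};

console.log(findCommonAncestor('core0', 'core1', treeData)); // 'ibm'
console.log(findCommonAncestor('core0', 'mem', treeData)); // 'root' (no shared subgraph)
```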
packages/mermaid-layout-elk/src/layouts.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import type { LayoutLoaderDefinition } from 'mermaid';

const loader = async () => await import(`./render.js`);
const algos = ['elk.stress', 'elk.force', 'elk.mrtree', 'elk.sporeOverlap'];

export const layouts: LayoutLoaderDefinition[] = [
  {
    name: 'elk',
    loader,
    algorithm: 'elk.layered',
  },
  ...algos.map((algo) => ({
    name: algo,
    loader,
    algorithm: algo,
  })),
];
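Each entry above registers a layout name that mermaid can resolve lazily: `elk` maps to `elk.layered`, and the other names map one-to-one to ELK algorithms. A minimal selection sketch follows, assuming the published `@mermaid-js/layout-elk` entry point re-exports `layouts` the same way the local build artifact used earlier on this page does.

```ts
import mermaid from 'mermaid';
import { layouts } from '@mermaid-js/layout-elk';

// Make the ELK loaders known to mermaid, then pick one of the registered names.
mermaid.registerLayoutLoaders(layouts);
mermaid.initialize({ startOnLoad: true, layout: 'elk.stress' });
```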
588
packages/mermaid-layout-elk/src/render.ts
Normal file
588
packages/mermaid-layout-elk/src/render.ts
Normal file
@@ -0,0 +1,588 @@
|
||||
// @ts-nocheck File not ready to check types
|
||||
import { curveLinear } from 'd3';
|
||||
import ELK from 'elkjs/lib/elk.bundled.js';
|
||||
import mermaid from 'mermaid';
|
||||
import { findCommonAncestor } from './find-common-ancestor.js';
|
||||
import config from '../../mermaid/src/defaultConfig';
|
||||
|
||||
const {
|
||||
common,
|
||||
getConfig,
|
||||
insertCluster,
|
||||
insertEdge,
|
||||
insertEdgeLabel,
|
||||
insertMarkers,
|
||||
insertNode,
|
||||
interpolateToCurve,
|
||||
labelHelper,
|
||||
log,
|
||||
positionEdgeLabel,
|
||||
} = mermaid.internalHelpers;
|
||||
|
||||
const nodeDb = {};
|
||||
const portPos = {};
|
||||
const clusterDb = {};
|
||||
|
||||
export const addVertex = async (nodeEl, graph, nodeArr, node) => {
|
||||
const labelData = { width: 0, height: 0 };
|
||||
const ports = [
|
||||
{
|
||||
id: node.id + '-west',
|
||||
layoutOptions: {
|
||||
'port.side': 'WEST',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: node.id + '-east',
|
||||
layoutOptions: {
|
||||
'port.side': 'EAST',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: node.id + '-south',
|
||||
layoutOptions: {
|
||||
'port.side': 'SOUTH',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: node.id + '-north',
|
||||
layoutOptions: {
|
||||
'port.side': 'NORTH',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
let boundingBox;
|
||||
const child = {
|
||||
...node,
|
||||
ports: node.shape === 'diamond' ? ports : [],
|
||||
};
|
||||
graph.children.push(child);
|
||||
nodeDb[node.id] = child;
|
||||
|
||||
// // Add the element to the DOM
|
||||
if (!node.isGroup) {
|
||||
const childNodeEl = await insertNode(nodeEl, node, node.dir);
|
||||
boundingBox = childNodeEl.node().getBBox();
|
||||
child.domId = childNodeEl;
|
||||
child.width = boundingBox.width;
|
||||
child.height = boundingBox.height;
|
||||
} else {
|
||||
child.children = [];
|
||||
await addVertices(nodeEl, nodeArr, child, node.id);
|
||||
|
||||
if (node.label) {
|
||||
const { shapeSvg, bbox } = await labelHelper(nodeEl, node, undefined, true);
|
||||
labelData.width = bbox.width;
|
||||
labelData.wrappingWidth = getConfig().flowchart.wrappingWidth;
|
||||
labelData.height = bbox.height - 8;
|
||||
labelData.labelNode = shapeSvg.node();
|
||||
// We need the label hight to be able to size the subgraph;
|
||||
shapeSvg.remove();
|
||||
} else {
|
||||
// Subgraph without label
|
||||
labelData.width = 0;
|
||||
labelData.height = 0;
|
||||
}
|
||||
child.labelData = labelData;
|
||||
child.domId = nodeEl;
|
||||
}
|
||||
};
|
||||
|
||||
export const addVertices = async function (nodeEl, nodeArr, graph, parentId) {
|
||||
const siblings = nodeArr.filter((node) => node.parentId === parentId);
|
||||
log.info('addVertices DAGA', siblings, parentId);
|
||||
// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
|
||||
await Promise.all(
|
||||
siblings.map(async (node) => {
|
||||
await addVertex(nodeEl, graph, nodeArr, node);
|
||||
})
|
||||
);
|
||||
return graph;
|
||||
};
|
||||
|
||||
const drawNodes = (relX, relY, nodeArray, svg, subgraphsEl, depth) => {
|
||||
nodeArray.forEach(function (node) {
|
||||
if (node) {
|
||||
nodeDb[node.id] = node;
|
||||
nodeDb[node.id].offset = {
|
||||
posX: node.x + relX,
|
||||
posY: node.y + relY,
|
||||
x: relX,
|
||||
y: relY,
|
||||
depth,
|
||||
width: node.width,
|
||||
height: node.height,
|
||||
};
|
||||
if (node.isGroup) {
|
||||
log.debug('Id abc88 subgraph = ', node.id, node.x, node.y, node.labelData);
|
||||
const subgraphEl = subgraphsEl.insert('g').attr('class', 'subgraph');
|
||||
// TODO use faster way of cloning
|
||||
const clusterNode = JSON.parse(JSON.stringify(node));
|
||||
clusterNode.x = node.offset.posX + node.width / 2;
|
||||
clusterNode.y = node.offset.posY + node.height / 2;
|
||||
const cluster = insertCluster(subgraphEl, clusterNode);
|
||||
|
||||
log.info('Id (UGH)= ', node.shape, node.labels);
|
||||
} else {
|
||||
log.info(
|
||||
'Id NODE = ',
|
||||
node.id,
|
||||
node.x,
|
||||
node.y,
|
||||
relX,
|
||||
relY,
|
||||
node.domId.node(),
|
||||
`translate(${node.x + relX + node.width / 2}, ${node.y + relY + node.height / 2})`
|
||||
);
|
||||
node.domId.attr(
|
||||
'transform',
|
||||
`translate(${node.x + relX + node.width / 2}, ${node.y + relY + node.height / 2})`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
nodeArray.forEach(function (node) {
|
||||
if (node && node.isGroup) {
|
||||
drawNodes(relX + node.x, relY + node.y, node.children, svg, subgraphsEl, depth + 1);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const getNextPort = (node, edgeDirection, graphDirection) => {
|
||||
log.info('getNextPort abc88', { node, edgeDirection, graphDirection });
|
||||
if (!portPos[node]) {
|
||||
switch (graphDirection) {
|
||||
case 'TB':
|
||||
case 'TD':
|
||||
portPos[node] = {
|
||||
inPosition: 'north',
|
||||
outPosition: 'south',
|
||||
};
|
||||
break;
|
||||
case 'BT':
|
||||
portPos[node] = {
|
||||
inPosition: 'south',
|
||||
outPosition: 'north',
|
||||
};
|
||||
break;
|
||||
case 'RL':
|
||||
portPos[node] = {
|
||||
inPosition: 'east',
|
||||
outPosition: 'west',
|
||||
};
|
||||
break;
|
||||
case 'LR':
|
||||
portPos[node] = {
|
||||
inPosition: 'west',
|
||||
outPosition: 'east',
|
||||
};
|
||||
break;
|
||||
}
|
||||
}
|
||||
const result = edgeDirection === 'in' ? portPos[node].inPosition : portPos[node].outPosition;
|
||||
|
||||
if (edgeDirection === 'in') {
|
||||
portPos[node].inPosition = getNextPosition(
|
||||
portPos[node].inPosition,
|
||||
edgeDirection,
|
||||
graphDirection
|
||||
);
|
||||
} else {
|
||||
portPos[node].outPosition = getNextPosition(
|
||||
portPos[node].outPosition,
|
||||
edgeDirection,
|
||||
graphDirection
|
||||
);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
const addSubGraphs = function (nodeArr) {
|
||||
const parentLookupDb = { parentById: {}, childrenById: {} };
|
||||
const subgraphs = nodeArr.filter((node) => node.isGroup);
|
||||
log.info('Subgraphs - ', subgraphs);
|
||||
subgraphs.forEach(function (subgraph) {
|
||||
const children = nodeArr.filter((node) => node.parentId === subgraph.id);
|
||||
children.forEach(function (node) {
|
||||
parentLookupDb.parentById[node.id] = subgraph.id;
|
||||
if (parentLookupDb.childrenById[subgraph.id] === undefined) {
|
||||
parentLookupDb.childrenById[subgraph.id] = [];
|
||||
}
|
||||
parentLookupDb.childrenById[subgraph.id].push(node);
|
||||
});
|
||||
});
|
||||
|
||||
subgraphs.forEach(function (subgraph) {
|
||||
const data = { id: subgraph.id };
|
||||
if (parentLookupDb.parentById[subgraph.id] !== undefined) {
|
||||
data.parent = parentLookupDb.parentById[subgraph.id];
|
||||
}
|
||||
});
|
||||
return parentLookupDb;
|
||||
};
|
||||
|
||||
const getEdgeStartEndPoint = (edge, dir) => {
|
||||
let source = edge.start;
|
||||
let target = edge.end;
|
||||
|
||||
// Save the original source and target
|
||||
const sourceId = source;
|
||||
const targetId = target;
|
||||
|
||||
const startNode = nodeDb[edge.start.id];
|
||||
const endNode = nodeDb[edge.end.id];
|
||||
|
||||
if (!startNode || !endNode) {
|
||||
return { source, target };
|
||||
}
|
||||
|
||||
if (startNode.shape === 'diamond') {
|
||||
source = `${source}-${getNextPort(source, 'out', dir)}`;
|
||||
}
|
||||
|
||||
if (endNode.shape === 'diamond') {
|
||||
target = `${target}-${getNextPort(target, 'in', dir)}`;
|
||||
}
|
||||
|
||||
// Add the edge to the graph
|
||||
return { source, target, sourceId, targetId };
|
||||
};
|
||||
|
||||
const calcOffset = function (src, dest, parentLookupDb) {
|
||||
const ancestor = findCommonAncestor(src, dest, parentLookupDb);
|
||||
if (ancestor === undefined || ancestor === 'root') {
|
||||
return { x: 0, y: 0 };
|
||||
}
|
||||
|
||||
const ancestorOffset = nodeDb[ancestor].offset;
|
||||
return { x: ancestorOffset.posX, y: ancestorOffset.posY };
|
||||
};
|
||||
|
||||
/**
|
||||
* Add edges to graph based on parsed graph definition
|
||||
*/
|
||||
export const addEdges = function (dataForLayout, graph, svg) {
|
||||
log.info('abc78 DAGA edges = ', dataForLayout);
|
||||
const edges = dataForLayout.edges;
|
||||
const labelsEl = svg.insert('g').attr('class', 'edgeLabels');
|
||||
const linkIdCnt = {};
|
||||
const dir = dataForLayout.direction || 'DOWN';
|
||||
let defaultStyle;
|
||||
let defaultLabelStyle;
|
||||
|
||||
edges.forEach(function (edge) {
|
||||
// Identify Link
|
||||
const linkIdBase = edge.id; // 'L-' + edge.start + '-' + edge.end;
|
||||
// count the links from+to the same node to give unique id
|
||||
if (linkIdCnt[linkIdBase] === undefined) {
|
||||
linkIdCnt[linkIdBase] = 0;
|
||||
log.info('abc78 new entry', linkIdBase, linkIdCnt[linkIdBase]);
|
||||
} else {
|
||||
linkIdCnt[linkIdBase]++;
|
||||
log.info('abc78 new entry', linkIdBase, linkIdCnt[linkIdBase]);
|
||||
}
|
||||
const linkId = linkIdBase + '_' + linkIdCnt[linkIdBase];
|
||||
edge.id = linkId;
|
||||
log.info('abc78 new link id to be used is', linkIdBase, linkId, linkIdCnt[linkIdBase]);
|
||||
const linkNameStart = 'LS_' + edge.start;
|
||||
const linkNameEnd = 'LE_' + edge.end;
|
||||
|
||||
const edgeData = { style: '', labelStyle: '' };
|
||||
edgeData.minlen = edge.length || 1;
|
||||
edge.text = edge.label;
|
||||
// Set link type for rendering
|
||||
if (edge.type === 'arrow_open') {
|
||||
edgeData.arrowhead = 'none';
|
||||
} else {
|
||||
edgeData.arrowhead = 'normal';
|
||||
}
|
||||
|
||||
// Check of arrow types, placed here in order not to break old rendering
|
||||
edgeData.arrowTypeStart = 'arrow_open';
|
||||
edgeData.arrowTypeEnd = 'arrow_open';
|
||||
|
||||
/* eslint-disable no-fallthrough */
|
||||
switch (edge.type) {
|
||||
case 'double_arrow_cross':
|
||||
edgeData.arrowTypeStart = 'arrow_cross';
|
||||
case 'arrow_cross':
|
||||
edgeData.arrowTypeEnd = 'arrow_cross';
|
||||
break;
|
||||
case 'double_arrow_point':
|
||||
edgeData.arrowTypeStart = 'arrow_point';
|
||||
case 'arrow_point':
|
||||
edgeData.arrowTypeEnd = 'arrow_point';
|
||||
break;
|
||||
case 'double_arrow_circle':
|
||||
edgeData.arrowTypeStart = 'arrow_circle';
|
||||
case 'arrow_circle':
|
||||
edgeData.arrowTypeEnd = 'arrow_circle';
|
||||
break;
|
||||
}
|
||||
|
||||
let style = '';
|
||||
let labelStyle = '';
|
||||
|
||||
switch (edge.stroke) {
|
||||
case 'normal':
|
||||
style = 'fill:none;';
|
||||
if (defaultStyle !== undefined) {
|
||||
style = defaultStyle;
|
||||
}
|
||||
if (defaultLabelStyle !== undefined) {
|
||||
labelStyle = defaultLabelStyle;
|
||||
}
|
||||
edgeData.thickness = 'normal';
|
||||
edgeData.pattern = 'solid';
|
||||
break;
|
||||
case 'dotted':
|
||||
edgeData.thickness = 'normal';
|
||||
edgeData.pattern = 'dotted';
|
||||
edgeData.style = 'fill:none;stroke-width:2px;stroke-dasharray:3;';
|
||||
break;
|
||||
case 'thick':
|
||||
edgeData.thickness = 'thick';
|
||||
edgeData.pattern = 'solid';
|
||||
edgeData.style = 'stroke-width: 3.5px;fill:none;';
|
||||
break;
|
||||
}
|
||||
// if (edge.style !== undefined) {
|
||||
// const styles = getStylesFromArray(edge.style);
|
||||
// style = styles.style;
|
||||
// labelStyle = styles.labelStyle;
|
||||
// }
|
||||
|
||||
edgeData.style = edgeData.style += style;
|
||||
edgeData.labelStyle = edgeData.labelStyle += labelStyle;
|
||||
|
||||
const conf = getConfig();
|
||||
if (edge.interpolate !== undefined) {
|
||||
edgeData.curve = interpolateToCurve(edge.interpolate, curveLinear);
|
||||
} else if (edges.defaultInterpolate !== undefined) {
|
||||
edgeData.curve = interpolateToCurve(edges.defaultInterpolate, curveLinear);
|
||||
} else {
|
||||
edgeData.curve = interpolateToCurve(conf.curve, curveLinear);
|
||||
}
|
||||
|
||||
if (edge.text === undefined) {
|
||||
if (edge.style !== undefined) {
|
||||
edgeData.arrowheadStyle = 'fill: #333';
|
||||
}
|
||||
} else {
|
||||
edgeData.arrowheadStyle = 'fill: #333';
|
||||
edgeData.labelpos = 'c';
|
||||
}
|
||||
|
||||
edgeData.labelType = edge.labelType;
|
||||
edgeData.label = (edge?.text || '').replace(common.lineBreakRegex, '\n');
|
||||
|
||||
if (edge.style === undefined) {
|
||||
edgeData.style = edgeData.style || 'stroke: #333; stroke-width: 1.5px;fill:none;';
|
||||
}
|
||||
|
||||
edgeData.labelStyle = edgeData.labelStyle.replace('color:', 'fill:');
|
||||
|
||||
edgeData.id = linkId;
|
||||
edgeData.classes = 'flowchart-link ' + linkNameStart + ' ' + linkNameEnd;
|
||||
|
||||
const labelEl = insertEdgeLabel(labelsEl, edgeData);
|
||||
|
||||
// calculate start and end points of the edge, note that the source and target
|
||||
// can be modified for shapes that have ports
|
||||
const { source, target, sourceId, targetId } = getEdgeStartEndPoint(edge, dir);
|
||||
log.debug('abc78 source and target', source, target);
|
||||
// Add the edge to the graph
|
||||
graph.edges.push({
|
||||
id: 'e' + edge.start + edge.end,
|
||||
...edge,
|
||||
sources: [source],
|
||||
targets: [target],
|
||||
sourceId,
|
||||
targetId,
|
||||
labelEl: labelEl,
|
||||
labels: [
|
||||
{
|
||||
width: edgeData.width,
|
||||
height: edgeData.height,
|
||||
orgWidth: edgeData.width,
|
||||
orgHeight: edgeData.height,
|
||||
text: edgeData.label,
|
||||
layoutOptions: {
|
||||
'edgeLabels.inline': 'true',
|
||||
'edgeLabels.placement': 'CENTER',
|
||||
},
|
||||
},
|
||||
],
|
||||
edgeData,
|
||||
});
|
||||
});
|
||||
return graph;
|
||||
};
|
||||
|
||||
function dir2ElkDirection(dir) {
|
||||
switch (dir) {
|
||||
case 'LR':
|
||||
return 'RIGHT';
|
||||
case 'RL':
|
||||
return 'LEFT';
|
||||
case 'TB':
|
||||
return 'DOWN';
|
||||
case 'BT':
|
||||
return 'UP';
|
||||
default:
|
||||
return 'DOWN';
|
||||
}
|
||||
}
|
||||
|
||||
function setIncludeChildrenPolicy(nodeId: string, ancestorId: string) {
|
||||
const node = nodeDb[nodeId];
|
||||
|
||||
if (!node) {
|
||||
return;
|
||||
}
|
||||
if (node?.layoutOptions === undefined) {
|
||||
node.layoutOptions = {};
|
||||
}
|
||||
node.layoutOptions['elk.hierarchyHandling'] = 'INCLUDE_CHILDREN';
|
||||
if (node.id !== ancestorId) {
|
||||
setIncludeChildrenPolicy(node.parentId, ancestorId);
|
||||
}
|
||||
}
|
||||
|
||||
export const render = async (data4Layout, svg, element, algorithm) => {
|
||||
const elk = new ELK();
|
||||
|
||||
// Add the arrowheads to the svg
|
||||
insertMarkers(element, data4Layout.markers, data4Layout.type, data4Layout.diagramId);
|
||||
|
||||
// Setup the graph with the layout options and the data for the layout
|
||||
let elkGraph = {
|
||||
id: 'root',
|
||||
layoutOptions: {
|
||||
'elk.hierarchyHandling': 'INCLUDE_CHILDREN',
|
||||
'elk.algorithm': algorithm,
|
||||
'nodePlacement.strategy': data4Layout.config['elk.nodePlacement.strategy'],
|
||||
'elk.layered.mergeEdges': data4Layout.config.mergeEdges,
|
||||
'elk.direction': 'DOWN',
|
||||
'spacing.baseValue': 30,
|
||||
},
|
||||
children: [],
|
||||
edges: [],
|
||||
};
|
||||
|
||||
log.info('Drawing flowchart using v4 renderer', elk);
|
||||
|
||||
// Set the direction of the graph based on the parsed information
|
||||
const dir = data4Layout.direction || 'DOWN';
|
||||
elkGraph.layoutOptions['elk.direction'] = dir2ElkDirection(dir);
|
||||
|
||||
// Create the lookup db for the subgraphs and their children to used when creating
|
||||
// the tree structured graph
|
||||
const parentLookupDb = addSubGraphs(data4Layout.nodes);
|
||||
|
||||
// Add elements in the svg to be used to hold the subgraphs container
|
||||
// elements and the nodes
|
||||
const subGraphsEl = svg.insert('g').attr('class', 'subgraphs');
|
||||
const nodeEl = svg.insert('g').attr('class', 'nodes');
|
||||
|
||||
// Add the nodes to the graph, this will entail creating the actual nodes
|
||||
// in order to get the size of the node. You can't get the size of a node
|
||||
// that is not in the dom so we need to add it to the dom, get the size
|
||||
// we will position the nodes when we get the layout from elkjs
|
||||
elkGraph = await addVertices(nodeEl, data4Layout.nodes, elkGraph);
|
||||
// Time for the edges, we start with adding an element in the node to hold the edges
|
||||
const edgesEl = svg.insert('g').attr('class', 'edges edgePath');
|
||||
|
||||
// Add the edges to the elk graph, this will entail creating the actual edges
|
||||
elkGraph = addEdges(data4Layout, elkGraph, svg);
|
||||
|
||||
// Iterate through all nodes and add the top level nodes to the graph
|
||||
const nodes = data4Layout.nodes;
|
||||
nodes.forEach((n) => {
|
||||
const node = nodeDb[n.id];
|
||||
|
||||
// Subgraph
|
||||
if (parentLookupDb.childrenById[node.id] !== undefined) {
|
||||
log.trace('Subgraph XCX', node.id, node);
|
||||
node.labels = [
|
||||
{
|
||||
text: node.labelText,
|
||||
layoutOptions: {
|
||||
'nodeLabels.placement': '[H_CENTER, V_TOP, INSIDE]',
|
||||
},
|
||||
width: node?.labelData?.width || 0,
|
||||
height: node?.labelData?.height || 0,
|
||||
},
|
||||
];
|
||||
node.layoutOptions = {
|
||||
'spacing.baseValue': 30,
|
||||
};
|
||||
if (node.dir) {
|
||||
node.layoutOptions = {
|
||||
...node.layoutOptions,
|
||||
'elk.direction': dir2ElkDirection(node.dir),
|
||||
'elk.hierarchyHandling': 'SEPARATE_CHILDREN',
|
||||
};
|
||||
}
|
||||
delete node.x;
|
||||
delete node.y;
|
||||
delete node.width;
|
||||
delete node.height;
|
||||
}
|
||||
});
|
||||
elkGraph.edges.forEach((edge) => {
|
||||
const source = edge.sources[0];
|
||||
const target = edge.targets[0];
|
||||
|
||||
if (nodeDb[source].parentId !== nodeDb[target].parentId) {
|
||||
const ancestorId = findCommonAncestor(source, target, parentLookupDb);
|
||||
// an edge that breaks a subgraph has been identified, set configuration accordingly
|
||||
setIncludeChildrenPolicy(source, ancestorId);
|
||||
setIncludeChildrenPolicy(target, ancestorId);
|
||||
}
|
||||
});
|
||||
|
||||
log.trace('before layout', JSON.stringify(elkGraph, null, 2));
|
||||
const g = await elk.layout(elkGraph);
|
||||
log.info('after layout', JSON.stringify(g));
|
||||
|
||||
// debugger;
|
||||
drawNodes(0, 0, g.children, svg, subGraphsEl, 0);
|
||||
g.edges?.map((edge) => {
|
||||
// (elem, edge, clusterDb, diagramType, graph, id)
|
||||
edge.start = nodeDb[edge.sources[0]];
|
||||
edge.end = nodeDb[edge.targets[0]];
|
||||
const sourceId = edge.start.id;
|
||||
const targetId = edge.end.id;
|
||||
|
||||
const offset = calcOffset(sourceId, targetId, parentLookupDb);
|
||||
|
||||
if (edge.sections) {
|
||||
const src = edge.sections[0].startPoint;
|
||||
const dest = edge.sections[0].endPoint;
|
||||
const segments = edge.sections[0].bendPoints ? edge.sections[0].bendPoints : [];
|
||||
|
||||
const segPoints = segments.map((segment) => {
|
||||
return { x: segment.x + offset.x, y: segment.y + offset.y };
|
||||
});
|
||||
edge.points = [
|
||||
{ x: src.x + offset.x, y: src.y + offset.y },
|
||||
...segPoints,
|
||||
{ x: dest.x + offset.x, y: dest.y + offset.y },
|
||||
];
|
||||
const paths = insertEdge(
|
||||
edgesEl,
|
||||
edge,
|
||||
clusterDb,
|
||||
data4Layout.type,
|
||||
g,
|
||||
data4Layout.diagramId
|
||||
);
|
||||
|
||||
edge.x = edge.labels[0].x + offset.x + edge.labels[0].width / 2;
|
||||
edge.y = edge.labels[0].y + offset.y + edge.labels[0].height / 2;
|
||||
positionEdgeLabel(edge, paths);
|
||||
}
|
||||
});
|
||||
};
|
||||
packages/mermaid-layout-elk/tsconfig.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "rootDir": "./src",
    "outDir": "./dist",
    "types": ["vitest/importMeta", "vitest/globals"]
  },
  "include": ["./src/**/*.ts"],
  "typeRoots": ["./src/types"]
}
@@ -77,11 +77,11 @@
"dagre-d3-es": "7.0.10",
"dayjs": "^1.11.10",
"dompurify": "^3.0.11",
"elkjs": "^0.9.2",
"katex": "^0.16.9",
"khroma": "^2.1.0",
"lodash-es": "^4.17.21",
"mdast-util-from-markdown": "^2.0.0",
"roughjs": "^4.6.6",
"stylis": "^4.3.1",
"ts-dedent": "^2.2.0",
"uuid": "^9.0.1"
@@ -10,18 +10,16 @@

/* eslint-disable no-console */

import { readFile, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import _Ajv2019, { type JSONSchemaType } from 'ajv/dist/2019.js';
import { JSON_SCHEMA, load } from 'js-yaml';
import { compile, type JSONSchema } from 'json-schema-to-typescript';
import assert from 'node:assert';
import { execFile } from 'node:child_process';
import { readFile, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import { promisify } from 'node:util';

import { load, JSON_SCHEMA } from 'js-yaml';
import { compile, type JSONSchema } from 'json-schema-to-typescript';
import prettier from 'prettier';

import _Ajv2019, { type JSONSchemaType } from 'ajv/dist/2019.js';

// Workaround for wrong AJV types, see
// https://github.com/ajv-validator/ajv/issues/2132#issuecomment-1290409907
const Ajv2019 = _Ajv2019 as unknown as typeof _Ajv2019.default;
@@ -34,28 +32,6 @@ const verifyOnly = process.argv.includes('--verify');
/** If `true`, automatically `git add` any changes (i.e. during `pnpm run pre-commit`)*/
const git = process.argv.includes('--git');

/**
 * All of the keys in the mermaid config that have a mermaid diagram config.
 */
const MERMAID_CONFIG_DIAGRAM_KEYS = [
'flowchart',
'sequence',
'gantt',
'journey',
'class',
'state',
'er',
'pie',
'quadrantChart',
'xyChart',
'requirement',
'mindmap',
'timeline',
'gitGraph',
'c4',
'sankey',
];

/**
 * Loads the MermaidConfig JSON schema YAML file.
 *
@@ -148,53 +124,9 @@ async function generateTypescript(mermaidConfigSchema: JSONSchemaType<MermaidCon
return { ...schema, required: [] };
}

/**
 * This is a temporary hack to control the order the types are generated in.
 *
 * By default, json-schema-to-typescript outputs the $defs in the order they
 * are used, then any unused schemas at the end.
 *
 * **The only purpose of this function is to make the `git diff` simpler**
 * **We should remove this later to simplify the code**
 *
 * @todo TODO: Remove this function in a future PR.
 * @param schema - The input schema.
 * @returns The schema with all `$ref`s removed.
 */
function unrefSubschemas(schema: JSONSchemaType<Record<string, any>>) {
return {
...schema,
properties: Object.fromEntries(
Object.entries(schema.properties).map(([key, propertySchema]) => {
if (MERMAID_CONFIG_DIAGRAM_KEYS.includes(key)) {
const { $ref, ...propertySchemaWithoutRef } = propertySchema as JSONSchemaType<unknown>;
if ($ref === undefined) {
throw Error(
`subSchema ${key} is in MERMAID_CONFIG_DIAGRAM_KEYS but does not have a $ref field`
);
}
const [
_root, // eslint-disable-line @typescript-eslint/no-unused-vars
_defs, // eslint-disable-line @typescript-eslint/no-unused-vars
defName,
] = $ref.split('/');
return [
key,
{
...propertySchemaWithoutRef,
tsType: defName,
},
];
}
return [key, propertySchema];
})
),
};
}

assert.ok(mermaidConfigSchema.$defs);
const modifiedSchema = {
...unrefSubschemas(removeRequired(mermaidConfigSchema)),
...removeRequired(mermaidConfigSchema),

$defs: Object.fromEntries(
Object.entries(mermaidConfigSchema.$defs).map(([key, subSchema]) => {
@@ -190,7 +190,10 @@ export const addDirective = (directive: MermaidConfig) => {

// If the directive has a fontFamily, but no themeVariables, add the fontFamily to the themeVariables
if (directive.fontFamily && (!directive.themeVariables || !directive.themeVariables.fontFamily)) {
directive.themeVariables = { fontFamily: directive.fontFamily };
directive.themeVariables = {
...directive.themeVariables,
fontFamily: directive.fontFamily,
};
}

directives.push(directive);
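The replacement above keeps any themeVariables already set on the directive instead of overwriting the whole object when only fontFamily needs to be filled in. A small before/after sketch with an invented directive:

```ts
const directive = {
  fontFamily: 'Kalam',
  themeVariables: { primaryColor: '#7bdfa7' }, // no fontFamily set here
};

// Old behaviour: themeVariables is replaced wholesale, losing primaryColor.
const before = { fontFamily: directive.fontFamily };

// New behaviour: existing themeVariables are spread in, then fontFamily is added.
const after = { ...directive.themeVariables, fontFamily: directive.fontFamily };

console.log(before); // { fontFamily: 'Kalam' }
console.log(after); // { primaryColor: '#7bdfa7', fontFamily: 'Kalam' }
```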
File diff suppressed because it is too large
@@ -1,12 +1,12 @@
import { select } from 'd3';
import { log } from '../logger.js';
import { labelHelper, updateNodeBounds, insertPolygonShape } from './shapes/util.js';
import { getConfig } from '../diagram-api/diagramAPI.js';
import intersect from './intersect/index.js';
import createLabel from './createLabel.js';
import note from './shapes/note.js';
import { evaluate } from '../diagrams/common/common.js';
import { log } from '../logger.js';
import { getArrowPoints } from './blockArrowHelper.js';
import createLabel from './createLabel.js';
import intersect from './intersect/index.js';
import note from './shapes/note.js';
import { insertPolygonShape, labelHelper, updateNodeBounds } from './shapes/util.js';

const formatClass = (str) => {
if (str) {
@@ -395,6 +395,7 @@ const rect = async (parent, node) => {
// add the rect
const rect = shapeSvg.insert('rect', ':first-child');

// console.log('Rect node:', node, 'bbox:', bbox, 'halfPadding:', halfPadding, 'node.padding:', node.padding);
// const totalWidth = bbox.width + node.padding * 2;
// const totalHeight = bbox.height + node.padding * 2;
const totalWidth = node.positioned ? node.width : bbox.width + node.padding;

@@ -15,6 +15,8 @@ export const labelHelper = async (parent, node, _classes, isNode) => {
classes = _classes;
}

// console.log('parentY', parent.node());

// Add outer g element
const shapeSvg = parent
.insert('g')
@@ -33,6 +35,7 @@ export const labelHelper = async (parent, node, _classes, isNode) => {
}

const textNode = label.node();
// console.log('parentX', parent, 'node',node,'labelText',labelText, textNode, node.labelType, 'label', label.node());
let text;
if (node.labelType === 'markdown') {
// text = textNode;

@@ -70,7 +70,7 @@ export interface DiagramRenderer {
getClasses?: (
text: string,
diagram: Pick<DiagramDefinition, 'db'>
) => Record<string, DiagramStyleClassDef>;
) => Map<string, DiagramStyleClassDef>;
}

export interface DiagramDefinition {
@@ -1,21 +1,26 @@
import clone from 'lodash-es/clone.js';
import * as configApi from '../../config.js';
import { getConfig } from '../../diagram-api/diagramAPI.js';
import type { DiagramDB } from '../../diagram-api/types.js';
import { log } from '../../logger.js';
import common from '../common/common.js';
import { clear as commonClear } from '../common/commonDb.js';
import type { Block, ClassDef } from './blockTypes.js';

// Initialize the node database for simple lookups
let blockDatabase: Record<string, Block> = {};
let blockDatabase: Map<string, Block> = new Map();
let edgeList: Block[] = [];
let edgeCount: Record<string, number> = {};
let edgeCount: Map<string, number> = new Map();

const COLOR_KEYWORD = 'color';
const FILL_KEYWORD = 'fill';
const BG_FILL = 'bgFill';
const STYLECLASS_SEP = ',';
const config = getConfig();

let classes = {} as Record<string, ClassDef>;
let classes: Map<string, ClassDef> = new Map();

const sanitizeText = (txt: string) => common.sanitizeText(txt, config);

/**
 * Called when the parser comes across a (style) class definition
@@ -26,10 +31,11 @@ let classes = {} as Record<string, ClassDef>;
 */
export const addStyleClass = function (id: string, styleAttributes = '') {
// create a new style class object with this id
if (classes[id] === undefined) {
classes[id] = { id: id, styles: [], textStyles: [] }; // This is a classDef
let foundClass = classes.get(id);
if (!foundClass) {
foundClass = { id: id, styles: [], textStyles: [] };
classes.set(id, foundClass); // This is a classDef
}
const foundClass = classes[id];
if (styleAttributes !== undefined && styleAttributes !== null) {
styleAttributes.split(STYLECLASS_SEP).forEach((attrib) => {
// remove any trailing ;
@@ -54,7 +60,7 @@ export const addStyleClass = function (id: string, styleAttributes = '') {
 * @param styles - the string with 1 or more style attributes (each separated by a comma)
 */
export const addStyle2Node = function (id: string, styles = '') {
const foundBlock = blockDatabase[id];
const foundBlock = blockDatabase.get(id)!;
if (styles !== undefined && styles !== null) {
foundBlock.styles = styles.split(STYLECLASS_SEP);
}
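A condensed illustration of the pattern this series of hunks applies across blockDB.ts: bracket access on plain objects becomes `Map.get`/`Map.set` with an explicit create-if-missing step. The `ClassDef` shape is taken from the fields used in the diff; the `ensureClass` helper name is invented for the sketch.

```ts
interface ClassDef {
  id: string;
  styles: string[];
  textStyles: string[];
}

const classes = new Map<string, ClassDef>();

// Create-if-missing lookup, replacing `classes[id] === undefined` checks.
function ensureClass(id: string): ClassDef {
  let found = classes.get(id);
  if (!found) {
    found = { id, styles: [], textStyles: [] };
    classes.set(id, found);
  }
  return found;
}

ensureClass('important').styles.push('fill:#f96');
console.log(classes.get('important')); // { id: 'important', styles: ['fill:#f96'], textStyles: [] }
```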
@@ -70,11 +76,11 @@ export const addStyle2Node = function (id: string, styles = '') {
 */
export const setCssClass = function (itemIds: string, cssClassName: string) {
itemIds.split(',').forEach(function (id: string) {
let foundBlock = blockDatabase[id];
let foundBlock = blockDatabase.get(id);
if (foundBlock === undefined) {
const trimmedId = id.trim();
blockDatabase[trimmedId] = { id: trimmedId, type: 'na', children: [] } as Block;
foundBlock = blockDatabase[trimmedId];
foundBlock = { id: trimmedId, type: 'na', children: [] } as Block;
blockDatabase.set(trimmedId, foundBlock);
}
if (!foundBlock.classes) {
foundBlock.classes = [];
@@ -87,6 +93,9 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
const blockList = _blockList.flat();
const children = [];
for (const block of blockList) {
if (block.label) {
block.label = sanitizeText(block.label);
}
if (block.type === 'classDef') {
addStyleClass(block.id, block.css);
continue;
@@ -104,12 +113,9 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
if (block.type === 'column-setting') {
parent.columns = block.columns || -1;
} else if (block.type === 'edge') {
if (edgeCount[block.id]) {
edgeCount[block.id]++;
} else {
edgeCount[block.id] = 1;
}
block.id = edgeCount[block.id] + '-' + block.id;
const count = (edgeCount.get(block.id) ?? 0) + 1;
edgeCount.set(block.id, count);
block.id = count + '-' + block.id;
edgeList.push(block);
} else {
if (!block.label) {
@@ -120,16 +126,17 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
block.label = block.id;
}
}
const newBlock = !blockDatabase[block.id];
if (newBlock) {
blockDatabase[block.id] = block;
const existingBlock = blockDatabase.get(block.id);

if (existingBlock === undefined) {
blockDatabase.set(block.id, block);
} else {
// Add newer relevant data to aggregated node
if (block.type !== 'na') {
blockDatabase[block.id].type = block.type;
existingBlock.type = block.type;
}
if (block.label !== block.id) {
blockDatabase[block.id].label = block.label;
existingBlock.label = block.label;
}
}

@@ -142,10 +149,10 @@ const populateBlockDatabase = (_blockList: Block[] | Block[][], parent: Block):
for (let j = 0; j < w; j++) {
const newBlock = clone(block);
newBlock.id = newBlock.id + '-' + j;
blockDatabase[newBlock.id] = newBlock;
blockDatabase.set(newBlock.id, newBlock);
children.push(newBlock);
}
} else if (newBlock) {
} else if (existingBlock === undefined) {
children.push(block);
}
}
@@ -160,12 +167,12 @@ const clear = (): void => {
log.debug('Clear called');
commonClear();
rootBlock = { id: 'root', type: 'composite', children: [], columns: -1 } as Block;
blockDatabase = { root: rootBlock };
blockDatabase = new Map([['root', rootBlock]]);
blocks = [] as Block[];
classes = {} as Record<string, ClassDef>;
classes = new Map();

edgeList = [];
edgeCount = {};
edgeCount = new Map();
};

export function typeStr2Type(typeStr: string) {
@@ -241,7 +248,7 @@ const setHierarchy = (block: Block[]): void => {
|
||||
};
|
||||
|
||||
const getColumns = (blockId: string): number => {
|
||||
const block = blockDatabase[blockId];
|
||||
const block = blockDatabase.get(blockId);
|
||||
if (!block) {
|
||||
return -1;
|
||||
}
|
||||
@@ -259,7 +266,7 @@ const getColumns = (blockId: string): number => {
|
||||
* @returns
|
||||
*/
|
||||
const getBlocksFlat = () => {
|
||||
return [...Object.values(blockDatabase)];
|
||||
return [...blockDatabase.values()];
|
||||
};
|
||||
/**
|
||||
* Returns the hierarchy of blocks
|
||||
@@ -273,11 +280,11 @@ const getEdges = () => {
|
||||
return edgeList;
|
||||
};
|
||||
const getBlock = (id: string) => {
|
||||
return blockDatabase[id];
|
||||
return blockDatabase.get(id);
|
||||
};
|
||||
|
||||
const setBlock = (block: Block) => {
|
||||
blockDatabase[block.id] = block;
|
||||
blockDatabase.set(block.id, block);
|
||||
};
|
||||
|
||||
const getLogger = () => console;
|
||||
|
||||
@@ -388,7 +388,7 @@ describe('Block diagram', function () {
|
||||
const mc = blocks[0];
|
||||
expect(mc.classes).toContain('black');
|
||||
const classes = db.getClasses();
|
||||
const black = classes.black;
|
||||
const black = classes.get('black')!;
|
||||
expect(black.id).toBe('black');
|
||||
expect(black.styles[0]).toEqual('color:#ffffff');
|
||||
});
|
||||
@@ -406,4 +406,21 @@ columns 1
|
||||
expect(B.styles).toContain('fill:#f9F');
|
||||
});
|
||||
});
|
||||
|
||||
describe('prototype properties', function () {
|
||||
function validateProperty(prop: string) {
|
||||
expect(() => block.parse(`block-beta\n${prop}`)).not.toThrow();
|
||||
expect(() =>
|
||||
block.parse(`block-beta\nA; classDef ${prop} color:#ffffff,fill:#000000; class A ${prop}`)
|
||||
).not.toThrow();
|
||||
}
|
||||
|
||||
it('should work with a __proto__ property', function () {
|
||||
validateProperty('__proto__');
|
||||
});
|
||||
|
||||
it('should work with a constructor property', function () {
|
||||
validateProperty('constructor');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -26,10 +26,10 @@ import type {
|
||||
const MERMAID_DOM_ID_PREFIX = 'classId-';
|
||||
|
||||
let relations: ClassRelation[] = [];
|
||||
let classes: ClassMap = {};
|
||||
let classes: Map<string, ClassNode> = new Map();
|
||||
let notes: ClassNote[] = [];
|
||||
let classCounter = 0;
|
||||
let namespaces: NamespaceMap = {};
|
||||
let namespaces: Map<string, NamespaceNode> = new Map();
|
||||
let namespaceCounter = 0;
|
||||
|
||||
let functions: any[] = [];
|
||||
@@ -57,7 +57,7 @@ export const setClassLabel = function (_id: string, label: string) {
|
||||
}
|
||||
|
||||
const { className } = splitClassNameAndType(id);
|
||||
classes[className].label = label;
|
||||
classes.get(className)!.label = label;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -70,13 +70,13 @@ export const addClass = function (_id: string) {
|
||||
const id = common.sanitizeText(_id, getConfig());
|
||||
const { className, type } = splitClassNameAndType(id);
|
||||
// Only add class if not exists
|
||||
if (Object.hasOwn(classes, className)) {
|
||||
if (classes.has(className)) {
|
||||
return;
|
||||
}
|
||||
// alert('Adding class: ' + className);
|
||||
const name = common.sanitizeText(className, getConfig());
|
||||
// alert('Adding class after: ' + name);
|
||||
classes[name] = {
|
||||
classes.set(name, {
|
||||
id: name,
|
||||
type: type,
|
||||
label: name,
|
||||
@@ -86,7 +86,7 @@ export const addClass = function (_id: string) {
|
||||
annotations: [],
|
||||
styles: [],
|
||||
domId: MERMAID_DOM_ID_PREFIX + name + '-' + classCounter,
|
||||
} as ClassNode;
|
||||
} as ClassNode);
|
||||
|
||||
classCounter++;
|
||||
};
|
||||
@@ -99,25 +99,25 @@ export const addClass = function (_id: string) {
|
||||
*/
|
||||
export const lookUpDomId = function (_id: string): string {
|
||||
const id = common.sanitizeText(_id, getConfig());
|
||||
if (id in classes) {
|
||||
return classes[id].domId;
|
||||
if (classes.has(id)) {
|
||||
return classes.get(id)!.domId;
|
||||
}
|
||||
throw new Error('Class not found: ' + id);
|
||||
};
|
||||
|
||||
export const clear = function () {
|
||||
relations = [];
|
||||
classes = {};
|
||||
classes = new Map();
|
||||
notes = [];
|
||||
functions = [];
|
||||
functions.push(setupToolTips);
|
||||
namespaces = {};
|
||||
namespaces = new Map();
|
||||
namespaceCounter = 0;
|
||||
commonClear();
|
||||
};
|
||||
|
||||
export const getClass = function (id: string): ClassNode {
|
||||
return classes[id];
|
||||
return classes.get(id)!;
|
||||
};
|
||||
|
||||
export const getClasses = function (): ClassMap {
|
||||
@@ -157,7 +157,7 @@ export const addRelation = function (relation: ClassRelation) {
|
||||
*/
|
||||
export const addAnnotation = function (className: string, annotation: string) {
|
||||
const validatedClassName = splitClassNameAndType(className).className;
|
||||
classes[validatedClassName].annotations.push(annotation);
|
||||
classes.get(validatedClassName)!.annotations.push(annotation);
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -173,7 +173,7 @@ export const addMember = function (className: string, member: string) {
|
||||
addClass(className);
|
||||
|
||||
const validatedClassName = splitClassNameAndType(className).className;
|
||||
const theClass = classes[validatedClassName];
|
||||
const theClass = classes.get(validatedClassName)!;
|
||||
|
||||
if (typeof member === 'string') {
|
||||
// Member can contain white spaces, we trim them out
|
||||
@@ -226,8 +226,9 @@ export const setCssClass = function (ids: string, className: string) {
|
||||
if (_id[0].match(/\d/)) {
|
||||
id = MERMAID_DOM_ID_PREFIX + id;
|
||||
}
|
||||
if (classes[id] !== undefined) {
|
||||
classes[id].cssClasses.push(className);
|
||||
const classNode = classes.get(id);
|
||||
if (classNode) {
|
||||
classNode.cssClasses.push(className);
|
||||
}
|
||||
});
|
||||
};
|
||||
@@ -241,17 +242,17 @@ export const setCssClass = function (ids: string, className: string) {
|
||||
const setTooltip = function (ids: string, tooltip?: string) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
if (tooltip !== undefined) {
|
||||
classes[id].tooltip = sanitizeText(tooltip);
|
||||
classes.get(id)!.tooltip = sanitizeText(tooltip);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export const getTooltip = function (id: string, namespace?: string) {
|
||||
if (namespace) {
|
||||
return namespaces[namespace].classes[id].tooltip;
|
||||
if (namespace && namespaces.has(namespace)) {
|
||||
return namespaces.get(namespace)!.classes.get(id)!.tooltip;
|
||||
}
|
||||
|
||||
return classes[id].tooltip;
|
||||
return classes.get(id)!.tooltip;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -268,14 +269,15 @@ export const setLink = function (ids: string, linkStr: string, target: string) {
|
||||
if (_id[0].match(/\d/)) {
|
||||
id = MERMAID_DOM_ID_PREFIX + id;
|
||||
}
|
||||
if (classes[id] !== undefined) {
|
||||
classes[id].link = utils.formatUrl(linkStr, config);
|
||||
const theClass = classes.get(id);
|
||||
if (theClass) {
|
||||
theClass.link = utils.formatUrl(linkStr, config);
|
||||
if (config.securityLevel === 'sandbox') {
|
||||
classes[id].linkTarget = '_top';
|
||||
theClass.linkTarget = '_top';
|
||||
} else if (typeof target === 'string') {
|
||||
classes[id].linkTarget = sanitizeText(target);
|
||||
theClass.linkTarget = sanitizeText(target);
|
||||
} else {
|
||||
classes[id].linkTarget = '_blank';
|
||||
theClass.linkTarget = '_blank';
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -292,7 +294,7 @@ export const setLink = function (ids: string, linkStr: string, target: string) {
|
||||
export const setClickEvent = function (ids: string, functionName: string, functionArgs: string) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
setClickFunc(id, functionName, functionArgs);
|
||||
classes[id].haveCallback = true;
|
||||
classes.get(id)!.haveCallback = true;
|
||||
});
|
||||
setCssClass(ids, 'clickable');
|
||||
};
|
||||
@@ -308,7 +310,7 @@ const setClickFunc = function (_domId: string, functionName: string, functionArg
|
||||
}
|
||||
|
||||
const id = domId;
|
||||
if (classes[id] !== undefined) {
|
||||
if (classes.has(id)) {
|
||||
const elemId = lookUpDomId(id);
|
||||
let argList: string[] = [];
|
||||
if (typeof functionArgs === 'string') {
|
||||
@@ -417,22 +419,22 @@ const setDirection = (dir: string) => {
|
||||
* @public
|
||||
*/
|
||||
export const addNamespace = function (id: string) {
|
||||
if (namespaces[id] !== undefined) {
|
||||
if (namespaces.has(id)) {
|
||||
return;
|
||||
}
|
||||
|
||||
namespaces[id] = {
|
||||
namespaces.set(id, {
|
||||
id: id,
|
||||
classes: {},
|
||||
classes: new Map(),
|
||||
children: {},
|
||||
domId: MERMAID_DOM_ID_PREFIX + id + '-' + namespaceCounter,
|
||||
} as NamespaceNode;
|
||||
} as NamespaceNode);
|
||||
|
||||
namespaceCounter++;
|
||||
};
|
||||
|
||||
const getNamespace = function (name: string): NamespaceNode {
|
||||
return namespaces[name];
|
||||
return namespaces.get(name)!;
|
||||
};
|
||||
|
||||
const getNamespaces = function (): NamespaceMap {
|
||||
@@ -447,18 +449,18 @@ const getNamespaces = function (): NamespaceMap {
|
||||
* @public
|
||||
*/
|
||||
export const addClassesToNamespace = function (id: string, classNames: string[]) {
|
||||
if (namespaces[id] === undefined) {
|
||||
if (!namespaces.has(id)) {
|
||||
return;
|
||||
}
|
||||
for (const name of classNames) {
|
||||
const { className } = splitClassNameAndType(name);
|
||||
classes[className].parent = id;
|
||||
namespaces[id].classes[className] = classes[className];
|
||||
classes.get(className)!.parent = id;
|
||||
namespaces.get(id)!.classes.set(className, classes.get(className)!);
|
||||
}
|
||||
};
|
||||
|
||||
export const setCssStyle = function (id: string, styles: string[]) {
|
||||
const thisClass = classes[id];
|
||||
const thisClass = classes.get(id);
|
||||
if (!styles || !thisClass) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import { parser } from './parser/classDiagram.jison';
|
||||
import classDb from './classDb.js';
|
||||
import { vi, describe, it, expect } from 'vitest';
|
||||
import type { ClassMap, NamespaceNode } from './classTypes.js';
|
||||
const spyOn = vi.spyOn;
|
||||
|
||||
const staticCssStyle = 'text-decoration:underline;';
|
||||
@@ -392,8 +393,8 @@ class C13["With Città foreign language"]
|
||||
Student "1" --o "1" IdCard : carries
|
||||
Student "1" --o "1" Bike : rides`);
|
||||
|
||||
expect(Object.keys(classDb.getClasses()).length).toBe(3);
|
||||
expect(classDb.getClasses().Student).toMatchInlineSnapshot(`
|
||||
expect(classDb.getClasses().size).toBe(3);
|
||||
expect(classDb.getClasses().get('Student')).toMatchInlineSnapshot(`
|
||||
{
|
||||
"annotations": [],
|
||||
"cssClasses": [],
|
||||
@@ -1539,12 +1540,12 @@ class Class2
|
||||
}`;
|
||||
parser.parse(str);
|
||||
|
||||
const testNamespace = parser.yy.getNamespace('Namespace1');
|
||||
const testClasses = parser.yy.getClasses();
|
||||
expect(Object.keys(testNamespace.classes).length).toBe(2);
|
||||
const testNamespace: NamespaceNode = parser.yy.getNamespace('Namespace1');
|
||||
const testClasses: ClassMap = parser.yy.getClasses();
|
||||
expect(testNamespace.classes.size).toBe(2);
|
||||
expect(Object.keys(testNamespace.children).length).toBe(0);
|
||||
expect(testNamespace.classes['Class1'].id).toBe('Class1');
|
||||
expect(Object.keys(testClasses).length).toBe(2);
|
||||
expect(testNamespace.classes.get('Class1')?.id).toBe('Class1');
|
||||
expect(testClasses.size).toBe(2);
|
||||
});
|
||||
|
||||
it('should add relations between classes of different namespaces', function () {
|
||||
@@ -1573,25 +1574,25 @@ class Class2
|
||||
const testNamespaceB = parser.yy.getNamespace('B');
|
||||
const testClasses = parser.yy.getClasses();
|
||||
const testRelations = parser.yy.getRelations();
|
||||
expect(Object.keys(testNamespaceA.classes).length).toBe(2);
|
||||
expect(testNamespaceA.classes['A1'].members[0].getDisplayDetails().displayText).toBe(
|
||||
expect(testNamespaceA.classes.size).toBe(2);
|
||||
expect(testNamespaceA.classes.get('A1').members[0].getDisplayDetails().displayText).toBe(
|
||||
'+foo : string'
|
||||
);
|
||||
expect(testNamespaceA.classes['A2'].members[0].getDisplayDetails().displayText).toBe(
|
||||
expect(testNamespaceA.classes.get('A2').members[0].getDisplayDetails().displayText).toBe(
|
||||
'+bar : int'
|
||||
);
|
||||
expect(Object.keys(testNamespaceB.classes).length).toBe(2);
|
||||
expect(testNamespaceB.classes['B1'].members[0].getDisplayDetails().displayText).toBe(
|
||||
expect(testNamespaceB.classes.size).toBe(2);
|
||||
expect(testNamespaceB.classes.get('B1').members[0].getDisplayDetails().displayText).toBe(
|
||||
'+foo : bool'
|
||||
);
|
||||
expect(testNamespaceB.classes['B2'].members[0].getDisplayDetails().displayText).toBe(
|
||||
expect(testNamespaceB.classes.get('B2').members[0].getDisplayDetails().displayText).toBe(
|
||||
'+bar : float'
|
||||
);
|
||||
expect(Object.keys(testClasses).length).toBe(4);
|
||||
expect(testClasses['A1'].parent).toBe('A');
|
||||
expect(testClasses['A2'].parent).toBe('A');
|
||||
expect(testClasses['B1'].parent).toBe('B');
|
||||
expect(testClasses['B2'].parent).toBe('B');
|
||||
expect(testClasses.size).toBe(4);
|
||||
expect(testClasses.get('A1').parent).toBe('A');
|
||||
expect(testClasses.get('A2').parent).toBe('A');
|
||||
expect(testClasses.get('B1').parent).toBe('B');
|
||||
expect(testClasses.get('B2').parent).toBe('B');
|
||||
expect(testRelations[0].id1).toBe('A1');
|
||||
expect(testRelations[0].id2).toBe('B1');
|
||||
expect(testRelations[1].id1).toBe('A2');
|
||||
|
||||
@@ -44,14 +44,11 @@ export const addNamespaces = function (
|
||||
_id: string,
|
||||
diagObj: any
|
||||
) {
|
||||
const keys = Object.keys(namespaces);
|
||||
log.info('keys:', keys);
|
||||
log.info('keys:', [...namespaces.keys()]);
|
||||
log.info(namespaces);
|
||||
|
||||
// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
|
||||
keys.forEach(function (id) {
|
||||
const vertex = namespaces[id];
|
||||
|
||||
namespaces.forEach(function (vertex) {
|
||||
// parent node must be one of [rect, roundedWithTitle, noteGroup, divider]
|
||||
const shape = 'rect';
|
||||
|
||||
@@ -89,16 +86,13 @@ export const addClasses = function (
|
||||
diagObj: any,
|
||||
parent?: string
|
||||
) {
|
||||
const keys = Object.keys(classes);
|
||||
log.info('keys:', keys);
|
||||
log.info('keys:', [...classes.keys()]);
|
||||
log.info(classes);
|
||||
|
||||
// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
|
||||
keys
|
||||
.filter((id) => classes[id].parent == parent)
|
||||
.forEach(function (id) {
|
||||
const vertex = classes[id];
|
||||
|
||||
[...classes.values()]
|
||||
.filter((vertex) => vertex.parent === parent)
|
||||
.forEach(function (vertex) {
|
||||
/**
|
||||
* Variable for storing the classes for the vertex
|
||||
*/
|
||||
@@ -187,7 +181,7 @@ export const addNotes = function (
|
||||
g.setNode(vertex.id, node);
|
||||
log.info('setNode', node);
|
||||
|
||||
if (!vertex.class || !(vertex.class in classes)) {
|
||||
if (!vertex.class || !classes.has(vertex.class)) {
|
||||
return;
|
||||
}
|
||||
const edgeId = startEdgeId + i;
|
||||
@@ -349,7 +343,7 @@ export const draw = async function (text: string, id: string, _version: string,
|
||||
}
|
||||
const root =
|
||||
securityLevel === 'sandbox'
|
||||
? select(sandboxElement.nodes()[0].contentDocument.body)
|
||||
? select(sandboxElement!.nodes()[0]!.contentDocument.body)
|
||||
: select('body');
|
||||
const svg = root.select(`[id="${id}"]`);
|
||||
|
||||
@@ -369,7 +363,8 @@ export const draw = async function (text: string, id: string, _version: string,
|
||||
|
||||
// Add label rects for non html labels
|
||||
if (!conf?.htmlLabels) {
|
||||
const doc = securityLevel === 'sandbox' ? sandboxElement.nodes()[0].contentDocument : document;
|
||||
const doc =
|
||||
securityLevel === 'sandbox' ? sandboxElement!.nodes()[0]!.contentDocument : document;
|
||||
const labels = doc.querySelectorAll('[id="' + id + '"] .edgeLabel .label');
|
||||
for (const label of labels) {
|
||||
// Get dimensions of label
|
||||
|
||||
@@ -175,10 +175,10 @@ export const draw = function (text, id, _version, diagObj) {
|
||||
});
|
||||
|
||||
const classes = diagObj.db.getClasses();
|
||||
const keys = Object.keys(classes);
|
||||
const keys = [...classes.keys()];
|
||||
|
||||
for (const key of keys) {
|
||||
const classDef = classes[key];
|
||||
const classDef = classes.get(key);
|
||||
const node = svgDraw.drawClass(diagram, classDef, conf, diagObj);
|
||||
idCache[node.id] = node;
|
||||
|
||||
@@ -216,7 +216,7 @@ export const draw = function (text, id, _version, diagObj) {
|
||||
// metadata about the node. In this case we're going to add labels to each of
|
||||
// our nodes.
|
||||
g.setNode(node.id, node);
|
||||
if (note.class && note.class in classes) {
|
||||
if (note.class && classes.has(note.class)) {
|
||||
g.setEdge(
|
||||
note.id,
|
||||
getGraphId(note.class),
|
||||
|
||||
@@ -161,5 +161,5 @@ export interface NamespaceNode {
|
||||
children: NamespaceMap;
|
||||
}
|
||||
|
||||
export type ClassMap = Record<string, ClassNode>;
|
||||
export type NamespaceMap = Record<string, NamespaceNode>;
|
||||
export type ClassMap = Map<string, ClassNode>;
|
||||
export type NamespaceMap = Map<string, NamespaceNode>;
|
||||
|
||||
16
packages/mermaid/src/diagrams/class/parser/class.spec.js
Normal file
16
packages/mermaid/src/diagrams/class/parser/class.spec.js
Normal file
@@ -0,0 +1,16 @@
|
||||
import { parser } from './classDiagram.jison';
|
||||
import classDb from '../classDb.js';
|
||||
|
||||
describe('class diagram', function () {
|
||||
beforeEach(function () {
|
||||
parser.yy = classDb;
|
||||
parser.yy.clear();
|
||||
});
|
||||
|
||||
describe('prototype properties', function () {
|
||||
it.each(['__proto__', 'constructor'])('should work with a %s property', function (prop) {
|
||||
expect(() => parser.parse(`classDiagram\nclass ${prop}`)).not.toThrow();
|
||||
expect(() => parser.parse(`classDiagram\nnamespace ${prop} {\n\tclass A\n}`)).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
getDiagramTitle,
|
||||
} from '../common/commonDb.js';
|
||||
|
||||
let entities = {};
|
||||
let entities = new Map();
|
||||
let relationships = [];
|
||||
|
||||
const Cardinality = {
|
||||
@@ -28,15 +28,15 @@ const Identification = {
|
||||
};
|
||||
|
||||
const addEntity = function (name, alias = undefined) {
|
||||
if (entities[name] === undefined) {
|
||||
entities[name] = { attributes: [], alias: alias };
|
||||
if (!entities.has(name)) {
|
||||
entities.set(name, { attributes: [], alias: alias });
|
||||
log.info('Added new entity :', name);
|
||||
} else if (entities[name] && !entities[name].alias && alias) {
|
||||
entities[name].alias = alias;
|
||||
} else if (entities.has(name) && !entities.get(name).alias && alias) {
|
||||
entities.get(name).alias = alias;
|
||||
log.info(`Add alias '${alias}' to entity '${name}'`);
|
||||
}
|
||||
|
||||
return entities[name];
|
||||
return entities.get(name);
|
||||
};
|
||||
|
||||
const getEntities = () => entities;
|
||||
@@ -75,7 +75,7 @@ const addRelationship = function (entA, rolA, entB, rSpec) {
|
||||
const getRelationships = () => relationships;
|
||||
|
||||
const clear = function () {
|
||||
entities = {};
|
||||
entities = new Map();
|
||||
relationships = [];
|
||||
commonClear();
|
||||
};
|
||||
|
||||
@@ -296,12 +296,12 @@ const drawAttributes = (groupNode, entityTextNode, attributes) => {
|
||||
* Use D3 to construct the svg elements for the entities
|
||||
*
|
||||
* @param svgNode The svg node that contains the diagram
|
||||
* @param entities The entities to be drawn
|
||||
* @param {Map<string, object>} entities The entities to be drawn
|
||||
* @param graph The graph that contains the vertex and edge definitions post-layout
|
||||
* @returns {object} The first entity that was inserted
|
||||
*/
|
||||
const drawEntities = function (svgNode, entities, graph) {
|
||||
const keys = Object.keys(entities);
|
||||
const keys = [...entities.keys()];
|
||||
let firstOne;
|
||||
|
||||
keys.forEach(function (entityName) {
|
||||
@@ -326,12 +326,12 @@ const drawEntities = function (svgNode, entities, graph) {
|
||||
.style('text-anchor', 'middle')
|
||||
.style('font-family', getConfig().fontFamily)
|
||||
.style('font-size', conf.fontSize + 'px')
|
||||
.text(entities[entityName].alias ?? entityName);
|
||||
.text(entities.get(entityName).alias ?? entityName);
|
||||
|
||||
const { width: entityWidth, height: entityHeight } = drawAttributes(
|
||||
groupNode,
|
||||
textNode,
|
||||
entities[entityName].attributes
|
||||
entities.get(entityName).attributes
|
||||
);
|
||||
|
||||
// Draw the rectangle - insert it before the text so that the text is not obscured
|
||||
|
||||
@@ -17,7 +17,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
const line2 = 'MAINLAND';
|
||||
erDiagram.parser.parse(`erDiagram\n${line1}\n${line2}`);
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(erDb.getRelationships().length).toBe(0);
|
||||
});
|
||||
|
||||
@@ -27,7 +27,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n ${name}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
describe('has non A-Za-z0-9_- chars', function () {
|
||||
@@ -47,7 +47,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n ${name}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
|
||||
@@ -55,21 +55,21 @@ describe('when parsing ER diagram it...', function () {
|
||||
const name = singleOccurrence;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
|
||||
it(`"${repeatedOccurrence}" repeated occurrence`, function () {
|
||||
const name = repeatedOccurrence;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
|
||||
it(`"${singleOccurrence}" ends with`, function () {
|
||||
const name = endsWith;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
|
||||
it(`"${cannontStartWith}" cannot start with the character`, function () {
|
||||
@@ -77,7 +77,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
});
|
||||
@@ -88,7 +88,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
const name = 'a' + allCombined;
|
||||
erDiagram.parser.parse(`erDiagram\n "${name}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(true);
|
||||
expect(entities.has(name)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -96,14 +96,14 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "Blo%rf"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
it('cannot contain \\ because it could start and escape code', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "Blo\\rf"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(name)).toBe(false);
|
||||
expect(entities.has(name)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
|
||||
@@ -114,7 +114,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(() => {
|
||||
erDiagram.parser.parse(`erDiagram\n "${badName}"\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(badName)).toBe(false);
|
||||
expect(entities.has(badName)).toBe(false);
|
||||
}).toThrow();
|
||||
});
|
||||
});
|
||||
@@ -124,14 +124,14 @@ describe('when parsing ER diagram it...', function () {
|
||||
const beyondEnglishName = 'DUCK-àáâäæãåā';
|
||||
erDiagram.parser.parse(`erDiagram\n${beyondEnglishName}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(beyondEnglishName)).toBe(true);
|
||||
expect(entities.has(beyondEnglishName)).toBe(true);
|
||||
});
|
||||
|
||||
it('can contain - _ without needing ""', function () {
|
||||
const hyphensUnderscore = 'DUCK-BILLED_PLATYPUS';
|
||||
erDiagram.parser.parse(`erDiagram\n${hyphensUnderscore}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(hyphensUnderscore)).toBe(true);
|
||||
expect(entities.has(hyphensUnderscore)).toBe(true);
|
||||
});
|
||||
|
||||
it('can have an alias', function () {
|
||||
@@ -139,8 +139,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
const alias = 'bar';
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}["${alias}"]\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(entity)).toBe(true);
|
||||
expect(entities[entity].alias).toBe(alias);
|
||||
expect(entities.has(entity)).toBe(true);
|
||||
expect(entities.get(entity).alias).toBe(alias);
|
||||
});
|
||||
|
||||
it('can have an alias even if the relationship is defined before class', function () {
|
||||
@@ -151,10 +151,10 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${firstEntity} ||--o| ${secondEntity} : rel\nclass ${firstEntity}["${alias}"]\n`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(firstEntity)).toBe(true);
|
||||
expect(entities.hasOwnProperty(secondEntity)).toBe(true);
|
||||
expect(entities[firstEntity].alias).toBe(alias);
|
||||
expect(entities[secondEntity].alias).toBeUndefined();
|
||||
expect(entities.has(firstEntity)).toBe(true);
|
||||
expect(entities.has(secondEntity)).toBe(true);
|
||||
expect(entities.get(firstEntity).alias).toBe(alias);
|
||||
expect(entities.get(secondEntity).alias).toBeUndefined();
|
||||
});
|
||||
|
||||
it('can have an alias even if the relationship is defined after class', function () {
|
||||
@@ -165,17 +165,17 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\nclass ${firstEntity}["${alias}"]\n${firstEntity} ||--o| ${secondEntity} : rel\n`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(firstEntity)).toBe(true);
|
||||
expect(entities.hasOwnProperty(secondEntity)).toBe(true);
|
||||
expect(entities[firstEntity].alias).toBe(alias);
|
||||
expect(entities[secondEntity].alias).toBeUndefined();
|
||||
expect(entities.has(firstEntity)).toBe(true);
|
||||
expect(entities.has(secondEntity)).toBe(true);
|
||||
expect(entities.get(firstEntity).alias).toBe(alias);
|
||||
expect(entities.get(secondEntity).alias).toBeUndefined();
|
||||
});
|
||||
|
||||
it('can start with an underscore', function () {
|
||||
const entity = '_foo';
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}\n`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty(entity)).toBe(true);
|
||||
expect(entities.has(entity)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -191,11 +191,11 @@ describe('when parsing ER diagram it...', function () {
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities[entity].attributes[0].attributeName).toBe('myBookTitle');
|
||||
expect(entities[entity].attributes[1].attributeName).toBe('MYBOOKSUBTITLE_1');
|
||||
expect(entities[entity].attributes[2].attributeName).toBe('author-ref[name](1)');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes[0].attributeName).toBe('myBookTitle');
|
||||
expect(entities.get(entity).attributes[1].attributeName).toBe('MYBOOKSUBTITLE_1');
|
||||
expect(entities.get(entity).attributes[2].attributeName).toBe('author-ref[name](1)');
|
||||
});
|
||||
|
||||
it('should allow asterisk at the start of attribute name', function () {
|
||||
@@ -204,8 +204,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{\n${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow asterisks at the start of attribute declared with type and name', () => {
|
||||
@@ -214,8 +214,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should not allow leading numbers, dashes or brackets', function () {
|
||||
@@ -236,8 +236,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a key', function () {
|
||||
@@ -246,8 +246,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a comment', function () {
|
||||
@@ -256,9 +256,9 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities[entity].attributes[0].attributeComment).toBe('comment');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
expect(entities.get(entity).attributes[0].attributeComment).toBe('comment');
|
||||
});
|
||||
|
||||
it('should allow an entity with a single attribute to be defined with a key and a comment', function () {
|
||||
@@ -267,8 +267,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute starting with fk, pk or uk and a comment', function () {
|
||||
@@ -282,7 +282,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1} \n\n${attribute2}\n${attribute3}\n${attribute4}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(4);
|
||||
expect(entities.get(entity).attributes.length).toBe(4);
|
||||
});
|
||||
|
||||
it('should allow an entity with attributes that have many constraints and comments', function () {
|
||||
@@ -297,14 +297,14 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n${attribute4}\n${attribute5}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes[0].attributeKeyTypeList).toEqual(['PK', 'FK']);
|
||||
expect(entities[entity].attributes[0].attributeComment).toBe('comment1');
|
||||
expect(entities[entity].attributes[1].attributeKeyTypeList).toEqual(['PK', 'UK', 'FK']);
|
||||
expect(entities[entity].attributes[2].attributeKeyTypeList).toEqual(['PK', 'UK']);
|
||||
expect(entities[entity].attributes[2].attributeComment).toBe('comment3');
|
||||
expect(entities[entity].attributes[3].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities[entity].attributes[4].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities[entity].attributes[4].attributeComment).toBe('comment5');
|
||||
expect(entities.get(entity).attributes[0].attributeKeyTypeList).toEqual(['PK', 'FK']);
|
||||
expect(entities.get(entity).attributes[0].attributeComment).toBe('comment1');
|
||||
expect(entities.get(entity).attributes[1].attributeKeyTypeList).toEqual(['PK', 'UK', 'FK']);
|
||||
expect(entities.get(entity).attributes[2].attributeKeyTypeList).toEqual(['PK', 'UK']);
|
||||
expect(entities.get(entity).attributes[2].attributeComment).toBe('comment3');
|
||||
expect(entities.get(entity).attributes[3].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities.get(entity).attributes[4].attributeKeyTypeList).toBeUndefined();
|
||||
expect(entities.get(entity).attributes[4].attributeComment).toBe('comment5');
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that has a generic type', function () {
|
||||
@@ -317,8 +317,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that is an array', function () {
|
||||
@@ -328,8 +328,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(2);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow an entity with attribute that is a limited length string', function () {
|
||||
@@ -339,10 +339,10 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(2);
|
||||
expect(entities[entity].attributes[0].attributeType).toBe('character(10)');
|
||||
expect(entities[entity].attributes[1].attributeType).toBe('varchar(5)');
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(2);
|
||||
expect(entities.get(entity).attributes[0].attributeType).toBe('character(10)');
|
||||
expect(entities.get(entity).attributes[1].attributeType).toBe('varchar(5)');
|
||||
});
|
||||
|
||||
it('should allow an entity with multiple attributes to be defined', function () {
|
||||
@@ -355,7 +355,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow attribute definitions to be split into multiple blocks', function () {
|
||||
@@ -368,7 +368,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
`erDiagram\n${entity} {\n${attribute1}\n}\n${entity} {\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
expect(entities.get(entity).attributes.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should allow an empty attribute block', function () {
|
||||
@@ -376,8 +376,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow an attribute block to start immediately after the entity name', function () {
|
||||
@@ -385,8 +385,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow an attribute block to be separated from the entity name by spaces', function () {
|
||||
@@ -394,8 +394,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities.hasOwnProperty('BOOK')).toBe(true);
|
||||
expect(entities[entity].attributes.length).toBe(0);
|
||||
expect(entities.has('BOOK')).toBe(true);
|
||||
expect(entities.get(entity).attributes.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should allow whitespace before and after attribute definitions', function () {
|
||||
@@ -404,8 +404,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n \n\n ${attribute}\n\n \n}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should allow no whitespace before and after attribute definitions', function () {
|
||||
@@ -414,8 +414,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity}{${attribute}}`);
|
||||
const entities = erDb.getEntities();
|
||||
expect(Object.keys(entities).length).toBe(1);
|
||||
expect(entities[entity].attributes.length).toBe(1);
|
||||
expect(entities.size).toBe(1);
|
||||
expect(entities.get(entity).attributes.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should associate two entities correctly', function () {
|
||||
@@ -423,8 +423,8 @@ describe('when parsing ER diagram it...', function () {
|
||||
const entities = erDb.getEntities();
|
||||
const relationships = erDb.getRelationships();
|
||||
|
||||
expect(entities.hasOwnProperty('CAR')).toBe(true);
|
||||
expect(entities.hasOwnProperty('DRIVER')).toBe(true);
|
||||
expect(entities.has('CAR')).toBe(true);
|
||||
expect(entities.has('DRIVER')).toBe(true);
|
||||
expect(relationships.length).toBe(1);
|
||||
expect(relationships[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(relationships[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@@ -437,7 +437,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse(`erDiagram\n${line1}\n${line2}`);
|
||||
const entities = erDb.getEntities();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(3);
|
||||
expect(entities.size).toBe(3);
|
||||
});
|
||||
|
||||
it('should create the role specified', function () {
|
||||
@@ -451,7 +451,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
|
||||
it('should allow recursive relationships', function () {
|
||||
erDiagram.parser.parse('erDiagram\nNODE ||--o{ NODE : "leads to"');
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(1);
|
||||
expect(erDb.getEntities().size).toBe(1);
|
||||
});
|
||||
|
||||
describe('accessible title and description', () => {
|
||||
@@ -491,7 +491,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
const entities = erDb.getEntities();
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(entities).length).toBe(2);
|
||||
expect(entities.size).toBe(2);
|
||||
expect(rels.length).toBe(2);
|
||||
});
|
||||
|
||||
@@ -507,7 +507,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@@ -517,7 +517,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@@ -527,7 +527,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@@ -537,7 +537,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@@ -547,7 +547,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|--|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@@ -557,7 +557,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -567,7 +567,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -577,7 +577,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@@ -587,7 +587,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@@ -597,7 +597,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||..|| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@@ -607,7 +607,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||--o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@@ -617,7 +617,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA |o..o| B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@@ -627,7 +627,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -637,7 +637,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@@ -647,7 +647,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }o--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -657,7 +657,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA }|..o{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@@ -667,7 +667,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA one or zero to many B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
@@ -677,7 +677,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA one or many optionally to zero or one B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
@@ -687,7 +687,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA zero or more to zero or many B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -697,7 +697,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA many(0) to many(1) B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -707,7 +707,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA many optionally to one B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -717,7 +717,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA only one optionally to 1+ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONE_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
@@ -727,7 +727,7 @@ describe('when parsing ER diagram it...', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA 0+ optionally to 1 B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
|
||||
expect(Object.keys(erDb.getEntities()).length).toBe(2);
|
||||
expect(erDb.getEntities().size).toBe(2);
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
@@ -786,10 +786,21 @@ describe('when parsing ER diagram it...', function () {
it('should represent parent-child relationship correctly', function () {
erDiagram.parser.parse('erDiagram\nPROJECT u--o{ TEAM_MEMBER : "parent"');
const rels = erDb.getRelationships();
expect(Object.keys(erDb.getEntities()).length).toBe(2);
expect(erDb.getEntities().size).toBe(2);
expect(rels.length).toBe(1);
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.MD_PARENT);
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
});
});

describe('prototype properties', function () {
it.each(['__proto__', 'constructor', 'prototype'])(
'should work with a %s property',
function (prop) {
expect(() =>
erDiagram.parser.parse(`erDiagram\n${prop} ||--|{ ORDER : place`)
).not.toThrow();
}
);
});
});
@@ -51,17 +51,17 @@ describe('flow db addClass', () => {
flowDb.addClass('a,b', ['stroke-width: 8px']);
const classes = flowDb.getClasses();

expect(classes.hasOwnProperty('a')).toBe(true);
expect(classes.hasOwnProperty('b')).toBe(true);
expect(classes['a']['styles']).toEqual(['stroke-width: 8px']);
expect(classes['b']['styles']).toEqual(['stroke-width: 8px']);
expect(classes.has('a')).toBe(true);
expect(classes.has('b')).toBe(true);
expect(classes.get('a')?.styles).toEqual(['stroke-width: 8px']);
expect(classes.get('b')?.styles).toEqual(['stroke-width: 8px']);
});

it('should detect single class', () => {
flowDb.addClass('a', ['stroke-width: 8px']);
const classes = flowDb.getClasses();

expect(classes.hasOwnProperty('a')).toBe(true);
expect(classes['a']['styles']).toEqual(['stroke-width: 8px']);
expect(classes.has('a')).toBe(true);
expect(classes.get('a')?.styles).toEqual(['stroke-width: 8px']);
});
});
@@ -2,6 +2,7 @@ import { select } from 'd3';
import utils from '../../utils.js';
import { getConfig, defaultConfig } from '../../diagram-api/diagramAPI.js';
import common from '../common/common.js';
import type { LayoutData, LayoutMethod, Node, Edge } from '../../rendering-util/types.js';
import { log } from '../../logger.js';
import {
setAccTitle,
@@ -17,12 +18,12 @@ import type { FlowVertex, FlowClass, FlowSubGraph, FlowText, FlowEdge, FlowLink
const MERMAID_DOM_ID_PREFIX = 'flowchart-';
let vertexCounter = 0;
let config = getConfig();
let vertices: Record<string, FlowVertex> = {};
let vertices: Map<string, FlowVertex> = new Map();
let edges: FlowEdge[] & { defaultInterpolate?: string; defaultStyle?: string[] } = [];
let classes: Record<string, FlowClass> = {};
let classes: Map<string, FlowClass> = new Map();
let subGraphs: FlowSubGraph[] = [];
let subGraphLookup: Record<string, FlowSubGraph> = {};
let tooltips: Record<string, string> = {};
let subGraphLookup: Map<string, FlowSubGraph> = new Map();
let tooltips: Map<string, string> = new Map();
let subCount = 0;
let firstGraphFlag = true;
let direction: string;
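// Illustrative sketch (not part of the diff above): the hunk above replaces the
// flowDb Record lookups with Map. For callers the migration looks like this,
// assuming a FlowVertex-like shape; all names below are made up for the example.
interface SketchVertex {
  id: string;
  styles: string[];
}
const sketchVertices = new Map<string, SketchVertex>();
sketchVertices.set('A', { id: 'A', styles: [] }); // was: vertices['A'] = { ... }
sketchVertices.get('A')?.styles.push('fill:#fff'); // was: vertices['A'].styles.push(...)
sketchVertices.has('A'); // was: vertices.hasOwnProperty('A')
sketchVertices.get('__proto__'); // undefined: Map keys never collide with Object.prototype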
@@ -40,10 +41,9 @@ const sanitizeText = (txt: string) => common.sanitizeText(txt, config);
 * @param id - id of the node
 */
export const lookUpDomId = function (id: string) {
const vertexKeys = Object.keys(vertices);
for (const vertexKey of vertexKeys) {
if (vertices[vertexKey].id === id) {
return vertices[vertexKey].domId;
for (const vertex of vertices.values()) {
if (vertex.id === id) {
return vertex.domId;
}
}
return id;
@@ -67,50 +67,53 @@ export const addVertex = function (
}
let txt;

if (vertices[id] === undefined) {
vertices[id] = {
let vertex = vertices.get(id);
if (vertex === undefined) {
vertex = {
id,
labelType: 'text',
domId: MERMAID_DOM_ID_PREFIX + id + '-' + vertexCounter,
styles: [],
classes: [],
};
vertices.set(id, vertex);
}
vertexCounter++;

if (textObj !== undefined) {
config = getConfig();
txt = sanitizeText(textObj.text.trim());
vertices[id].labelType = textObj.type;
vertex.labelType = textObj.type;
// strip quotes if string starts and ends with a quote
if (txt[0] === '"' && txt[txt.length - 1] === '"') {
txt = txt.substring(1, txt.length - 1);
}
vertices[id].text = txt;
vertex.text = txt;
} else {
if (vertices[id].text === undefined) {
vertices[id].text = id;
if (vertex.text === undefined) {
vertex.text = id;
}
}
if (type !== undefined) {
vertices[id].type = type;
vertex.type = type;
}
if (style !== undefined && style !== null) {
style.forEach(function (s) {
vertices[id].styles.push(s);
vertex.styles.push(s);
});
}
if (classes !== undefined && classes !== null) {
classes.forEach(function (s) {
vertices[id].classes.push(s);
vertex.classes.push(s);
});
}
if (dir !== undefined) {
vertices[id].dir = dir;
vertex.dir = dir;
}
if (vertices[id].props === undefined) {
vertices[id].props = props;
if (vertex.props === undefined) {
vertex.props = props;
} else if (props !== undefined) {
Object.assign(vertices[id].props, props);
Object.assign(vertex.props, props);
}
};
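// Illustrative sketch (assumption, not taken from the diff): the rewritten
// addVertex above follows a get-or-create pattern on the Map. A minimal
// standalone version of that pattern, with invented names:
function getOrCreate<K, V>(map: Map<K, V>, key: K, create: () => V): V {
  let value = map.get(key);
  if (value === undefined) {
    value = create();
    map.set(key, value); // register the new entry once, then mutate it freely
  }
  return value;
}

// Usage: every later assignment mutates the same object held by the Map.
const demoVertices = new Map<string, { id: string; classes: string[] }>();
const demoNode = getOrCreate(demoVertices, 'B', () => ({ id: 'B', classes: [] }));
demoNode.classes.push('clickable');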
@@ -208,17 +211,19 @@ export const updateLink = function (positions: ('default' | number)[], style: st

export const addClass = function (ids: string, style: string[]) {
ids.split(',').forEach(function (id) {
if (classes[id] === undefined) {
classes[id] = { id, styles: [], textStyles: [] };
let classNode = classes.get(id);
if (classNode === undefined) {
classNode = { id, styles: [], textStyles: [] };
classes.set(id, classNode);
}

if (style !== undefined && style !== null) {
style.forEach(function (s) {
if (s.match('color')) {
const newStyle = s.replace('fill', 'bgFill').replace('color', 'fill');
classes[id].textStyles.push(newStyle);
classNode.textStyles.push(newStyle);
}
classes[id].styles.push(s);
classNode.styles.push(s);
});
}
});
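// Illustrative sketch (assumption about intent, not part of the diff): addClass
// above derives a text style from every style string that mentions 'color',
// swapping 'fill' for 'bgFill' and 'color' for 'fill' so the label text picks
// up the colour while the shape keeps its own fill. In isolation:
const styleIn = 'color:#ff0000';
const derivedTextStyle = styleIn.replace('fill', 'bgFill').replace('color', 'fill');
// derivedTextStyle === 'fill:#ff0000' - pushed to textStyles, while styleIn stays in styles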
@@ -255,11 +260,13 @@ export const setDirection = function (dir: string) {
 */
export const setClass = function (ids: string, className: string) {
for (const id of ids.split(',')) {
if (vertices[id]) {
vertices[id].classes.push(className);
const vertex = vertices.get(id);
if (vertex) {
vertex.classes.push(className);
}
if (subGraphLookup[id]) {
subGraphLookup[id].classes.push(className);
const subGraph = subGraphLookup.get(id);
if (subGraph) {
subGraph.classes.push(className);
}
}
};
@@ -270,7 +277,7 @@ const setTooltip = function (ids: string, tooltip: string) {
}
tooltip = sanitizeText(tooltip);
for (const id of ids.split(',')) {
tooltips[version === 'gen-1' ? lookUpDomId(id) : id] = tooltip;
tooltips.set(version === 'gen-1' ? lookUpDomId(id) : id, tooltip);
}
};
@@ -303,8 +310,9 @@ const setClickFun = function (id: string, functionName: string, functionArgs: st
argList.push(id);
}

if (vertices[id] !== undefined) {
vertices[id].haveCallback = true;
const vertex = vertices.get(id);
if (vertex) {
vertex.haveCallback = true;
funs.push(function () {
const elem = document.querySelector(`[id="${domId}"]`);
if (elem !== null) {
@@ -329,19 +337,17 @@ const setClickFun = function (id: string, functionName: string, functionArgs: st
 */
export const setLink = function (ids: string, linkStr: string, target: string) {
ids.split(',').forEach(function (id) {
if (vertices[id] !== undefined) {
vertices[id].link = utils.formatUrl(linkStr, config);
vertices[id].linkTarget = target;
const vertex = vertices.get(id);
if (vertex !== undefined) {
vertex.link = utils.formatUrl(linkStr, config);
vertex.linkTarget = target;
}
});
setClass(ids, 'clickable');
};

export const getTooltip = function (id: string) {
if (tooltips.hasOwnProperty(id)) {
return tooltips[id];
}
return undefined;
return tooltips.get(id);
};

/**
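// Illustrative sketch (not part of the diff): with a Map the explicit
// hasOwnProperty guard in getTooltip becomes unnecessary, because get()
// already returns undefined for unknown ids.
const sketchTooltips = new Map<string, string>();
sketchTooltips.set('node1', 'Click for details');
sketchTooltips.get('node1'); // 'Click for details'
sketchTooltips.get('missing'); // undefined - same result the old guard produced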
@@ -435,14 +441,14 @@ funs.push(setupToolTips);
 *
 */
export const clear = function (ver = 'gen-1') {
vertices = {};
classes = {};
vertices = new Map();
classes = new Map();
edges = [];
funs = [setupToolTips];
subGraphs = [];
subGraphLookup = {};
subGraphLookup = new Map();
subCount = 0;
tooltips = {};
tooltips = new Map();
firstGraphFlag = true;
version = ver;
config = getConfig();
@@ -516,7 +522,7 @@ export const addSubGraph = function (
// Remove the members in the new subgraph if they already belong to another subgraph
subGraph.nodes = makeUniq(subGraph, subGraphs).nodes;
subGraphs.push(subGraph);
subGraphLookup[id] = subGraph;
subGraphLookup.set(id, subGraph);
return id;
};
@@ -750,11 +756,55 @@ export const lex = {
firstGraph,
};

const getTypeFromVertex = (vertex: FlowVertex) => {
if (vertex.type === 'square') {
return 'squareRect';
}
if (vertex.type === 'round') {
return 'roundedRect';
}

return vertex.type || 'squareRect';
};

export const getData = () => {
const config = getConfig();
const nodes: Node[] = [];
const edges: Edge[] = [];

// extract(getRootDocV2());
// const diagramStates = getStates();
const useRough = config.look === 'handdrawn';
const n = getVertices();
n.forEach((vertex) => {
const node: Node = {
id: vertex.id,
label: vertex.text,
labelStyle: '',
padding: config.flowchart?.padding || 8,
cssStyles: vertex.styles.join(' '),
cssClasses: vertex.classes.join(' '),
shape: getTypeFromVertex(vertex),
dir: vertex.dir,
domId: vertex.domId,
type: undefined,
isGroup: false,
useRough,
};
nodes.push(node);
});

//const useRough = config.look === 'handdrawn';

return { nodes, edges, other: {}, config };
};

export default {
defaultConfig: () => defaultConfig.flowchart,
setAccTitle,
getAccTitle,
getAccDescription,
getData,
setAccDescription,
addVertex,
lookUpDomId,
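// Illustrative sketch (assumption, simplified from the getData shown above):
// the unified rendering path turns each FlowVertex held in the Map into a
// layout node. A trimmed-down version of that mapping, with a reduced node
// shape invented for the example:
interface SketchLayoutNode {
  id: string;
  label?: string;
  shape: string;
  cssClasses: string;
}
function toLayoutNodes(
  verticesIn: Map<string, { id: string; text?: string; type?: string; classes: string[] }>
): SketchLayoutNode[] {
  const nodes: SketchLayoutNode[] = [];
  verticesIn.forEach((vertex) => {
    nodes.push({
      id: vertex.id,
      label: vertex.text,
      // fall back to a plain rectangle when no explicit shape was parsed
      shape: vertex.type === 'round' ? 'roundedRect' : vertex.type || 'squareRect',
      cssClasses: vertex.classes.join(' '),
    });
  });
  return nodes;
}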
@@ -1,7 +1,8 @@
// @ts-ignore: JISON doesn't support types
import flowParser from './parser/flow.jison';
import flowDb from './flowDb.js';
import flowRendererV2 from './flowRenderer-v2.js';
// import flowRendererV2 from './flowRenderer-v2.js';
import flowRendererV3 from './flowRenderer-v3-unified.js';
import flowStyles from './styles.js';
import type { MermaidConfig } from '../../config.type.js';
import { setConfig } from '../../diagram-api/diagramAPI.js';
@@ -9,7 +10,8 @@ import { setConfig } from '../../diagram-api/diagramAPI.js';
export const diagram = {
parser: flowParser,
db: flowDb,
renderer: flowRendererV2,
// renderer: flowRendererV2,
renderer: flowRendererV3,
styles: flowStyles,
init: (cnf: MermaidConfig) => {
if (!cnf.flowchart) {
@@ -18,7 +20,7 @@ export const diagram = {
cnf.flowchart.arrowMarkerAbsolute = cnf.arrowMarkerAbsolute;
// flowchart-v2 uses dagre-wrapper, which doesn't have access to flowchart cnf
setConfig({ flowchart: { arrowMarkerAbsolute: cnf.arrowMarkerAbsolute } });
flowRendererV2.setConf(cnf.flowchart);
flowRendererV3.setConf(cnf.flowchart);
flowDb.clear();
flowDb.setGen('gen-2');
},
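// Illustrative note (not part of the diff): the change above only swaps which
// renderer module is registered for the flowchart diagram type; the diagram
// definition shape stays { parser, db, renderer, styles, init }, so switching
// between flowRenderer-v2 and flowRenderer-v3-unified is confined to the
// `renderer` field and the matching setConf call in init.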
@@ -0,0 +1,25 @@
// @ts-ignore: JISON doesn't support types
import flowParser from './parser/flow.jison';
import flowDb from './flowDb.js';
import flowRendererV2 from './flowRenderer-v2.js';
import flowStyles from './styles.js';
import type { MermaidConfig } from '../../config.type.js';
import { setConfig } from '../../diagram-api/diagramAPI.js';

export const diagram = {
parser: flowParser,
db: flowDb,
renderer: flowRendererV2,
styles: flowStyles,
init: (cnf: MermaidConfig) => {
if (!cnf.flowchart) {
cnf.flowchart = {};
}
cnf.flowchart.arrowMarkerAbsolute = cnf.arrowMarkerAbsolute;
// flowchart-v2 uses dagre-wrapper, which doesn't have access to flowchart cnf
setConfig({ flowchart: { arrowMarkerAbsolute: cnf.arrowMarkerAbsolute } });
flowRendererV2.setConf(cnf.flowchart);
flowDb.clear();
flowDb.setGen('gen-2');
},
};
@@ -29,11 +29,13 @@ export const setConf = function (cnf) {
 */
export const addVertices = async function (vert, g, svgId, root, doc, diagObj) {
const svg = root.select(`[id="${svgId}"]`);
const keys = Object.keys(vert);
// console.log('SVG:', svg, svg.node(), 'root:', root, root.node());

const keys = vert.keys();

// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
for (const id of keys) {
const vertex = vert[id];
const vertex = vert.get(id);

/**
 * Variable for storing the classes for the vertex
@@ -341,7 +343,7 @@ export const addEdges = async function (edges, g, diagObj) {
 *
 * @param text
 * @param diagObj
 * @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
 * @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
 */
export const getClasses = function (text, diagObj) {
return diagObj.db.getClasses();
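// Illustrative sketch (not part of the diff): vert.keys() returns an iterator,
// which is fine for the for...of loop in addVertices above, but an iterator has
// no forEach. Where the renderer needs array methods it spreads the keys first:
const sketchVert = new Map<string, { link?: string }>([
  ['A', { link: 'https://example.com' }],
  ['B', {}],
]);
for (const nodeId of sketchVert.keys()) {
  sketchVert.get(nodeId)?.link; // iterator form: works directly in for...of
}
[...sketchVert.keys()].forEach((nodeId) => {
  sketchVert.get(nodeId)?.link; // array form: needed for forEach/map/filter
});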
@@ -468,9 +470,9 @@ export const draw = async function (text, id, _version, diagObj) {
}

// If node has a link, wrap it in an anchor SVG object.
const keys = Object.keys(vert);
keys.forEach(function (key) {
const vertex = vert[key];
const keys = [...vert.keys()];
keys.forEach((key) => {
const vertex = vert.get(key);

if (vertex.link) {
const node = select('#' + id + ' [id="' + key + '"]');
@@ -0,0 +1,81 @@
import { log } from '../../logger.js';
import type { DiagramStyleClassDef } from '../../diagram-api/types.js';
import type { LayoutData, LayoutMethod } from '../../rendering-util/types.js';
import { getConfig } from '../../diagram-api/diagramAPI.js';
import { render } from '../../rendering-util/render.js';
import { getDiagramElements } from '../../rendering-util/insertElementsForSize.js';
import { setupViewPortForSVG } from '../../rendering-util/setupViewPortForSVG.js';
import { getDirection } from './flowDb.js';

import utils from '../../utils.js';

// Configuration
const conf: Record<string, any> = {};

export const setConf = function (cnf: Record<string, any>) {
const keys = Object.keys(cnf);
for (const key of keys) {
conf[key] = cnf[key];
}
};

export const getClasses = function (
text: string,
diagramObj: any
): Record<string, DiagramStyleClassDef> {
// diagramObj.db.extract(diagramObj.db.getRootDocV2());
return diagramObj.db.getClasses();
};

export const draw = async function (text: string, id: string, _version: string, diag: any) {
log.info('REF0:');
log.info('Drawing state diagram (v2)', id);
const { securityLevel, state: conf, layout } = getConfig();

const DIR = getDirection();

// The getData method provided in all supported diagrams is used to extract the data from the parsed structure
// into the Layout data format
console.log('Before getData: ');
const data4Layout = diag.db.getData() as LayoutData;
console.log('Data: ', data4Layout);
// Create the root SVG - the element is the div containing the SVG element
const { element, svg } = getDiagramElements(id, securityLevel);

// // For some diagrams this call is not needed, but in the state diagram it is
// await insertElementsForSize(element, data4Layout);

// console.log('data4Layout:', data4Layout);

// // Now we have layout data with real sizes, we can perform the layout
// const data4Rendering = doLayout(data4Layout, id, _version, 'dagre-wrapper');

// // The performRender method provided in all supported diagrams is used to render the data
// performRender(data4Rendering);

data4Layout.type = diag.type;
// data4Layout.layoutAlgorithm = 'dagre-wrapper';
// data4Layout.layoutAlgorithm = 'elk';
data4Layout.layoutAlgorithm = layout;
data4Layout.direction = DIR;
data4Layout.nodeSpacing = conf?.nodeSpacing || 50;
data4Layout.rankSpacing = conf?.rankSpacing || 50;
data4Layout.markers = ['barb'];
data4Layout.diagramId = id;
console.log('REF1:', data4Layout);
await render(data4Layout, svg, element);
const padding = 8;
utils.insertTitle(
element,
'statediagramTitleText',
conf?.titleTopMargin || 0,
diag.db.getDiagramTitle()
);
setupViewPortForSVG(svg, padding, 'flowchart', conf?.useMaxWidth || false);
};

export default {
setConf,
getClasses,
draw,
};
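// Illustrative summary (not part of the diff): the unified renderer above wires
// the common three-step flow of the layout-data pipeline -
//   1. diag.db.getData() builds the LayoutData (nodes, edges, config) from flowDb
//   2. render(data4Layout, svg, element) runs the selected layout algorithm and draws
//   3. setupViewPortForSVG(svg, padding, ...) sizes the viewBox around the result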
@@ -268,7 +268,7 @@ export const addEdges = async function (edges, g, diagObj) {
|
||||
*
|
||||
* @param text
|
||||
* @param diagObj
|
||||
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
|
||||
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles
|
||||
*/
|
||||
export const getClasses = function (text, diagObj) {
|
||||
log.info('Extracting classes');
|
||||
@@ -455,9 +455,9 @@ export const draw = async function (text, id, _version, diagObj) {
|
||||
setupGraphViewbox(g, svg, conf.diagramPadding, conf.useMaxWidth);
|
||||
|
||||
// If node has a link, wrap it in an anchor SVG object.
|
||||
const keys = Object.keys(vert);
|
||||
const keys = [...vert.keys()];
|
||||
keys.forEach(function (key) {
|
||||
const vertex = vert[key];
|
||||
const vertex = vert.get(key);
|
||||
|
||||
if (vertex.link) {
|
||||
const node = root.select('#' + id + ' [id="' + diagObj.db.lookUpDomId(key) + '"]');
|
||||
|
||||
@@ -18,8 +18,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -38,8 +38,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('LR');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -58,8 +58,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('RL');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -78,8 +78,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('BT');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -99,8 +99,8 @@ describe('[Arrows] when parsing', () => {
|
||||
|
||||
expect(direction).toBe('TB');
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -116,8 +116,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -133,8 +133,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -150,8 +150,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[1].start).toBe('B');
|
||||
expect(edges[1].end).toBe('C');
|
||||
@@ -169,8 +169,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -186,8 +186,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -203,8 +203,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -220,8 +220,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -237,8 +237,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -254,8 +254,8 @@ describe('[Arrows] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
||||
@@ -19,8 +19,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -34,8 +34,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -49,8 +49,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -64,8 +64,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -79,8 +79,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -94,8 +94,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -109,8 +109,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -124,8 +124,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -143,8 +143,8 @@ describe('[Comments] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
||||
@@ -48,8 +48,6 @@ describe('[Edges] when parsing', () => {
|
||||
|
||||
it('should handle open ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A---B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_open');
|
||||
@@ -57,8 +55,6 @@ describe('[Edges] when parsing', () => {
|
||||
|
||||
it('should handle cross ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A--xB;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
@@ -66,8 +62,6 @@ describe('[Edges] when parsing', () => {
|
||||
|
||||
it('should handle open ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A--oB;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_circle');
|
||||
@@ -81,8 +75,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -99,8 +93,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -119,8 +113,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -139,8 +133,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -164,8 +158,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -183,8 +177,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -202,8 +196,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -221,8 +215,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -240,8 +234,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -259,8 +253,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -278,8 +272,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -297,8 +291,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -316,8 +310,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -335,8 +329,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -354,8 +348,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -373,8 +367,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -392,8 +386,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -411,8 +405,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -430,8 +424,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -449,8 +443,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -468,8 +462,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -487,8 +481,8 @@ describe('[Edges] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(1);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
||||
@@ -23,7 +23,7 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
expect(edges[0].type).toBe('arrow_point');
|
||||
expect(edges.length).toBe(47917);
|
||||
expect(Object.keys(vert).length).toBe(2);
|
||||
expect(vert.size).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -19,12 +19,12 @@ A["\`The cat in **the** hat\`"]-- "\`The *bat* in the chat\`" -->B["The dog in t
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['A'].text).toBe('The cat in **the** hat');
|
||||
expect(vert['A'].labelType).toBe('markdown');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['B'].text).toBe('The dog in the hog');
|
||||
expect(vert['B'].labelType).toBe('string');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('A').text).toBe('The cat in **the** hat');
|
||||
expect(vert.get('A').labelType).toBe('markdown');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('B').text).toBe('The dog in the hog');
|
||||
expect(vert.get('B').labelType).toBe('string');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
||||
@@ -43,7 +43,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['A'].styles.length).toBe(0);
|
||||
expect(vert.get('A').styles.length).toBe(0);
|
||||
});
|
||||
it('should handle a single node with white space after it (SN1)', function () {
|
||||
// Silly but syntactically correct
|
||||
@@ -53,7 +53,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['A'].styles.length).toBe(0);
|
||||
expect(vert.get('A').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single square node', function () {
|
||||
@@ -64,8 +64,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].styles.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('square');
|
||||
expect(vert.get('a').styles.length).toBe(0);
|
||||
expect(vert.get('a').type).toBe('square');
|
||||
});
|
||||
|
||||
it('should handle a single round square node', function () {
|
||||
@@ -76,8 +76,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].styles.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('square');
|
||||
expect(vert.get('a').styles.length).toBe(0);
|
||||
expect(vert.get('a').type).toBe('square');
|
||||
});
|
||||
|
||||
it('should handle a single circle node', function () {
|
||||
@@ -88,7 +88,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('circle');
|
||||
expect(vert.get('a').type).toBe('circle');
|
||||
});
|
||||
|
||||
it('should handle a single round node', function () {
|
||||
@@ -99,7 +99,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('round');
|
||||
expect(vert.get('a').type).toBe('round');
|
||||
});
|
||||
|
||||
it('should handle a single odd node', function () {
|
||||
@@ -110,7 +110,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('odd');
|
||||
expect(vert.get('a').type).toBe('odd');
|
||||
});
|
||||
|
||||
it('should handle a single diamond node', function () {
|
||||
@@ -121,7 +121,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('diamond');
|
||||
expect(vert.get('a').type).toBe('diamond');
|
||||
});
|
||||
|
||||
it('should handle a single diamond node with whitespace after it', function () {
|
||||
@@ -132,7 +132,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('diamond');
|
||||
expect(vert.get('a').type).toBe('diamond');
|
||||
});
|
||||
|
||||
it('should handle a single diamond node with html in it (SN3)', function () {
|
||||
@@ -143,8 +143,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('diamond');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('diamond');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single hexagon node', function () {
|
||||
@@ -155,7 +155,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('hexagon');
|
||||
expect(vert.get('a').type).toBe('hexagon');
|
||||
});
|
||||
|
||||
it('should handle a single hexagon node with html in it', function () {
|
||||
@@ -166,8 +166,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('hexagon');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('hexagon');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single round node with html in it', function () {
|
||||
@@ -178,8 +178,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('round');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('round');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single double circle node', function () {
|
||||
@@ -190,7 +190,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('doublecircle');
|
||||
expect(vert.get('a').type).toBe('doublecircle');
|
||||
});
|
||||
|
||||
it('should handle a single double circle node with whitespace after it', function () {
|
||||
@@ -201,7 +201,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('doublecircle');
|
||||
expect(vert.get('a').type).toBe('doublecircle');
|
||||
});
|
||||
|
||||
it('should handle a single double circle node with html in it (SN3)', function () {
|
||||
@@ -212,8 +212,8 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['a'].type).toBe('doublecircle');
|
||||
expect(vert['a'].text).toBe('A <br> end');
|
||||
expect(vert.get('a').type).toBe('doublecircle');
|
||||
expect(vert.get('a').text).toBe('A <br> end');
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics starting on a char', function () {
|
||||
@@ -224,7 +224,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['id1'].styles.length).toBe(0);
|
||||
expect(vert.get('id1').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single node with a single digit', function () {
|
||||
@@ -235,7 +235,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['1'].text).toBe('1');
|
||||
expect(vert.get('1').text).toBe('1');
|
||||
});
|
||||
|
||||
it('should handle a single node with a single digit in a subgraph', function () {
|
||||
@@ -247,7 +247,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['1'].text).toBe('1');
|
||||
expect(vert.get('1').text).toBe('1');
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics starting on a num', function () {
|
||||
@@ -258,7 +258,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['1id'].styles.length).toBe(0);
|
||||
expect(vert.get('1id').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics containing a minus sign', function () {
|
||||
@@ -269,7 +269,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['i-d'].styles.length).toBe(0);
|
||||
expect(vert.get('i-d').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a single node with alphanumerics containing a underscore sign', function () {
|
||||
@@ -280,33 +280,33 @@ describe('[Singlenodes] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges.length).toBe(0);
|
||||
expect(vert['i_d'].styles.length).toBe(0);
|
||||
expect(vert.get('i_d').styles.length).toBe(0);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle keywords between dashes "-"', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;a-${keyword}-node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`a-${keyword}-node`].text).toBe(`a-${keyword}-node`);
|
||||
expect(vert.get(`a-${keyword}-node`).text).toBe(`a-${keyword}-node`);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle keywords between periods "."', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;a.${keyword}.node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`a.${keyword}.node`].text).toBe(`a.${keyword}.node`);
|
||||
expect(vert.get(`a.${keyword}.node`).text).toBe(`a.${keyword}.node`);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle keywords between underscores "_"', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;a_${keyword}_node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`a_${keyword}_node`].text).toBe(`a_${keyword}_node`);
|
||||
expect(vert.get(`a_${keyword}_node`).text).toBe(`a_${keyword}_node`);
|
||||
});
|
||||
|
||||
it.each(keywords)('should handle nodes ending in %s', function (keyword) {
|
||||
const res = flow.parser.parse(`graph TD;node_${keyword};node.${keyword};node-${keyword};`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`node_${keyword}`].text).toBe(`node_${keyword}`);
|
||||
expect(vert[`node.${keyword}`].text).toBe(`node.${keyword}`);
|
||||
expect(vert[`node-${keyword}`].text).toBe(`node-${keyword}`);
|
||||
expect(vert.get(`node_${keyword}`).text).toBe(`node_${keyword}`);
|
||||
expect(vert.get(`node.${keyword}`).text).toBe(`node.${keyword}`);
|
||||
expect(vert.get(`node-${keyword}`).text).toBe(`node-${keyword}`);
|
||||
});
|
||||
|
||||
const errorKeywords = [
|
||||
@@ -337,9 +337,9 @@ describe('[Singlenodes] when parsing', () => {
|
||||
it.each(workingKeywords)('should parse node beginning with %s', function (keyword) {
|
||||
flow.parser.parse(`graph TD; ${keyword}.node;${keyword}-node;${keyword}/node;`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`${keyword}.node`].text).toBe(`${keyword}.node`);
|
||||
expect(vert[`${keyword}-node`].text).toBe(`${keyword}-node`);
|
||||
expect(vert[`${keyword}/node`].text).toBe(`${keyword}/node`);
|
||||
expect(vert.get(`${keyword}.node`).text).toBe(`${keyword}.node`);
|
||||
expect(vert.get(`${keyword}-node`).text).toBe(`${keyword}-node`);
|
||||
expect(vert.get(`${keyword}/node`).text).toBe(`${keyword}/node`);
|
||||
});
|
||||
|
||||
it.each(specialChars)(
|
||||
@@ -347,7 +347,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
function (specialChar) {
|
||||
flow.parser.parse(`graph TD; ${specialChar} --> A`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`${specialChar}`].text).toBe(`${specialChar}`);
|
||||
expect(vert.get(`${specialChar}`).text).toBe(`${specialChar}`);
|
||||
}
|
||||
);
|
||||
|
||||
@@ -356,7 +356,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
function (specialChar) {
|
||||
flow.parser.parse(`graph TD; ${specialChar}node --> A`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`${specialChar}node`].text).toBe(`${specialChar}node`);
|
||||
expect(vert.get(`${specialChar}node`).text).toBe(`${specialChar}node`);
|
||||
}
|
||||
);
|
||||
|
||||
@@ -365,7 +365,7 @@ describe('[Singlenodes] when parsing', () => {
|
||||
function (specialChar) {
|
||||
flow.parser.parse(`graph TD; node${specialChar} --> A`);
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
expect(vert[`node${specialChar}`].text).toBe(`node${specialChar}`);
|
||||
expect(vert.get(`node${specialChar}`).text).toBe(`node${specialChar}`);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
@@ -20,10 +20,8 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

const style = vert['Q'].styles[0];

expect(vert['Q'].styles.length).toBe(1);
expect(vert['Q'].styles[0]).toBe('background:#fff');
expect(vert.get('Q').styles.length).toBe(1);
expect(vert.get('Q').styles[0]).toBe('background:#fff');
});

it('should handle multiple styles for a vortex', function () {
@@ -32,9 +30,9 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['R'].styles.length).toBe(2);
expect(vert['R'].styles[0]).toBe('background:#fff');
expect(vert['R'].styles[1]).toBe('border:1px solid red');
expect(vert.get('R').styles.length).toBe(2);
expect(vert.get('R').styles[0]).toBe('background:#fff');
expect(vert.get('R').styles[1]).toBe('border:1px solid red');
});

it('should handle multiple styles in a graph', function () {
@@ -45,11 +43,11 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['S'].styles.length).toBe(1);
expect(vert['T'].styles.length).toBe(2);
expect(vert['S'].styles[0]).toBe('background:#aaa');
expect(vert['T'].styles[0]).toBe('background:#bbb');
expect(vert['T'].styles[1]).toBe('border:1px solid red');
expect(vert.get('S').styles.length).toBe(1);
expect(vert.get('T').styles.length).toBe(2);
expect(vert.get('S').styles[0]).toBe('background:#aaa');
expect(vert.get('T').styles[0]).toBe('background:#bbb');
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
});

it('should handle styles and graph definitions in a graph', function () {
@@ -60,11 +58,11 @@ describe('[Style] when parsing', () => {
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();

expect(vert['S'].styles.length).toBe(1);
expect(vert['T'].styles.length).toBe(2);
expect(vert['S'].styles[0]).toBe('background:#aaa');
expect(vert['T'].styles[0]).toBe('background:#bbb');
expect(vert['T'].styles[1]).toBe('border:1px solid red');
expect(vert.get('S').styles.length).toBe(1);
expect(vert.get('T').styles.length).toBe(2);
expect(vert.get('S').styles[0]).toBe('background:#aaa');
expect(vert.get('T').styles[0]).toBe('background:#bbb');
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
});

it('should handle styles and graph definitions in a graph', function () {
@@ -73,9 +71,9 @@ describe('[Style] when parsing', () => {

const vert = flow.parser.yy.getVertices();

expect(vert['T'].styles.length).toBe(2);
expect(vert['T'].styles[0]).toBe('background:#bbb');
expect(vert['T'].styles[1]).toBe('border:1px solid red');
expect(vert.get('T').styles.length).toBe(2);
expect(vert.get('T').styles[0]).toBe('background:#bbb');
expect(vert.get('T').styles[1]).toBe('border:1px solid red');
});

it('should keep node label text (if already defined) when a style is applied', function () {
@@ -85,10 +83,10 @@ describe('[Style] when parsing', () => {

const vert = flow.parser.yy.getVertices();

expect(vert['A'].text).toBe('');
expect(vert['B'].text).toBe('Test');
expect(vert['C'].text).toBe('C');
expect(vert['D'].text).toBe('D');
expect(vert.get('A').text).toBe('');
expect(vert.get('B').text).toBe('Test');
expect(vert.get('C').text).toBe('C');
expect(vert.get('D').text).toBe('D');
});

it('should be possible to declare a class', function () {
@@ -99,9 +97,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to declare multiple classes', function () {
@@ -111,13 +109,13 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['firstClass'].styles.length).toBe(2);
expect(classes['firstClass'].styles[0]).toBe('background:#bbb');
expect(classes['firstClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('firstClass').styles.length).toBe(2);
expect(classes.get('firstClass').styles[0]).toBe('background:#bbb');
expect(classes.get('firstClass').styles[1]).toBe('border:1px solid red');

expect(classes['secondClass'].styles.length).toBe(2);
expect(classes['secondClass'].styles[0]).toBe('background:#bbb');
expect(classes['secondClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('secondClass').styles.length).toBe(2);
expect(classes.get('secondClass').styles[0]).toBe('background:#bbb');
expect(classes.get('secondClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to declare a class with a dot in the style', function () {
@@ -128,9 +126,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
});
it('should be possible to declare a class with a space in the style', function () {
const res = flow.parser.parse(
@@ -140,9 +138,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background: #bbb');
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background: #bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1.5px solid red');
});
it('should be possible to apply a class to a vertex', function () {
let statement = '';
@@ -156,9 +154,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a vertex with an id containing _', function () {
let statement = '';
@@ -172,9 +170,9 @@ describe('[Style] when parsing', () => {

const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a vertex directly', function () {
let statement = '';
@@ -187,10 +185,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('b').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly : usecase A[text].class ', function () {
@@ -204,10 +202,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('b').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly : usecase A[text].class-->B[test2] ', function () {
@@ -221,10 +219,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['A'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('A').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly 2', function () {
@@ -238,10 +236,10 @@ describe('[Style] when parsing', () => {
const vertices = flow.parser.yy.getVertices();
const classes = flow.parser.yy.getClasses();

expect(classes['exClass'].styles.length).toBe(2);
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exClass').styles.length).toBe(2);
expect(vertices.get('b').classes[0]).toBe('exClass');
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a comma separated list of vertices', function () {
let statement = '';
@@ -256,11 +254,11 @@ describe('[Style] when parsing', () => {
const classes = flow.parser.yy.getClasses();
const vertices = flow.parser.yy.getVertices();

expect(classes['exClass'].styles.length).toBe(2);
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
expect(vertices['a'].classes[0]).toBe('exClass');
expect(vertices['b'].classes[0]).toBe('exClass');
expect(classes.get('exClass').styles.length).toBe(2);
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
expect(vertices.get('a').classes[0]).toBe('exClass');
expect(vertices.get('b').classes[0]).toBe('exClass');
});

it('should handle style definitions with more then 1 digit in a row', function () {
@@ -364,9 +362,9 @@ describe('[Style] when parsing', () => {

const vert = flow.parser.yy.getVertices();

expect(vert['A'].classes.length).toBe(0);
expect(vert['B'].classes[0]).toBe('C1');
expect(vert['D'].classes[0]).toBe('C1');
expect(vert['E'].classes[0]).toBe('C2');
expect(vert.get('A').classes.length).toBe(0);
expect(vert.get('B').classes[0]).toBe('C1');
expect(vert.get('D').classes[0]).toBe('C1');
expect(vert.get('E').classes[0]).toBe('C2');
});
});

@@ -113,7 +113,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['v'].text).toBe('my text');
|
||||
expect(vert.get('v').text).toBe('my text');
|
||||
});
|
||||
it('should handle v in node ids v at end', function () {
|
||||
// v at end
|
||||
@@ -123,7 +123,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['csv'].text).toBe('my text');
|
||||
expect(vert.get('csv').text).toBe('my text');
|
||||
});
|
||||
it('should handle v in node ids v in middle', function () {
|
||||
// v in middle
|
||||
@@ -133,7 +133,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['ava'].text).toBe('my text');
|
||||
expect(vert.get('ava').text).toBe('my text');
|
||||
});
|
||||
it('should handle v in node ids, v at start', function () {
|
||||
// v at start
|
||||
@@ -143,7 +143,7 @@ describe('[Text] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
expect(vert['va'].text).toBe('my text');
|
||||
expect(vert.get('va').text).toBe('my text');
|
||||
});
|
||||
it('should handle keywords', function () {
|
||||
const res = flow.parser.parse('graph TD;A--x|text including graph space|B;');
|
||||
@@ -157,7 +157,7 @@ describe('[Text] when parsing', () => {
|
||||
const res = flow.parser.parse('graph TD;V-->a[v]');
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['a'].text).toBe('v');
|
||||
expect(vert.get('a').text).toBe('v');
|
||||
});
|
||||
it('should handle quoted text', function () {
|
||||
const res = flow.parser.parse('graph TD;V-- "test string()" -->a[v]');
|
||||
@@ -302,8 +302,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('Chimpansen hoppar');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('Chimpansen hoppar');
|
||||
});
|
||||
|
||||
const keywords = [
|
||||
@@ -353,8 +353,8 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe(`${shape.name}`);
|
||||
expect(vert['B'].text).toBe(`This node has a ${keyword} as text`);
|
||||
expect(vert.get('B').type).toBe(`${shape.name}`);
|
||||
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -365,24 +365,24 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe('rect');
|
||||
expect(vert['B'].text).toBe(`This node has a ${keyword} as text`);
|
||||
expect(vert.get('B').type).toBe('rect');
|
||||
expect(vert.get('B').text).toBe(`This node has a ${keyword} as text`);
|
||||
});
|
||||
|
||||
it('should handle edge case for odd vertex with node id ending with minus', function () {
|
||||
const res = flow.parser.parse('graph TD;A_node-->odd->Vertex Text];');
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['odd-'].type).toBe('odd');
|
||||
expect(vert['odd-'].text).toBe('Vertex Text');
|
||||
expect(vert.get('odd-').type).toBe('odd');
|
||||
expect(vert.get('odd-').text).toBe('Vertex Text');
|
||||
});
|
||||
it('should allow forward slashes in lean_right vertices', function () {
|
||||
const rest = flow.parser.parse(`graph TD;A_node-->B[/This node has a / as text/];`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe('lean_right');
|
||||
expect(vert['B'].text).toBe(`This node has a / as text`);
|
||||
expect(vert.get('B').type).toBe('lean_right');
|
||||
expect(vert.get('B').text).toBe(`This node has a / as text`);
|
||||
});
|
||||
|
||||
it('should allow back slashes in lean_left vertices', function () {
|
||||
@@ -390,8 +390,8 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
expect(vert['B'].type).toBe('lean_left');
|
||||
expect(vert['B'].text).toBe(`This node has a \\ as text`);
|
||||
expect(vert.get('B').type).toBe('lean_left');
|
||||
expect(vert.get('B').text).toBe(`This node has a \\ as text`);
|
||||
});
|
||||
|
||||
it('should handle åäö and minus', function () {
|
||||
@@ -400,8 +400,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('diamond');
|
||||
expect(vert['C'].text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
|
||||
expect(vert.get('C').type).toBe('diamond');
|
||||
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
|
||||
});
|
||||
|
||||
it('should handle with åäö, minus and space and br', function () {
|
||||
@@ -410,8 +410,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('Chimpansen hoppar åäö <br> - ÅÄÖ');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('Chimpansen hoppar åäö <br> - ÅÄÖ');
|
||||
});
|
||||
// it.skip('should handle åäö, minus and space and br',function(){
|
||||
// const res = flow.parser.parse('graph TD; A[Object(foo,bar)]-->B(Thing);');
|
||||
@@ -419,22 +419,22 @@ describe('[Text] when parsing', () => {
|
||||
// const vert = flow.parser.yy.getVertices();
|
||||
// const edges = flow.parser.yy.getEdges();
|
||||
//
|
||||
// expect(vert['C'].type).toBe('round');
|
||||
// expect(vert['C'].text).toBe(' A[Object(foo,bar)]-->B(Thing);');
|
||||
// expect(vert.get('C').type).toBe('round');
|
||||
// expect(vert.get('C').text).toBe(' A[Object(foo,bar)]-->B(Thing);');
|
||||
// });
|
||||
it('should handle unicode chars', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(Начало);');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['C'].text).toBe('Начало');
|
||||
expect(vert.get('C').text).toBe('Начало');
|
||||
});
|
||||
it('should handle backslask', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(c:\\windows);');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
||||
expect(vert['C'].text).toBe('c:\\windows');
|
||||
expect(vert.get('C').text).toBe('c:\\windows');
|
||||
});
|
||||
it('should handle CAPS', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(some CAPS);');
|
||||
@@ -442,8 +442,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('some CAPS');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('some CAPS');
|
||||
});
|
||||
it('should handle directions', function () {
|
||||
const res = flow.parser.parse('graph TD;A-->C(some URL);');
|
||||
@@ -451,8 +451,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['C'].type).toBe('round');
|
||||
expect(vert['C'].text).toBe('some URL');
|
||||
expect(vert.get('C').type).toBe('round');
|
||||
expect(vert.get('C').text).toBe('some URL');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -464,9 +464,9 @@ describe('[Text] when parsing', () => {
|
||||
|
||||
expect(edges[0].type).toBe('arrow_circle');
|
||||
expect(edges[1].type).toBe('arrow_point');
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -482,8 +482,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
|
||||
it('should handle text in vertices with space with spaces between vertices and link', function () {
|
||||
@@ -492,8 +492,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
it('should handle text including _ in vertices', function () {
|
||||
const res = flow.parser.parse('graph TD;A[chimpansen_hoppar] --> C;');
|
||||
@@ -501,8 +501,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen_hoppar');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen_hoppar');
|
||||
});
|
||||
|
||||
it('should handle quoted text in vertices ', function () {
|
||||
@@ -511,8 +511,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('square');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar ()[]');
|
||||
expect(vert.get('A').type).toBe('square');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar ()[]');
|
||||
});
|
||||
|
||||
it('should handle text in circle vertices with space', function () {
|
||||
@@ -521,8 +521,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('circle');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('circle');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
|
||||
it('should handle text in ellipse vertices', function () {
|
||||
@@ -531,8 +531,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('ellipse');
|
||||
expect(vert['A'].text).toBe('this is an ellipse');
|
||||
expect(vert.get('A').type).toBe('ellipse');
|
||||
expect(vert.get('A').text).toBe('this is an ellipse');
|
||||
});
|
||||
|
||||
it('should not freeze when ellipse text has a `(`', function () {
|
||||
@@ -545,8 +545,8 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].type).toBe('round');
|
||||
expect(vert['A'].text).toBe('chimpansen hoppar');
|
||||
expect(vert.get('A').type).toBe('round');
|
||||
expect(vert.get('A').text).toBe('chimpansen hoppar');
|
||||
});
|
||||
|
||||
it('should handle text in with ?', function () {
|
||||
@@ -555,7 +555,7 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].text).toBe('?');
|
||||
expect(vert.get('A').text).toBe('?');
|
||||
expect(edges[0].text).toBe('?');
|
||||
});
|
||||
it('should handle text in with éèêàçô', function () {
|
||||
@@ -564,7 +564,7 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].text).toBe('éèêàçô');
|
||||
expect(vert.get('A').text).toBe('éèêàçô');
|
||||
expect(edges[0].text).toBe('éèêàçô');
|
||||
});
|
||||
|
||||
@@ -574,7 +574,7 @@ describe('[Text] when parsing', () => {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].text).toBe(',.?!+-*');
|
||||
expect(vert.get('A').text).toBe(',.?!+-*');
|
||||
expect(edges[0].text).toBe(',.?!+-*');
|
||||
});
|
||||
|
||||
|
||||
@@ -22,9 +22,9 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -44,9 +44,9 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('C');
|
||||
@@ -66,9 +66,9 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -88,10 +88,10 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D'].id).toBe('D');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D').id).toBe('D');
|
||||
expect(edges.length).toBe(4);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('C');
|
||||
@@ -119,10 +119,10 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D'].id).toBe('D');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D').id).toBe('D');
|
||||
expect(edges.length).toBe(4);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('C');
|
||||
@@ -150,11 +150,11 @@ describe('when parsing flowcharts', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['B2'].id).toBe('B2');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D2'].id).toBe('D2');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('B2').id).toBe('B2');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D2').id).toBe('D2');
|
||||
expect(edges.length).toBe(6);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -193,14 +193,14 @@ describe('when parsing flowcharts', function () {
|
||||
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
|
||||
expect(classes['exClass'].styles.length).toBe(2);
|
||||
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
|
||||
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert['B'].classes[0]).toBe('exClass');
|
||||
expect(vert['C'].id).toBe('C');
|
||||
expect(vert['D'].id).toBe('D');
|
||||
expect(classes.get('exClass').styles.length).toBe(2);
|
||||
expect(classes.get('exClass').styles[0]).toBe('background:#bbb');
|
||||
expect(classes.get('exClass').styles[1]).toBe('border:1px solid red');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(vert.get('B').classes[0]).toBe('exClass');
|
||||
expect(vert.get('C').id).toBe('C');
|
||||
expect(vert.get('D').id).toBe('D');
|
||||
expect(edges.length).toBe(4);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
|
||||
@@ -19,8 +19,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
expect(edges.length).toBe(2);
|
||||
expect(edges[0].start).toBe('A');
|
||||
expect(edges[0].end).toBe('B');
|
||||
@@ -34,8 +34,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['endpoint'].id).toBe('endpoint');
|
||||
expect(vert['sender'].id).toBe('sender');
|
||||
expect(vert.get('endpoint').id).toBe('endpoint');
|
||||
expect(vert.get('sender').id).toBe('sender');
|
||||
expect(edges[0].start).toBe('endpoint');
|
||||
expect(edges[0].end).toBe('sender');
|
||||
});
|
||||
@@ -46,8 +46,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['blend'].id).toBe('blend');
|
||||
expect(vert['monograph'].id).toBe('monograph');
|
||||
expect(vert.get('blend').id).toBe('blend');
|
||||
expect(vert.get('monograph').id).toBe('monograph');
|
||||
expect(edges[0].start).toBe('blend');
|
||||
expect(edges[0].end).toBe('monograph');
|
||||
});
|
||||
@@ -58,8 +58,8 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['default'].id).toBe('default');
|
||||
expect(vert['monograph'].id).toBe('monograph');
|
||||
expect(vert.get('default').id).toBe('default');
|
||||
expect(vert.get('monograph').id).toBe('monograph');
|
||||
expect(edges[0].start).toBe('default');
|
||||
expect(edges[0].end).toBe('monograph');
|
||||
});
|
||||
@@ -71,12 +71,12 @@ describe('parsing a flow chart', function () {
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(vert['A'].id).toBe('A');
|
||||
expect(vert['B'].id).toBe('B');
|
||||
expect(vert.get('A').id).toBe('A');
|
||||
expect(vert.get('B').id).toBe('B');
|
||||
if (result) {
|
||||
expect(vert['A'].text).toBe(result);
|
||||
expect(vert.get('A').text).toBe(result);
|
||||
} else {
|
||||
expect(vert['A'].text).toBe(char);
|
||||
expect(vert.get('A').text).toBe(char);
|
||||
}
|
||||
flow.parser.yy.clear();
|
||||
};
|
||||
@@ -135,7 +135,7 @@ describe('parsing a flow chart', function () {
|
||||
const res = flow.parser.parse(statement);
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
expect(vertices['node1TB'].id).toBe('node1TB');
|
||||
expect(vertices.get('node1TB').id).toBe('node1TB');
|
||||
});
|
||||
|
||||
it('should be possible to use direction in node ids', function () {
|
||||
@@ -145,7 +145,7 @@ describe('parsing a flow chart', function () {
|
||||
const res = flow.parser.parse(statement);
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
expect(vertices['A'].id).toBe('A');
|
||||
expect(vertices.get('A').id).toBe('A');
|
||||
});
|
||||
|
||||
it('should be possible to use numbers as labels', function () {
|
||||
@@ -154,8 +154,8 @@ describe('parsing a flow chart', function () {
|
||||
statement = statement + 'graph TB;subgraph "number as labels";1;end;';
|
||||
const res = flow.parser.parse(statement);
|
||||
const vertices = flow.parser.yy.getVertices();
|
||||
const classes = flow.parser.yy.getClasses();
|
||||
expect(vertices['1'].id).toBe('1');
|
||||
|
||||
expect(vertices.get('1').id).toBe('1');
|
||||
});
|
||||
|
||||
it('should add accTitle and accDescr to flow chart', function () {
|
||||
@@ -194,4 +194,47 @@ describe('parsing a flow chart', function () {
with a second line`
);
});

for (const unsafeProp of ['__proto__', 'constructor']) {
it(`should work with node id ${unsafeProp}`, function () {
const flowChart = `graph LR
${unsafeProp} --> A;`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});

it(`should work with tooltip id ${unsafeProp}`, function () {
const flowChart = `graph LR
click ${unsafeProp} callback "${unsafeProp}";`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});

it(`should work with class id ${unsafeProp}`, function () {
const flowChart = `graph LR
${unsafeProp} --> A;
classDef ${unsafeProp} color:#ffffff,fill:#000000;
class ${unsafeProp} ${unsafeProp};`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});

it(`should work with subgraph id ${unsafeProp}`, function () {
const flowChart = `graph LR
${unsafeProp} --> A;
subgraph ${unsafeProp}
C --> D;
end;`;

expect(() => {
flow.parser.parse(flowChart);
}).not.toThrow();
});
}
});

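The new tests above exercise node, class, and subgraph ids named __proto__ and constructor. With a plain object those keys collide with inherited members of Object.prototype, while a Map treats any string as an ordinary key, which is the motivation for the object-to-Map switch throughout this changeset. A small illustrative sketch (not part of the diff):

// Plain object: inherited properties leak into lookups, and assigning to
// '__proto__' mutates the prototype instead of storing a value.
const nodesAsObject = {};
console.log(nodesAsObject['constructor'] === undefined); // false, inherited from Object.prototype
nodesAsObject['__proto__'] = { id: '__proto__' };
console.log(Object.keys(nodesAsObject).length); // 0, nothing was actually stored

// Map: the same ids behave like any other key.
const nodesAsMap = new Map();
console.log(nodesAsMap.has('constructor')); // false
nodesAsMap.set('__proto__', { id: '__proto__' });
console.log(nodesAsMap.size); // 1
console.log(nodesAsMap.get('__proto__').id); // '__proto__'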
@@ -28,7 +28,7 @@ let tickInterval = undefined;
let todayMarker = '';
let includes = [];
let excludes = [];
let links = {};
let links = new Map();
let sections = [];
let tasks = [];
let currentSection = '';
@@ -62,7 +62,7 @@ export const clear = function () {
inclusiveEndDates = false;
topAxis = false;
lastOrder = 0;
links = {};
links = new Map();
commonClear();
weekday = 'sunday';
weekend = 'saturday';
@@ -639,7 +639,7 @@ export const setLink = function (ids, _linkStr) {
pushFun(id, () => {
window.open(linkStr, '_self');
});
links[id] = linkStr;
links.set(id, linkStr);
}
});
setClass(ids, 'clickable');

@@ -475,14 +475,14 @@ export const draw = function (text, id, version, diagObj) {

rectangles
.filter(function (d) {
return links[d.id] !== undefined;
return links.has(d.id);
})
.each(function (o) {
var taskRect = doc.querySelector('#' + o.id);
var taskText = doc.querySelector('#' + o.id + '-text');
const oldParent = taskRect.parentNode;
var Link = doc.createElement('a');
Link.setAttribute('xlink:href', links[o.id]);
Link.setAttribute('xlink:href', links.get(o.id));
Link.setAttribute('target', '_top');
oldParent.appendChild(Link);
Link.appendChild(taskRect);

@@ -256,4 +256,15 @@ row2`;
expect(ganttDb.getWeekday()).toBe(day);
}
);

it.each(['__proto__', 'constructor'])('should allow for a link to %s id', (prop) => {
expect(() =>
parser.parse(`gantt
dateFormat YYYY-MM-DD
section Section
A task :${prop}, 2024-10-01, 3d
click ${prop} href "https://mermaid.js.org/"
`)
).not.toThrow();
});
});

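The gantt changes above switch the links store from a plain object to a Map, so the renderer filters tasks with links.has(d.id) and resolves the href with links.get(o.id). A short sketch of that lookup pattern (illustrative only; the task ids are made up):

const links = new Map();
links.set('task1', 'https://mermaid.js.org/');

const taskIds = ['task1', 'task2'];
const linked = taskIds.filter((id) => links.has(id)); // ['task1']
linked.forEach((id) => console.log(links.get(id))); // logs the stored href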
@@ -14,12 +14,12 @@ import {

let mainBranchName = getConfig().gitGraph.mainBranchName;
let mainBranchOrder = getConfig().gitGraph.mainBranchOrder;
let commits = {};
let commits = new Map();
let head = null;
let branchesConfig = {};
branchesConfig[mainBranchName] = { name: mainBranchName, order: mainBranchOrder };
let branches = {};
branches[mainBranchName] = head;
let branchesConfig = new Map();
branchesConfig.set(mainBranchName, { name: mainBranchName, order: mainBranchOrder });
let branches = new Map();
branches.set(mainBranchName, head);
let curBranch = mainBranchName;
let direction = 'LR';
let seq = 0;
@@ -46,11 +46,11 @@ function getId() {
// if (Array.isArray(otherCommit.parent)) {
// log.debug('In merge commit:', otherCommit.parent);
// return (
// isFastForwardable(currentCommit, commits[otherCommit.parent[0]]) ||
// isFastForwardable(currentCommit, commits[otherCommit.parent[1]])
// isFastForwardable(currentCommit, commits.get(otherCommit.parent[0])) ||
// isFastForwardable(currentCommit, commits.get(otherCommit.parent[1]))
// );
// } else {
// otherCommit = commits[otherCommit.parent];
// otherCommit = commits.get(otherCommit.parent);
// }
// }
// log.debug(currentCommit.id, otherCommit.id);
@@ -118,16 +118,16 @@ export const commit = function (msg, id, type, tag) {
branch: curBranch,
};
head = commit;
commits[commit.id] = commit;
branches[curBranch] = commit.id;
commits.set(commit.id, commit);
branches.set(curBranch, commit.id);
log.debug('in pushCommit ' + commit.id);
};

export const branch = function (name, order) {
name = common.sanitizeText(name, getConfig());
if (branches[name] === undefined) {
branches[name] = head != null ? head.id : null;
branchesConfig[name] = { name, order: order ? parseInt(order, 10) : null };
if (!branches.has(name)) {
branches.set(name, head != null ? head.id : null);
branchesConfig.set(name, { name, order: order ? parseInt(order, 10) : null });
checkout(name);
log.debug('in createBranch');
} else {
@@ -151,8 +151,8 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
otherBranch = common.sanitizeText(otherBranch, getConfig());
custom_id = common.sanitizeText(custom_id, getConfig());

const currentCommit = commits[branches[curBranch]];
const otherCommit = commits[branches[otherBranch]];
const currentCommit = commits.get(branches.get(curBranch));
const otherCommit = commits.get(branches.get(otherBranch));
if (curBranch === otherBranch) {
let error = new Error('Incorrect usage of "merge". Cannot merge a branch to itself');
error.hash = {
@@ -175,7 +175,7 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
expected: ['commit'],
};
throw error;
} else if (branches[otherBranch] === undefined) {
} else if (!branches.has(otherBranch)) {
let error = new Error(
'Incorrect usage of "merge". Branch to be merged (' + otherBranch + ') does not exist'
);
@@ -209,7 +209,7 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
expected: ['branch abc'],
};
throw error;
} else if (custom_id && commits[custom_id] !== undefined) {
} else if (custom_id && commits.has(custom_id)) {
let error = new Error(
'Incorrect usage of "merge". Commit with id:' +
custom_id +
@@ -232,15 +232,15 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
// return;
// }
// if (isFastForwardable(currentCommit, otherCommit)) {
// branches[curBranch] = branches[otherBranch];
// head = commits[branches[curBranch]];
// branches.set(curBranch, branches.get(otherBranch));
// head = commits.get(branches.get(curBranch));
// } else {
// create merge commit
const commit = {
id: custom_id ? custom_id : seq + '-' + getId(),
message: 'merged branch ' + otherBranch + ' into ' + curBranch,
seq: seq++,
parents: [head == null ? null : head.id, branches[otherBranch]],
parents: [head == null ? null : head.id, branches.get(otherBranch)],
branch: curBranch,
type: commitType.MERGE,
customType: override_type,
@@ -248,8 +248,8 @@ export const merge = function (otherBranch, custom_id, override_type, custom_tag
tag: custom_tag ? custom_tag : '',
};
head = commit;
commits[commit.id] = commit;
branches[curBranch] = commit.id;
commits.set(commit.id, commit);
branches.set(curBranch, commit.id);
// }
log.debug(branches);
log.debug('in mergeBranch');
@@ -262,7 +262,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
tag = common.sanitizeText(tag, getConfig());
parentCommitId = common.sanitizeText(parentCommitId, getConfig());

if (!sourceId || commits[sourceId] === undefined) {
if (!sourceId || !commits.has(sourceId)) {
let error = new Error(
'Incorrect usage of "cherryPick". Source commit id should exist and provided'
);
@@ -275,7 +275,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
};
throw error;
}
let sourceCommit = commits[sourceId];
let sourceCommit = commits.get(sourceId);
let sourceCommitBranch = sourceCommit.branch;
if (
parentCommitId &&
@@ -292,7 +292,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
);
throw error;
}
if (!targetId || commits[targetId] === undefined) {
if (!targetId || !commits.has(targetId)) {
// cherry-pick source commit to current branch

if (sourceCommitBranch === curBranch) {
@@ -308,7 +308,7 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
};
throw error;
}
const currentCommit = commits[branches[curBranch]];
const currentCommit = commits.get(branches.get(curBranch));
if (currentCommit === undefined || !currentCommit) {
let error = new Error(
'Incorrect usage of "cherry-pick". Current branch (' + curBranch + ')has no commits'
@@ -336,15 +336,15 @@ export const cherryPick = function (sourceId, targetId, tag, parentCommitId) {
}`,
};
head = commit;
commits[commit.id] = commit;
branches[curBranch] = commit.id;
commits.set(commit.id, commit);
branches.set(curBranch, commit.id);
log.debug(branches);
log.debug('in cherryPick');
}
};
export const checkout = function (branch) {
branch = common.sanitizeText(branch, getConfig());
if (branches[branch] === undefined) {
if (!branches.has(branch)) {
let error = new Error(
'Trying to checkout branch which is not yet created. (Help try using "branch ' + branch + '")'
);
@@ -360,8 +360,8 @@ export const checkout = function (branch) {
//log.debug('in createBranch');
} else {
curBranch = branch;
const id = branches[curBranch];
head = commits[id];
const id = branches.get(curBranch);
head = commits.get(id);
}
};

@@ -369,10 +369,10 @@ export const checkout = function (branch) {
// log.debug('in reset', commitRef);
// const ref = commitRef.split(':')[0];
// let parentCount = parseInt(commitRef.split(':')[1]);
// let commit = ref === 'HEAD' ? head : commits[branches[ref]];
// let commit = ref === 'HEAD' ? head : commits.get(branches.get(ref));
// log.debug(commit, parentCount);
// while (parentCount > 0) {
// commit = commits[commit.parent];
// commit = commits.get(commit.parent);
// parentCount--;
// if (!commit) {
// const err = 'Critical error - unique parent commit not found during reset';
@@ -416,19 +416,19 @@ function prettyPrintCommitHistory(commitArr) {
});
const label = [line, commit.id, commit.seq];
for (let branch in branches) {
if (branches[branch] === commit.id) {
if (branches.get(branch) === commit.id) {
label.push(branch);
}
}
log.debug(label.join(' '));
if (commit.parents && commit.parents.length == 2) {
const newCommit = commits[commit.parents[0]];
const newCommit = commits.get(commit.parents[0]);
upsert(commitArr, commit, newCommit);
commitArr.push(commits[commit.parents[1]]);
commitArr.push(commits.get(commit.parents[1]));
} else if (commit.parents.length == 0) {
return;
} else {
const nextCommit = commits[commit.parents];
const nextCommit = commits.get(commit.parents);
upsert(commitArr, commit, nextCommit);
}
commitArr = uniqBy(commitArr, (c) => c.id);
@@ -442,21 +442,21 @@ export const prettyPrint = function () {
};

export const clear = function () {
commits = {};
commits = new Map();
head = null;
let mainBranch = getConfig().gitGraph.mainBranchName;
let mainBranchOrder = getConfig().gitGraph.mainBranchOrder;
branches = {};
branches[mainBranch] = null;
branchesConfig = {};
branchesConfig[mainBranch] = { name: mainBranch, order: mainBranchOrder };
branches = new Map();
branches.set(mainBranch, null);
branchesConfig = new Map();
branchesConfig.set(mainBranch, { name: mainBranch, order: mainBranchOrder });
curBranch = mainBranch;
seq = 0;
commonClear();
};

export const getBranchesAsObjArray = function () {
const branchesArray = Object.values(branchesConfig)
const branchesArray = [...branchesConfig.values()]
.map((branchConfig, i) => {
if (branchConfig.order !== null) {
return branchConfig;
@@ -479,9 +479,7 @@ export const getCommits = function () {
return commits;
};
export const getCommitsArray = function () {
const commitArr = Object.keys(commits).map(function (key) {
return commits[key];
});
const commitArr = [...commits.values()];
commitArr.forEach(function (o) {
log.debug(o.id);
});

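The gitGraphAst changes above migrate commits, branches, and branchesConfig from object literals to Maps, and the parser specs that follow read them through size, get(), keys(), and values(). A compact sketch of the idiom swaps involved (illustrative only; the commit data is made up):

const commits = new Map();
commits.set('abc123', { id: 'abc123', message: 'a commit', parents: [] });

// Object idiom                  ->  Map idiom
// Object.keys(commits).length   ->  commits.size
// commits[id] !== undefined     ->  commits.has(id)
// commits[id]                   ->  commits.get(id)
// Object.keys(commits)[0]       ->  commits.keys().next().value
// Object.values(commits)        ->  [...commits.values()]

console.log(commits.size); // 1
console.log(commits.keys().next().value); // 'abc123'
console.log([...commits.values()][0].message); // 'a commit'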
@@ -12,10 +12,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph definition with empty options', function () {
|
||||
@@ -25,10 +25,10 @@ describe('when parsing a gitGraph', function () {
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(parser.yy.getOptions()).toEqual({});
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph definition with valid options', function () {
|
||||
@@ -37,10 +37,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(parser.yy.getOptions()['key']).toBe('value');
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should not fail on a gitGraph with malformed json', function () {
|
||||
@@ -48,10 +48,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle set direction top to bottom', function () {
|
||||
@@ -60,10 +60,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('TB');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle set direction bottom to top', function () {
|
||||
@@ -72,10 +72,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('BT');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should checkout a branch', function () {
|
||||
@@ -84,7 +84,7 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(0);
|
||||
expect(commits.size).toBe(0);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new');
|
||||
});
|
||||
|
||||
@@ -94,7 +94,7 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(0);
|
||||
expect(commits.size).toBe(0);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new');
|
||||
});
|
||||
|
||||
@@ -104,11 +104,11 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(2);
|
||||
expect(commits.size).toBe(2);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new');
|
||||
const branchCommit = parser.yy.getBranches()['new'];
|
||||
const branchCommit = parser.yy.getBranches().get('new');
|
||||
expect(branchCommit).not.toBeNull();
|
||||
expect(commits[branchCommit].parent).not.toBeNull();
|
||||
expect(commits.get(branchCommit).parent).not.toBeNull();
|
||||
});
|
||||
it('should handle commit with args', function () {
|
||||
const str = 'gitGraph:\n' + 'commit "a commit"\n';
|
||||
@@ -116,9 +116,9 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('a commit');
|
||||
expect(commits.size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('a commit');
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
});
|
||||
|
||||
@@ -136,10 +136,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
|
||||
});
|
||||
|
||||
it.skip('reset can take an argument', function () {
|
||||
@@ -155,9 +155,9 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
const main = commits[parser.yy.getBranches()['main']];
|
||||
const main = commits.get(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(main.parent);
|
||||
});
|
||||
|
||||
@@ -175,10 +175,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
|
||||
});
|
||||
|
||||
it('should handle cases when merge is a noop', function () {
|
||||
@@ -194,10 +194,12 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['newbranch']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).not.toEqual(
|
||||
parser.yy.getBranches().get('main')
|
||||
);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('newbranch'));
|
||||
});
|
||||
|
||||
it('should handle merge with 2 parents', function () {
|
||||
@@ -215,10 +217,12 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(5);
|
||||
expect(commits.size).toBe(5);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).not.toEqual(
|
||||
parser.yy.getBranches().get('main')
|
||||
);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('main'));
|
||||
});
|
||||
|
||||
it.skip('should handle ff merge when history walk has two parents (merge commit)', function () {
|
||||
@@ -239,10 +243,10 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(7);
|
||||
expect(commits.size).toBe(7);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch');
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches()['main']);
|
||||
expect(parser.yy.getBranches().get('newbranch')).toEqual(parser.yy.getBranches().get('main'));
|
||||
expect(parser.yy.getHead().id).toEqual(parser.yy.getBranches().get('main'));
|
||||
|
||||
parser.yy.prettyPrint();
|
||||
});
|
||||
|
||||
@@ -13,15 +13,15 @@ describe('when parsing a gitGraph', function () {
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
//console.info(commits);
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id only', function () {
|
||||
@@ -30,15 +30,15 @@ describe('when parsing a gitGraph', function () {
|
||||
`;
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit tag only', function () {
|
||||
@@ -48,15 +48,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('test');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('test');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type HIGHLIGHT only', function () {
|
||||
@@ -66,15 +66,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type REVERSE only', function () {
|
||||
@@ -84,15 +84,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type NORMAL only', function () {
|
||||
@@ -102,15 +102,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit msg only', function () {
|
||||
@@ -120,15 +120,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test commit');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test commit');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit "msg:" key only', function () {
|
||||
@@ -138,15 +138,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test commit');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test commit');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id, tag only', function () {
|
||||
@@ -156,15 +156,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(0);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit type, tag only', function () {
|
||||
@@ -174,15 +174,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit tag and type only', function () {
|
||||
@@ -192,15 +192,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).not.toBeNull();
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).not.toBeNull();
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id, type and tag only', function () {
|
||||
@@ -210,15 +210,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom commit id, type, tag and msg', function () {
|
||||
@@ -228,15 +228,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom type,tag, msg, commit id,', function () {
|
||||
@@ -247,15 +247,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom tag, msg, commit id, type,', function () {
|
||||
@@ -265,15 +265,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a gitGraph commit with custom msg, commit id, type,tag', function () {
|
||||
@@ -283,15 +283,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
const key = Object.keys(commits)[0];
|
||||
expect(commits[key].message).toBe('test msg');
|
||||
expect(commits[key].id).toBe('1111');
|
||||
expect(commits[key].tag).toBe('test tag');
|
||||
expect(commits[key].type).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
const key = commits.keys().next().value;
|
||||
expect(commits.get(key).message).toBe('test msg');
|
||||
expect(commits.get(key).id).toBe('1111');
|
||||
expect(commits.get(key).tag).toBe('test tag');
|
||||
expect(commits.get(key).type).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle 3 straight commits', function () {
|
||||
@@ -303,10 +303,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(1);
|
||||
expect(parser.yy.getBranches().size).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle new branch creation', function () {
|
||||
@@ -317,10 +317,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow quoted branch names', function () {
|
||||
@@ -335,16 +335,14 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit2].branch).toBe('branch');
|
||||
expect(commits[commit3].branch).toBe('main');
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const [commit1, commit2, commit3] = commits.keys();
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit2).branch).toBe('branch');
|
||||
expect(commits.get(commit3).branch).toBe('main');
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([{ name: 'main' }, { name: 'branch' }]);
|
||||
});
|
||||
|
||||
@@ -356,10 +354,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('azAZ_-./test');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow branch names starting with numbers', function () {
|
||||
@@ -371,10 +369,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('1.0.1');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should allow branch names starting with unusual prefixes', function () {
|
||||
@@ -392,11 +390,11 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('A');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(7);
|
||||
expect(Object.keys(parser.yy.getBranches())).toEqual(
|
||||
expect(parser.yy.getBranches().size).toBe(7);
|
||||
expect([...parser.yy.getBranches().keys()]).toEqual(
|
||||
expect.arrayContaining([
|
||||
'branch01',
|
||||
'checkout02',
|
||||
@@ -417,10 +415,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
it('should handle new branch checkout with order', function () {
|
||||
const str = `gitGraph:
|
||||
@@ -432,9 +430,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('test3');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(4);
|
||||
expect(parser.yy.getBranches().size).toBe(4);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'test3' },
|
||||
@@ -452,9 +450,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('test3');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(4);
|
||||
expect(parser.yy.getBranches().size).toBe(4);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'test2' },
|
||||
@@ -473,16 +471,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(2);
|
||||
expect(commits.size).toBe(2);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commit1]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const [commit1, commit2] = commits.keys();
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commit1]);
|
||||
});
|
||||
|
||||
it('should handle new branch checkout & commit and merge', function () {
|
||||
@@ -498,22 +495,22 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
const commit4 = Object.keys(commits)[3];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
|
||||
expect(commits[commit3].branch).toBe('testBranch');
|
||||
expect(commits[commit3].parents).toStrictEqual([commits[commit2].id]);
|
||||
expect(commits[commit4].branch).toBe('main');
|
||||
expect(commits[commit4].parents).toStrictEqual([commits[commit1].id, commits[commit3].id]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const [commit1, commit2, commit3, commit4] = commits.keys();
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
|
||||
expect(commits.get(commit3).branch).toBe('testBranch');
|
||||
expect(commits.get(commit3).parents).toStrictEqual([commits.get(commit2).id]);
|
||||
expect(commits.get(commit4).branch).toBe('main');
|
||||
expect(commits.get(commit4).parents).toStrictEqual([
|
||||
commits.get(commit1).id,
|
||||
commits.get(commit3).id,
|
||||
]);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'testBranch' },
|
||||
@@ -529,10 +526,10 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(1);
|
||||
expect(commits.size).toBe(1);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle new branch switch & commit', function () {
|
||||
@@ -545,16 +542,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(2);
|
||||
expect(commits.size).toBe(2);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('testBranch');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commit1]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const [commit1, commit2] = commits.keys();
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commit1]);
|
||||
});
|
||||
|
||||
it('should handle new branch switch & commit and merge', function () {
|
||||
@@ -570,22 +566,22 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(4);
|
||||
expect(commits.size).toBe(4);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
const commit4 = Object.keys(commits)[3];
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
|
||||
expect(commits[commit3].branch).toBe('testBranch');
|
||||
expect(commits[commit3].parents).toStrictEqual([commits[commit2].id]);
|
||||
expect(commits[commit4].branch).toBe('main');
|
||||
expect(commits[commit4].parents).toStrictEqual([commits[commit1].id, commits[commit3].id]);
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const [commit1, commit2, commit3, commit4] = commits.keys();
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
|
||||
expect(commits.get(commit3).branch).toBe('testBranch');
|
||||
expect(commits.get(commit3).parents).toStrictEqual([commits.get(commit2).id]);
|
||||
expect(commits.get(commit4).branch).toBe('main');
|
||||
expect(commits.get(commit4).parents).toStrictEqual([
|
||||
commits.get(commit1).id,
|
||||
commits.get(commit3).id,
|
||||
]);
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'testBranch' },
|
||||
@@ -604,23 +600,23 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(3);
|
||||
expect(commits.size).toBe(3);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
expect(Object.keys(parser.yy.getBranches()).length).toBe(2);
|
||||
const commit1 = Object.keys(commits)[0];
|
||||
const commit2 = Object.keys(commits)[1];
|
||||
const commit3 = Object.keys(commits)[2];
|
||||
expect(parser.yy.getBranches().size).toBe(2);
|
||||
const [commit1, commit2, commit3] = commits.keys();
|
||||
expect(commits.get(commit1).branch).toBe('main');
|
||||
expect(commits.get(commit1).parents).toStrictEqual([]);
|
||||
|
||||
expect(commits[commit1].branch).toBe('main');
|
||||
expect(commits[commit1].parents).toStrictEqual([]);
|
||||
expect(commits.get(commit2).branch).toBe('testBranch');
|
||||
expect(commits.get(commit2).parents).toStrictEqual([commits.get(commit1).id]);
|
||||
|
||||
expect(commits[commit2].branch).toBe('testBranch');
|
||||
expect(commits[commit2].parents).toStrictEqual([commits[commit1].id]);
|
||||
|
||||
expect(commits[commit3].branch).toBe('main');
|
||||
expect(commits[commit3].parents).toStrictEqual([commits[commit1].id, commits[commit2].id]);
|
||||
expect(commits[commit3].tag).toBe('merge-tag');
|
||||
expect(commits.get(commit3).branch).toBe('main');
|
||||
expect(commits.get(commit3).parents).toStrictEqual([
|
||||
commits.get(commit1).id,
|
||||
commits.get(commit2).id,
|
||||
]);
|
||||
expect(commits.get(commit3).tag).toBe('merge-tag');
|
||||
expect(parser.yy.getBranchesAsObjArray()).toStrictEqual([
|
||||
{ name: 'main' },
|
||||
{ name: 'testBranch' },
|
||||
@@ -652,7 +648,7 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
expect(Object.keys(commits).length).toBe(7);
|
||||
expect(commits.size).toBe(7);
|
||||
expect(parser.yy.getCurrentBranch()).toBe('main');
|
||||
expect(parser.yy.getDirection()).toBe('LR');
|
||||
|
||||
@@ -665,7 +661,7 @@ describe('when parsing a gitGraph', function () {
|
||||
testBranch2Merge,
|
||||
testBranch3Commit,
|
||||
testBranch3Merge,
|
||||
] = Object.values(commits);
|
||||
] = [...commits.values()];
|
||||
|
||||
expect(mainCommit.branch).toBe('main');
|
||||
expect(mainCommit.parents).toStrictEqual([]);
|
||||
@@ -708,9 +704,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[2];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('cherry-pick:A');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('main');
|
||||
const cherryPickCommitID = [...commits.keys()][2];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('cherry-pick:A');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('main');
|
||||
});
|
||||
|
||||
it('should support cherry-picking commits with custom tag', function () {
|
||||
@@ -724,9 +720,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[2];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('MyTag');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('main');
|
||||
const cherryPickCommitID = [...commits.keys()][2];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('MyTag');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('main');
|
||||
});
|
||||
|
||||
it('should support cherry-picking commits with no tag', function () {
|
||||
@@ -740,9 +736,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[2];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('main');
|
||||
const cherryPickCommitID = [...commits.keys()][2];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('main');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits', function () {
|
||||
@@ -761,9 +757,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[4];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('cherry-pick:M|parent:B');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][4];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('cherry-pick:M|parent:B');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits with tag', function () {
|
||||
@@ -782,9 +778,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[4];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('v1.0');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][4];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('v1.0');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits with additional commit', function () {
|
||||
@@ -805,9 +801,9 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[5];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('v2.1:ZERO');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][5];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('v2.1:ZERO');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should support cherry-picking of merge commits with empty tag', function () {
|
||||
@@ -829,11 +825,11 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str);
|
||||
const commits = parser.yy.getCommits();
|
||||
const cherryPickCommitID = Object.keys(commits)[5];
|
||||
const cherryPickCommitID2 = Object.keys(commits)[7];
|
||||
expect(commits[cherryPickCommitID].tag).toBe('');
|
||||
expect(commits[cherryPickCommitID2].tag).toBe('');
|
||||
expect(commits[cherryPickCommitID].branch).toBe('release');
|
||||
const cherryPickCommitID = [...commits.keys()][5];
|
||||
const cherryPickCommitID2 = [...commits.keys()][7];
|
||||
expect(commits.get(cherryPickCommitID).tag).toBe('');
|
||||
expect(commits.get(cherryPickCommitID2).tag).toBe('');
|
||||
expect(commits.get(cherryPickCommitID).branch).toBe('release');
|
||||
});
|
||||
|
||||
it('should fail cherry-picking of merge commits if the parent of merge commits is not specified', function () {
|
||||
@@ -1086,4 +1082,26 @@ describe('when parsing a gitGraph', function () {
expect(parser.yy.getAccDescription()).toBe('This is a description\nusing multiple lines');
});
});

describe('unsafe properties', () => {
for (const prop of ['__proto__', 'constructor']) {
it(`should work with custom commit id or branch name ${prop}`, () => {
const str = `gitGraph
commit id:"${prop}"
branch ${prop}
checkout ${prop}
commit
checkout main
merge ${prop}
`;
parser.parse(str);
const commits = parser.yy.getCommits();
expect(commits.size).toBe(3);
expect(commits.keys().next().value).toBe(prop);
expect(parser.yy.getCurrentBranch()).toBe('main');
expect(parser.yy.getBranches().size).toBe(2);
expect(parser.yy.getBranchesAsObjArray()[1].name).toBe(prop);
});
}
});
});

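The new 'unsafe properties' cases are the clearest motivation for the Object-to-Map migration in this diff: keys such as __proto__ or constructor collide with Object.prototype when commits and branches live in plain objects. A minimal sketch of the difference, not part of the diff, relying only on standard JavaScript semantics:

// With a plain object, assigning to '__proto__' does not create an own property,
// so the "commit" is silently lost (or, with an object value, mutates the prototype).
const asRecord: Record<string, number> = {};
asRecord['__proto__'] = 1;
console.log(Object.keys(asRecord).length); // 0

// A Map treats '__proto__' as an ordinary key.
const asMap = new Map<string, number>();
asMap.set('__proto__', 1);
console.log(asMap.size); // 1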
@@ -3,7 +3,12 @@ import { getConfig, setupGraphViewbox } from '../../diagram-api/diagramAPI.js';
import { log } from '../../logger.js';
import utils from '../../utils.js';

let allCommitsDict = {};
/**
* @typedef {Map<string, { id: string, message: string, seq: number, type: number, tag: string, parents: string[], branch: string }>} CommitMap
*/

/** @type {CommitMap} */
let allCommitsDict = new Map();

const commitType = {
NORMAL: 0,
@@ -22,9 +27,9 @@ let maxPos = 0;
let dir = 'LR';
let defaultPos = 30;
const clear = () => {
branchPos = {};
commitPos = {};
allCommitsDict = {};
branchPos = new Map();
commitPos = new Map();
allCommitsDict = new Map();
maxPos = 0;
lanes = [];
dir = 'LR';
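From here on, the renderer's bookkeeping stores (commitPos, branchPos, allCommitsDict) are Maps, so every obj[key] read or write in the hunks below becomes .get() or .set(). A small illustrative sketch of the pattern, with the value shapes assumed from the diff's usage rather than defined by it:

// Illustrative only; the real renderer fills these while laying out the graph.
const branchPos = new Map<string, { pos: number; index: number }>();
const commitPos = new Map<string, { x: number; y: number }>();

branchPos.set('main', { pos: 0, index: 0 });
commitPos.set('abc123', { x: 10, y: branchPos.get('main')!.pos });

console.log(commitPos.get('abc123')); // { x: 10, y: 0 }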
@@ -78,7 +83,8 @@ const findClosestParent = (parents) => {
|
||||
let maxPosition = 0;
|
||||
|
||||
parents.forEach((parent) => {
|
||||
const parentPosition = dir === 'TB' || dir === 'BT' ? commitPos[parent].y : commitPos[parent].x;
|
||||
const parentPosition =
|
||||
dir === 'TB' || dir === 'BT' ? commitPos.get(parent).y : commitPos.get(parent).x;
|
||||
if (parentPosition >= maxPosition) {
|
||||
closestParent = parent;
|
||||
maxPosition = parentPosition;
|
||||
@@ -101,7 +107,7 @@ const findClosestParentBT = (parents) => {
|
||||
let maxPosition = Infinity;
|
||||
|
||||
parents.forEach((parent) => {
|
||||
const parentPosition = commitPos[parent].y;
|
||||
const parentPosition = commitPos.get(parent).y;
|
||||
if (parentPosition <= maxPosition) {
|
||||
closestParent = parent;
|
||||
maxPosition = parentPosition;
|
||||
@@ -119,7 +125,7 @@ const findClosestParentBT = (parents) => {
|
||||
* of the remaining commits.
|
||||
*
|
||||
* @param {any} sortedKeys
|
||||
* @param {any} commits
|
||||
* @param {CommitMap} commits
|
||||
* @param {any} defaultPos
|
||||
* @param {any} commitStep
|
||||
* @param {any} layoutOffset
|
||||
@@ -129,38 +135,38 @@ const setParallelBTPos = (sortedKeys, commits, defaultPos, commitStep, layoutOff
|
||||
let maxPosition = defaultPos;
|
||||
let roots = [];
|
||||
sortedKeys.forEach((key) => {
|
||||
const commit = commits[key];
|
||||
const commit = commits.get(key);
|
||||
if (commit.parents.length) {
|
||||
const closestParent = findClosestParent(commit.parents);
|
||||
curPos = commitPos[closestParent].y + commitStep;
|
||||
curPos = commitPos.get(closestParent).y + commitStep;
|
||||
if (curPos >= maxPosition) {
|
||||
maxPosition = curPos;
|
||||
}
|
||||
} else {
|
||||
roots.push(commit);
|
||||
}
|
||||
const x = branchPos[commit.branch].pos;
|
||||
const x = branchPos.get(commit.branch).pos;
|
||||
const y = curPos + layoutOffset;
|
||||
commitPos[commit.id] = { x: x, y: y };
|
||||
commitPos.set(commit.id, { x: x, y: y });
|
||||
});
|
||||
curPos = maxPosition;
|
||||
roots.forEach((commit) => {
|
||||
const posWithOffset = curPos + defaultPos;
|
||||
const y = posWithOffset;
|
||||
const x = branchPos[commit.branch].pos;
|
||||
commitPos[commit.id] = { x: x, y: y };
|
||||
const x = branchPos.get(commit.branch).pos;
|
||||
commitPos.set(commit.id, { x: x, y: y });
|
||||
});
|
||||
sortedKeys.forEach((key) => {
|
||||
const commit = commits[key];
|
||||
const commit = commits.get(key);
|
||||
if (commit.parents.length) {
|
||||
const closestParent = findClosestParentBT(commit.parents);
|
||||
curPos = commitPos[closestParent].y - commitStep;
|
||||
curPos = commitPos.get(closestParent).y - commitStep;
|
||||
if (curPos <= maxPosition) {
|
||||
maxPosition = curPos;
|
||||
}
|
||||
const x = branchPos[commit.branch].pos;
|
||||
const x = branchPos.get(commit.branch).pos;
|
||||
const y = curPos - layoutOffset;
|
||||
commitPos[commit.id] = { x: x, y: y };
|
||||
commitPos.set(commit.id, { x: x, y: y });
|
||||
}
|
||||
});
|
||||
};
|
||||
@@ -171,7 +177,7 @@ const setParallelBTPos = (sortedKeys, commits, defaultPos, commitStep, layoutOff
|
||||
* vertical layering correct in the graph.
|
||||
*
|
||||
* @param {any} svg
|
||||
* @param {any} commits
|
||||
* @param {CommitMap} commits
|
||||
* @param {any} modifyGraph
|
||||
*/
|
||||
const drawCommits = (svg, commits, modifyGraph) => {
|
||||
@@ -183,18 +189,18 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
if (dir === 'TB' || dir === 'BT') {
|
||||
pos = defaultPos;
|
||||
}
|
||||
const keys = Object.keys(commits);
|
||||
const keys = [...commits.keys()];
|
||||
const isParallelCommits = gitGraphConfig.parallelCommits;
|
||||
const layoutOffset = 10;
|
||||
const commitStep = 40;
|
||||
let sortedKeys =
|
||||
dir !== 'BT' || (dir === 'BT' && isParallelCommits)
|
||||
? keys.sort((a, b) => {
|
||||
return commits[a].seq - commits[b].seq;
|
||||
return commits.get(a).seq - commits.get(b).seq;
|
||||
})
|
||||
: keys
|
||||
.sort((a, b) => {
|
||||
return commits[a].seq - commits[b].seq;
|
||||
return commits.get(a).seq - commits.get(b).seq;
|
||||
})
|
||||
.reverse();
|
||||
|
||||
@@ -203,31 +209,31 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
sortedKeys = sortedKeys.reverse();
|
||||
}
|
||||
sortedKeys.forEach((key) => {
|
||||
const commit = commits[key];
|
||||
const commit = commits.get(key);
|
||||
if (isParallelCommits) {
|
||||
if (commit.parents.length) {
|
||||
const closestParent =
|
||||
dir === 'BT' ? findClosestParentBT(commit.parents) : findClosestParent(commit.parents);
|
||||
if (dir === 'TB') {
|
||||
pos = commitPos[closestParent].y + commitStep;
|
||||
pos = commitPos.get(closestParent).y + commitStep;
|
||||
} else if (dir === 'BT') {
|
||||
pos = commitPos[key].y - commitStep;
|
||||
pos = commitPos.get(key).y - commitStep;
|
||||
} else {
|
||||
pos = commitPos[closestParent].x + commitStep;
|
||||
pos = commitPos.get(closestParent).x + commitStep;
|
||||
}
|
||||
} else {
|
||||
if (dir === 'TB') {
|
||||
pos = defaultPos;
|
||||
} else if (dir === 'BT') {
|
||||
pos = commitPos[key].y - commitStep;
|
||||
pos = commitPos.get(key).y - commitStep;
|
||||
} else {
|
||||
pos = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
const posWithOffset = dir === 'BT' && isParallelCommits ? pos : pos + layoutOffset;
|
||||
const y = dir === 'TB' || dir === 'BT' ? posWithOffset : branchPos[commit.branch].pos;
|
||||
const x = dir === 'TB' || dir === 'BT' ? branchPos[commit.branch].pos : posWithOffset;
|
||||
const y = dir === 'TB' || dir === 'BT' ? posWithOffset : branchPos.get(commit.branch).pos;
|
||||
const x = dir === 'TB' || dir === 'BT' ? branchPos.get(commit.branch).pos : posWithOffset;
|
||||
|
||||
// Don't draw the commits now but calculate the positioning which is used by the branch lines etc.
|
||||
if (modifyGraph) {
|
||||
@@ -265,7 +271,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
circle.attr(
|
||||
'class',
|
||||
`commit ${commit.id} commit-highlight${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
} ${typeClass}-outer`
|
||||
);
|
||||
gBullets
|
||||
@@ -277,7 +283,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
.attr(
|
||||
'class',
|
||||
`commit ${commit.id} commit${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
} ${typeClass}-inner`
|
||||
);
|
||||
} else if (commitSymbolType === commitType.CHERRY_PICK) {
|
||||
@@ -324,7 +330,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
circle.attr('r', commit.type === commitType.MERGE ? 9 : 10);
|
||||
circle.attr(
|
||||
'class',
|
||||
`commit ${commit.id} commit${branchPos[commit.branch].index % THEME_COLOR_LIMIT}`
|
||||
`commit ${commit.id} commit${branchPos.get(commit.branch).index % THEME_COLOR_LIMIT}`
|
||||
);
|
||||
if (commitSymbolType === commitType.MERGE) {
|
||||
const circle2 = gBullets.append('circle');
|
||||
@@ -334,7 +340,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
circle2.attr(
|
||||
'class',
|
||||
`commit ${typeClass} ${commit.id} commit${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
}`
|
||||
);
|
||||
}
|
||||
@@ -345,16 +351,16 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
.attr(
|
||||
'class',
|
||||
`commit ${typeClass} ${commit.id} commit${
|
||||
branchPos[commit.branch].index % THEME_COLOR_LIMIT
|
||||
branchPos.get(commit.branch).index % THEME_COLOR_LIMIT
|
||||
}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (dir === 'TB' || dir === 'BT') {
|
||||
commitPos[commit.id] = { x: x, y: posWithOffset };
|
||||
commitPos.set(commit.id, { x: x, y: posWithOffset });
|
||||
} else {
|
||||
commitPos[commit.id] = { x: posWithOffset, y: y };
|
||||
commitPos.set(commit.id, { x: posWithOffset, y: y });
|
||||
}
|
||||
|
||||
// The first iteration over the commits are for positioning purposes, this
|
||||
@@ -482,7 +488,7 @@ const drawCommits = (svg, commits, modifyGraph) => {
|
||||
* @param {any} commitB
|
||||
* @param p1
|
||||
* @param p2
|
||||
* @param allCommits
|
||||
* @param {CommitMap} allCommits
|
||||
* @returns {boolean}
|
||||
* If there are commits between
|
||||
* commitA's x-position
|
||||
@@ -496,7 +502,7 @@ const shouldRerouteArrow = (commitA, commitB, p1, p2, allCommits) => {
|
||||
const branchToGetCurve = commitBIsFurthest ? commitB.branch : commitA.branch;
|
||||
const isOnBranchToGetCurve = (x) => x.branch === branchToGetCurve;
|
||||
const isBetweenCommits = (x) => x.seq > commitA.seq && x.seq < commitB.seq;
|
||||
return Object.values(allCommits).some((commitX) => {
|
||||
return [...allCommits.values()].some((commitX) => {
|
||||
return isBetweenCommits(commitX) && isOnBranchToGetCurve(commitX);
|
||||
});
|
||||
};
|
||||
@@ -531,11 +537,11 @@ const findLane = (y1, y2, depth = 0) => {
|
||||
* @param {any} svg
|
||||
* @param {any} commitA
|
||||
* @param {any} commitB
|
||||
* @param {any} allCommits
|
||||
* @param {CommitMap} allCommits
|
||||
*/
|
||||
const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
const p1 = commitPos[commitA.id]; // arrowStart
|
||||
const p2 = commitPos[commitB.id]; // arrowEnd
|
||||
const p1 = commitPos.get(commitA.id); // arrowStart
|
||||
const p2 = commitPos.get(commitB.id); // arrowEnd
|
||||
const arrowNeedsRerouting = shouldRerouteArrow(commitA, commitB, p1, p2, allCommits);
|
||||
// log.debug('drawArrow', p1, p2, arrowNeedsRerouting, commitA.id, commitB.id);
|
||||
|
||||
@@ -545,9 +551,9 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
let arc2 = '';
|
||||
let radius = 0;
|
||||
let offset = 0;
|
||||
let colorClassNum = branchPos[commitB.branch].index;
|
||||
let colorClassNum = branchPos.get(commitB.branch).index;
|
||||
if (commitB.type === commitType.MERGE && commitA.id !== commitB.parents[0]) {
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
}
|
||||
|
||||
let lineDef;
|
||||
@@ -570,7 +576,7 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
} else {
|
||||
// Source commit is on branch position right of destination commit
|
||||
// so render arrow leftward with colour of source branch
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
lineDef = `M ${p1.x} ${p1.y} L ${lineX + radius} ${p1.y} ${arc} ${lineX} ${
|
||||
p1.y + offset
|
||||
} L ${lineX} ${p2.y - radius} ${arc2} ${lineX - offset} ${p2.y} L ${p2.x} ${p2.y}`;
|
||||
@@ -585,7 +591,7 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
} else {
|
||||
// Source commit is on branch position right of destination commit
|
||||
// so render arrow leftward with colour of source branch
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
lineDef = `M ${p1.x} ${p1.y} L ${lineX + radius} ${p1.y} ${arc2} ${lineX} ${
|
||||
p1.y - offset
|
||||
} L ${lineX} ${p2.y + radius} ${arc} ${lineX - offset} ${p2.y} L ${p2.x} ${p2.y}`;
|
||||
@@ -600,7 +606,7 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
} else {
|
||||
// Source commit is on branch positioned below destination commit
|
||||
// so render arrow upward with colour of source branch
|
||||
colorClassNum = branchPos[commitA.branch].index;
|
||||
colorClassNum = branchPos.get(commitA.branch).index;
|
||||
lineDef = `M ${p1.x} ${p1.y} L ${p1.x} ${lineY + radius} ${arc2} ${
|
||||
p1.x + offset
|
||||
} ${lineY} L ${p2.x - radius} ${lineY} ${arc} ${p2.x} ${lineY - offset} L ${p2.x} ${p2.y}`;
|
||||
@@ -710,13 +716,17 @@ const drawArrow = (svg, commitA, commitB, allCommits) => {
|
||||
.attr('class', 'arrow arrow' + (colorClassNum % THEME_COLOR_LIMIT));
|
||||
};
|
||||
|
||||
/**
* @param {*} svg
* @param {CommitMap} commits
*/
const drawArrows = (svg, commits) => {
const gArrows = svg.append('g').attr('class', 'commit-arrows');
Object.keys(commits).forEach((key) => {
const commit = commits[key];
[...commits.keys()].forEach((key) => {
const commit = commits.get(key);
if (commit.parents && commit.parents.length > 0) {
commit.parents.forEach((parent) => {
drawArrow(gArrows, commits[parent], commit, commits);
drawArrow(gArrows, commits.get(parent), commit, commits);
});
}
});
@@ -734,7 +744,7 @@ const drawBranches = (svg, branches) => {
branches.forEach((branch, index) => {
const adjustIndexForTheme = index % THEME_COLOR_LIMIT;

const pos = branchPos[branch.name].pos;
const pos = branchPos.get(branch.name).pos;
const line = g.append('line');
line.attr('x1', 0);
line.attr('y1', pos);
@@ -822,7 +832,7 @@ export const draw = function (txt, id, ver, diagObj) {
label.node().appendChild(labelElement);
let bbox = labelElement.getBBox();

branchPos[branch.name] = { pos, index };
branchPos.set(branch.name, { pos, index });
pos +=
50 +
(gitGraphConfig.rotateCommitLabel ? 40 : 0) +

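Throughout these renderer hunks the iteration idiom changes from Object.keys(commits) to [...commits.keys()] (or commits.forEach), with values read via .get(). A small sketch of the sort-by-seq variant used in drawCommits, assuming only the commit fields visible in the diff:

interface Commit { id: string; seq: number; parents: string[]; branch: string; }
const commits = new Map<string, Commit>();
commits.set('b', { id: 'b', seq: 2, parents: ['a'], branch: 'main' });
commits.set('a', { id: 'a', seq: 1, parents: [], branch: 'main' });

const sortedKeys = [...commits.keys()].sort(
  (a, b) => commits.get(a)!.seq - commits.get(b)!.seq
);
console.log(sortedKeys); // ['a', 'b']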
@@ -16,7 +16,7 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(100);
|
||||
expect(sections.get('ash')).toBe(100);
|
||||
});
|
||||
|
||||
it('should handle simple pie', async () => {
|
||||
@@ -26,8 +26,8 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with showData', async () => {
|
||||
@@ -39,8 +39,8 @@ describe('pie', () => {
|
||||
expect(db.getShowData()).toBeTruthy();
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with comments', async () => {
|
||||
@@ -51,8 +51,8 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with a title', async () => {
|
||||
@@ -64,8 +64,8 @@ describe('pie', () => {
|
||||
expect(db.getDiagramTitle()).toBe('a 60/40 pie');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with an acc title (accTitle)', async () => {
|
||||
@@ -80,8 +80,8 @@ describe('pie', () => {
|
||||
expect(db.getAccTitle()).toBe('a neat acc title');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with an acc description (accDescr)', async () => {
|
||||
@@ -96,8 +96,8 @@ describe('pie', () => {
|
||||
expect(db.getAccDescription()).toBe('a neat description');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with a multiline acc description (accDescr)', async () => {
|
||||
@@ -115,8 +115,8 @@ describe('pie', () => {
|
||||
expect(db.getAccDescription()).toBe('a neat description\non multiple lines');
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with positive decimal', async () => {
|
||||
@@ -126,8 +126,8 @@ describe('pie', () => {
|
||||
`);
|
||||
|
||||
const sections = db.getSections();
|
||||
expect(sections['ash']).toBe(60.67);
|
||||
expect(sections['bat']).toBe(40);
|
||||
expect(sections.get('ash')).toBe(60.67);
|
||||
expect(sections.get('bat')).toBe(40);
|
||||
});
|
||||
|
||||
it('should handle simple pie with negative decimal', () => {
@@ -138,6 +138,16 @@ describe('pie', () => {
`);
}).rejects.toThrowError();
});

it('should handle unsafe properties', async () => {
await expect(
parser.parse(`pie title Unsafe props test
"__proto__" : 386
"constructor" : 85
"prototype" : 15`)
).resolves.toBeUndefined();
expect([...db.getSections().keys()]).toEqual(['__proto__', 'constructor', 'prototype']);
});
});

describe('config', () => {

@@ -16,7 +16,7 @@ import DEFAULT_CONFIG from '../../defaultConfig.js';
export const DEFAULT_PIE_CONFIG: Required<PieDiagramConfig> = DEFAULT_CONFIG.pie;

export const DEFAULT_PIE_DB: RequiredDeep<PieFields> = {
sections: {},
sections: new Map(),
showData: false,
config: DEFAULT_PIE_CONFIG,
} as const;
@@ -28,14 +28,14 @@ const config: Required<PieDiagramConfig> = structuredClone(DEFAULT_PIE_CONFIG);
const getConfig = (): Required<PieDiagramConfig> => structuredClone(config);

const clear = (): void => {
sections = structuredClone(DEFAULT_PIE_DB.sections);
sections = new Map();
showData = DEFAULT_PIE_DB.showData;
commonClear();
};

const addSection = ({ label, value }: D3Section): void => {
if (sections[label] === undefined) {
sections[label] = value;
if (!sections.has(label)) {
sections.set(label, value);
log.debug(`added new section: ${label}, with value: ${value}`);
}
};

@@ -11,7 +11,7 @@ import { selectSvgElement } from '../../rendering-util/selectSvgElement.js';
|
||||
|
||||
const createPieArcs = (sections: Sections): d3.PieArcDatum<D3Section>[] => {
|
||||
// Compute the position of each group on the pie:
|
||||
const pieData: D3Section[] = Object.entries(sections)
|
||||
const pieData: D3Section[] = [...sections.entries()]
|
||||
.map((element: [string, number]): D3Section => {
|
||||
return {
|
||||
label: element[0],
|
||||
@@ -105,8 +105,8 @@ export const draw: DrawDefinition = (text, id, _version, diagObj) => {
|
||||
.attr('class', 'pieCircle');
|
||||
|
||||
let sum = 0;
|
||||
Object.keys(sections).forEach((key: string): void => {
|
||||
sum += sections[key];
|
||||
sections.forEach((section) => {
|
||||
sum += section;
|
||||
});
|
||||
// Now add the percentage.
|
||||
// Use the centroid method to get the best coordinates.
|
||||
|
||||
@@ -34,7 +34,7 @@ export interface PieStyleOptions {
|
||||
pieOpacity: string;
|
||||
}
|
||||
|
||||
export type Sections = Record<string, number>;
|
||||
export type Sections = Map<string, number>;
|
||||
|
||||
export interface D3Section {
|
||||
label: string;
|
||||
|
||||
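
The pie hunks above all follow the same migration: the section store moves from a plain object to a Map, so user-supplied labels such as "__proto__" or "constructor" are ordinary data keys instead of colliding with Object.prototype. A minimal standalone sketch of that pattern (not the Mermaid source itself; names here are illustrative):

  // Sections keyed by label; a Map treats "__proto__", "constructor", etc. as plain keys.
  const sections = new Map<string, number>();

  function addSection(label: string, value: number): void {
    if (!sections.has(label)) {
      sections.set(label, value); // first value wins, later duplicates are ignored
    }
  }

  addSection('__proto__', 386);
  addSection('ash', 60);
  console.log([...sections.keys()]); // ['__proto__', 'ash'], no prototype pollution

With a plain object, `obj['__proto__'] = 386` would silently change the prototype rather than add an own property, and `obj['constructor'] === undefined` would be false even on an empty object, which is exactly what the "unsafe properties" test guards against.
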
@@ -20,6 +20,7 @@ const mockDB: Record<string, Mock<any, any>> = {
setYAxisBottomText: vi.fn(),
setDiagramTitle: vi.fn(),
addPoint: vi.fn(),
addClass: vi.fn(),
};

function clearMocks() {
@@ -423,4 +424,13 @@ describe('Testing quadrantChart jison file', () => {
['stroke-width: 10px']
);
});

it('should be able to handle constructor as a className', () => {
const str = `quadrantChart
classDef constructor fill:#ff0000
Microsoft:::constructor: [0.75, 0.75]
`;
expect(parserFnConstructor(str)).not.toThrow();
expect(mockDB.addClass).toHaveBeenCalledWith('constructor', ['fill:#ff0000']);
});
});

@@ -127,7 +127,7 @@ export class QuadrantBuilder {
private config: QuadrantBuilderConfig;
private themeConfig: QuadrantBuilderThemeConfig;
private data: QuadrantBuilderData;
private classes: Record<string, StylesObject> = {};
private classes: Map<string, StylesObject> = new Map();

constructor() {
this.config = this.getDefaultConfig();
@@ -202,7 +202,7 @@ export class QuadrantBuilder {
this.config = this.getDefaultConfig();
this.themeConfig = this.getDefaultThemeConfig();
this.data = this.getDefaultData();
this.classes = {};
this.classes = new Map();
log.info('clear called');
}

@@ -215,7 +215,7 @@ export class QuadrantBuilder {
}

addClass(className: string, styles: StylesObject) {
this.classes[className] = styles;
this.classes.set(className, styles);
}

setConfig(config: Partial<QuadrantBuilderConfig>) {
@@ -486,7 +486,7 @@ export class QuadrantBuilder {
.range([quadrantHeight + quadrantTop, quadrantTop]);

const points: QuadrantPointType[] = this.data.points.map((point) => {
const classStyles = this.classes[point.className as keyof typeof this.classes];
const classStyles = this.classes.get(point.className!);
if (classStyles) {
point = { ...classStyles, ...point };
}
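
The QuadrantBuilder hunks above keep classDef styles in a Map and merge them onto a point at layout time. A small illustrative sketch under trimmed-down types (not the real QuadrantBuilder API):

  interface StylesObject { color?: string; radius?: number; }
  const classes = new Map<string, StylesObject>();
  // Class names such as 'constructor' are safe as Map keys.
  classes.set('constructor', { color: '#ff0000' });

  let point: { x: number; y: number; className?: string; color?: string } = {
    x: 0.75, y: 0.75, className: 'constructor',
  };
  const classStyles = point.className ? classes.get(point.className) : undefined;
  if (classStyles) {
    point = { ...classStyles, ...point }; // the point's own fields win over class defaults
  }
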
@@ -33,14 +33,14 @@ describe('when parsing requirement diagram it...', function () {

reqDiagram.parser.parse(doc);

expect(Object.keys(requirementDb.getRequirements()).length).toBe(1);
expect(requirementDb.getRequirements().size).toBe(1);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.id).toBe(expectedId);
expect(foundReq.text).toBe(expectedText);

expect(Object.keys(requirementDb.getElements()).length).toBe(0);
expect(requirementDb.getElements().size).toBe(0);
expect(Object.keys(requirementDb.getRelationships()).length).toBe(0);
});

@@ -61,10 +61,10 @@ describe('when parsing requirement diagram it...', function () {

reqDiagram.parser.parse(doc);

expect(Object.keys(requirementDb.getRequirements()).length).toBe(0);
expect(Object.keys(requirementDb.getElements()).length).toBe(1);
expect(requirementDb.getRequirements().size).toBe(0);
expect(requirementDb.getElements().size).toBe(1);

let foundElement = requirementDb.getElements()[expectedName];
let foundElement = requirementDb.getElements().get(expectedName);
expect(foundElement).toBeDefined();
expect(foundElement.type).toBe(expectedType);
expect(foundElement.docRef).toBe(expectedDocRef);
@@ -121,8 +121,8 @@ line 2`;

reqDiagram.parser.parse(doc);

expect(Object.keys(requirementDb.getRequirements()).length).toBe(0);
expect(Object.keys(requirementDb.getElements()).length).toBe(0);
expect(requirementDb.getRequirements().size).toBe(0);
expect(requirementDb.getElements().size).toBe(0);
expect(Object.keys(requirementDb.getRelationships()).length).toBe(1);

let foundRelationship = requirementDb.getRelationships()[0];
@@ -152,7 +152,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.type).toBe(expectedType);
});
@@ -179,7 +179,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.type).toBe(expectedType);
});
@@ -206,7 +206,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.type).toBe(expectedType);
});
@@ -233,7 +233,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.type).toBe(expectedType);
});
@@ -260,7 +260,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.type).toBe(expectedType);
});
@@ -287,7 +287,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.type).toBe(expectedType);
});
@@ -314,7 +314,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.risk).toBe(expectedRisk);
});
@@ -341,7 +341,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.risk).toBe(expectedRisk);
});
@@ -368,7 +368,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.risk).toBe(expectedRisk);
});
@@ -395,7 +395,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
@@ -422,7 +422,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
@@ -449,7 +449,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
@@ -476,7 +476,7 @@ line 2`;

reqDiagram.parser.parse(doc);

let foundReq = requirementDb.getRequirements()[expectedName];
let foundReq = requirementDb.getRequirements().get(expectedName);
expect(foundReq).toBeDefined();
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
@@ -578,4 +578,25 @@ line 2`;
let foundRelationship = requirementDb.getRelationships()[0];
expect(foundRelationship.type).toBe(expectedType);
});

for (const property of ['__proto__', 'constructor']) {
it(`will accept ${property} as requirement id`, function () {
reqDiagram.parser.parse(`requirementDiagram
requirement ${property} {
id: 1
text: the test text.
risk: high
verifymethod: test
}`);
expect(reqDiagram.parser.yy.getRequirements().size).toBe(1);
});

it(`will accept ${property} as element id`, function () {
reqDiagram.parser.parse(`requirementDiagram
element ${property} {
type: simulation
}`);
expect(reqDiagram.parser.yy.getElements().size).toBe(1);
});
}
});

@@ -11,9 +11,9 @@ import {

let relations = [];
let latestRequirement = {};
let requirements = {};
let requirements = new Map();
let latestElement = {};
let elements = {};
let elements = new Map();

const RequirementType = {
REQUIREMENT: 'Requirement',
@@ -48,8 +48,8 @@ const Relationships = {
};

const addRequirement = (name, type) => {
if (requirements[name] === undefined) {
requirements[name] = {
if (!requirements.has(name)) {
requirements.set(name, {
name,
type,

@@ -57,11 +57,11 @@ const addRequirement = (name, type) => {
text: latestRequirement.text,
risk: latestRequirement.risk,
verifyMethod: latestRequirement.verifyMethod,
};
});
}
latestRequirement = {};

return requirements[name];
return requirements.get(name);
};

const getRequirements = () => requirements;
@@ -91,18 +91,17 @@ const setNewReqVerifyMethod = (verifyMethod) => {
};

const addElement = (name) => {
if (elements[name] === undefined) {
elements[name] = {
if (!elements.has(name)) {
elements.set(name, {
name,

type: latestElement.type,
docRef: latestElement.docRef,
};
});
log.info('Added new requirement: ', name);
}
latestElement = {};

return elements[name];
return elements.get(name);
};

const getElements = () => elements;
@@ -132,9 +131,9 @@ const getRelationships = () => relations;
const clear = () => {
relations = [];
latestRequirement = {};
requirements = {};
requirements = new Map();
latestElement = {};
elements = {};
elements = new Map();
commonClear();
};

@@ -191,9 +191,13 @@ const drawRelationshipFromLayout = function (svg, rel, g, insert, diagObj) {
return;
};

/**
* @param {Map<string, any>} reqs
* @param graph
* @param svgNode
*/
export const drawReqs = (reqs, graph, svgNode) => {
Object.keys(reqs).forEach((reqName) => {
let req = reqs[reqName];
reqs.forEach((req, reqName) => {
reqName = elementString(reqName);
log.info('Added new requirement: ', reqName);

@@ -236,9 +240,13 @@ export const drawReqs = (reqs, graph, svgNode) => {
});
};

/**
* @param {Map<string, any>} els
* @param graph
* @param svgNode
*/
export const drawElements = (els, graph, svgNode) => {
Object.keys(els).forEach((elName) => {
let el = els[elName];
els.forEach((el, elName) => {
const id = elementString(elName);

const groupNode = svgNode.append('g').attr('id', id);
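
The renderer hunks above replace Object.keys(...) loops with direct Map iteration; note that Map#forEach passes the value first and the key second. A hedged sketch of the same pattern, using placeholder types rather than the real requirement shapes:

  interface Requirement { name: string; type: string; }
  const requirements = new Map<string, Requirement>();
  requirements.set('test_req', { name: 'test_req', type: 'Requirement' });

  // Equivalent of the old Object.keys(reqs).forEach((name) => { const req = reqs[name]; ... })
  requirements.forEach((req, name) => {
    console.log(`drawing ${name} (${req.type})`);
  });

  // .size replaces Object.keys(reqs).length in the updated tests
  console.log(requirements.size);
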
@@ -20,5 +20,14 @@ describe('Sankey diagram', function () {

sankey.parser.parse(graphDefinition);
});

it('allows __proto__ as id', function () {
sankey.parser.parse(
prepareTextForParsing(`sankey-beta
__proto__,A,0.597
A,__proto__,0.403
`)
);
});
});
});

@@ -15,12 +15,12 @@ let links: SankeyLink[] = [];
// Array of nodes guarantees their order
let nodes: SankeyNode[] = [];
// We also have to track nodes uniqueness (by ID)
let nodesMap: Record<string, SankeyNode> = {};
let nodesMap: Map<string, SankeyNode> = new Map();

const clear = (): void => {
links = [];
nodes = [];
nodesMap = {};
nodesMap = new Map();
commonClear();
};

@@ -48,11 +48,13 @@ class SankeyNode {
const findOrCreateNode = (ID: string): SankeyNode => {
ID = common.sanitizeText(ID, getConfig());

if (!nodesMap[ID]) {
nodesMap[ID] = new SankeyNode(ID);
nodes.push(nodesMap[ID]);
let node = nodesMap.get(ID);
if (node === undefined) {
node = new SankeyNode(ID);
nodesMap.set(ID, node);
nodes.push(node);
}
return nodesMap[ID];
return node;
};

const getNodes = () => nodes;
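
The sankey change above is a find-or-create lookup rewritten for Map: read once with get(), and only when the lookup comes back undefined create the node, register it, and append it to the ordered list. A small sketch of that pattern under the same assumptions, with SankeyNode reduced to a stub:

  class SankeyNode {
    constructor(public readonly id: string) {}
  }

  const nodesMap = new Map<string, SankeyNode>();
  const nodes: SankeyNode[] = [];

  function findOrCreateNode(id: string): SankeyNode {
    let node = nodesMap.get(id);
    if (node === undefined) {
      node = new SankeyNode(id);
      nodesMap.set(id, node); // uniqueness by id
      nodes.push(node);       // insertion order preserved for rendering
    }
    return node;
  }

A single get() also avoids the truthiness pitfall of the old `if (!nodesMap[ID])` check, which a falsy stored value (or an inherited key) could confuse.
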
@@ -15,9 +15,9 @@ import type { Actor, AddMessageParams, Box, Message, Note } from './types.js';

interface SequenceState {
prevActor?: string;
actors: Record<string, Actor>;
createdActors: Record<string, number>;
destroyedActors: Record<string, number>;
actors: Map<string, Actor>;
createdActors: Map<string, number>;
destroyedActors: Map<string, number>;
boxes: Box[];
messages: Message[];
notes: Note[];
@@ -30,9 +30,9 @@ interface SequenceState {

const state = new ImperativeState<SequenceState>(() => ({
prevActor: undefined,
actors: {},
createdActors: {},
destroyedActors: {},
actors: new Map(),
createdActors: new Map(),
destroyedActors: new Map(),
boxes: [],
messages: [],
notes: [],
@@ -60,7 +60,7 @@ export const addActor = function (
type: string
) {
let assignedBox = state.records.currentBox;
const old = state.records.actors[id];
const old = state.records.actors.get(id);
if (old) {
// If already set and trying to set to a new one throw error
if (state.records.currentBox && old.box && state.records.currentBox !== old.box) {
@@ -87,7 +87,7 @@ export const addActor = function (
description = { text: name, wrap: null, type };
}

state.records.actors[id] = {
state.records.actors.set(id, {
box: assignedBox,
name: name,
description: description.text,
@@ -98,9 +98,12 @@ export const addActor = function (
actorCnt: null,
rectData: null,
type: type ?? 'participant',
};
if (state.records.prevActor && state.records.actors[state.records.prevActor]) {
state.records.actors[state.records.prevActor].nextActor = id;
});
if (state.records.prevActor) {
const prevActorInRecords = state.records.actors.get(state.records.prevActor);
if (prevActorInRecords) {
prevActorInRecords.nextActor = id;
}
}

if (state.records.currentBox) {
@@ -207,10 +210,11 @@ export const getDestroyedActors = function () {
return state.records.destroyedActors;
};
export const getActor = function (id: string) {
return state.records.actors[id];
// TODO: do we ever use this function in a way that it might return undefined?
return state.records.actors.get(id)!;
};
export const getActorKeys = function () {
return Object.keys(state.records.actors);
return [...state.records.actors.keys()];
};
export const enableSequenceNumbers = function () {
state.records.sequenceNumbersEnabled = true;
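
The getActor hunk above papers over a possibly missing actor with a non-null assertion and leaves a TODO about it. One way the lookup could be tightened, sketched here purely as an assumption and not as anything this PR does:

  // Hypothetical stricter variant; the commit itself keeps `actors.get(id)!` and the TODO.
  function getActorOrThrow<T>(actors: Map<string, T>, id: string): T {
    const actor = actors.get(id);
    if (actor === undefined) {
      throw new Error(`Unknown actor id: ${id}`);
    }
    return actor;
  }

Throwing early would surface a bad id at the call site instead of deferring the failure to the first property access on undefined.
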
@@ -499,18 +503,18 @@ export const apply = function (param: any | AddMessageParams | AddMessageParams[
|
||||
addActor(param.actor, param.actor, param.description, param.draw);
|
||||
break;
|
||||
case 'createParticipant':
|
||||
if (state.records.actors[param.actor]) {
|
||||
if (state.records.actors.has(param.actor)) {
|
||||
throw new Error(
|
||||
"It is not possible to have actors with the same id, even if one is destroyed before the next is created. Use 'AS' aliases to simulate the behavior"
|
||||
);
|
||||
}
|
||||
state.records.lastCreated = param.actor;
|
||||
addActor(param.actor, param.actor, param.description, param.draw);
|
||||
state.records.createdActors[param.actor] = state.records.messages.length;
|
||||
state.records.createdActors.set(param.actor, state.records.messages.length);
|
||||
break;
|
||||
case 'destroyParticipant':
|
||||
state.records.lastDestroyed = param.actor;
|
||||
state.records.destroyedActors[param.actor] = state.records.messages.length;
|
||||
state.records.destroyedActors.set(param.actor, state.records.messages.length);
|
||||
break;
|
||||
case 'activeStart':
|
||||
addSignal(param.actor, undefined, undefined, param.signalType);
|
||||
|
||||
@@ -192,8 +192,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -235,8 +235,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
expect(diagram.db.getAccDescription()).toBe('');
|
||||
const messages = diagram.db.getMessages();
|
||||
@@ -258,8 +258,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
expect(diagram.db.getAccDescription()).toBe('');
|
||||
const messages = diagram.db.getMessages();
|
||||
@@ -311,8 +311,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -328,8 +328,8 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors['Alice-in-Wonderland'].description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice-in-Wonderland').description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -348,9 +348,9 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice-in-Wonderland', 'Bob']);
|
||||
expect(actors['Alice-in-Wonderland'].description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect([...actors.keys()]).toEqual(['Alice-in-Wonderland', 'Bob']);
|
||||
expect(actors.get('Alice-in-Wonderland').description).toBe('Alice-in-Wonderland');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -371,9 +371,9 @@ B-->A: I am good thanks!`;
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(Object.keys(actors)).toEqual(['A', 'B']);
|
||||
expect(actors.A.description).toBe('Alice');
|
||||
expect(actors.B.description).toBe('Bob');
|
||||
expect([...actors.keys()]).toEqual(['A', 'B']);
|
||||
expect(actors.get('A').description).toBe('Alice');
|
||||
expect(actors.get('B').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(2);
|
||||
@@ -396,12 +396,12 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice', 'Bob', 'John', 'Mandy', 'Joan']);
|
||||
expect(actors.Alice.description).toBe('Alice2');
|
||||
expect(actors.Alice.type).toBe('actor');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.John.type).toBe('participant');
|
||||
expect(actors.Joan.type).toBe('participant');
|
||||
expect([...actors.keys()]).toEqual(['Alice', 'Bob', 'John', 'Mandy', 'Joan']);
|
||||
expect(actors.get('Alice').description).toBe('Alice2');
|
||||
expect(actors.get('Alice').type).toBe('actor');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
expect(actors.get('John').type).toBe('participant');
|
||||
expect(actors.get('Joan').type).toBe('participant');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(5);
|
||||
@@ -419,9 +419,9 @@ B-->A: I am good thanks!`;
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['A', 'B']);
|
||||
expect(actors.A.description).toBe('Alice');
|
||||
expect(actors.B.description).toBe('Bob');
|
||||
expect([...actors.keys()]).toEqual(['A', 'B']);
|
||||
expect(actors.get('A').description).toBe('Alice');
|
||||
expect(actors.get('B').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages.length).toBe(2);
|
||||
@@ -435,8 +435,8 @@ Alice-xBob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -450,8 +450,8 @@ Alice--xBob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -465,8 +465,8 @@ Alice-)Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -480,8 +480,8 @@ Alice--)Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -495,8 +495,8 @@ Alice->>Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -508,8 +508,8 @@ Alice->>Bob:Hello Bob, how are you?`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -526,8 +526,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -547,8 +547,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -571,8 +571,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -624,8 +624,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -645,8 +645,8 @@ deactivate Bob`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -660,8 +660,8 @@ sequenceDiagram;Alice->Bob: Hello Bob, how are you?;Note right of Bob: Bob think
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -680,8 +680,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -700,8 +700,8 @@ Bob-->Alice: I am good thanks!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -725,8 +725,8 @@ Bob-->John: Jolly good!`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -754,10 +754,10 @@ note right of 1: multiline<br \t/>text
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors['1'].description).toBe('multiline<br>text');
|
||||
expect(actors['2'].description).toBe('multiline<br/>text');
|
||||
expect(actors['3'].description).toBe('multiline<br />text');
|
||||
expect(actors['4'].description).toBe('multiline<br \t/>text');
|
||||
expect(actors.get('1').description).toBe('multiline<br>text');
|
||||
expect(actors.get('2').description).toBe('multiline<br/>text');
|
||||
expect(actors.get('3').description).toBe('multiline<br />text');
|
||||
expect(actors.get('4').description).toBe('multiline<br \t/>text');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[0].message).toBe('multiline<br>text');
|
||||
@@ -893,8 +893,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -915,8 +915,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[1].type).toEqual(diagram.db.LINETYPE.RECT_START);
|
||||
@@ -940,8 +940,8 @@ end`;
|
||||
`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
expect(messages[1].type).toEqual(diagram.db.LINETYPE.RECT_START);
|
||||
@@ -967,8 +967,8 @@ end`;
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -993,8 +993,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
actors.Bob.description = 'Bob';
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
actors.get('Bob').description = 'Bob';
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -1039,8 +1039,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Service.description).toBe('Service');
|
||||
expect(actors.DB.description).toBe('DB');
|
||||
expect(actors.get('Service').description).toBe('Service');
|
||||
expect(actors.get('DB').description).toBe('DB');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -1063,8 +1063,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Service.description).toBe('Service');
|
||||
expect(actors.DB.description).toBe('DB');
|
||||
expect(actors.get('Service').description).toBe('Service');
|
||||
expect(actors.get('DB').description).toBe('DB');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -1090,8 +1090,8 @@ sequenceDiagram
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Consumer.description).toBe('Consumer');
|
||||
expect(actors.API.description).toBe('API');
|
||||
expect(actors.get('Consumer').description).toBe('Consumer');
|
||||
expect(actors.get('API').description).toBe('API');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -1120,8 +1120,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -1142,8 +1142,8 @@ end`;
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
|
||||
expect(actors.Alice.description).toBe('Alice');
|
||||
expect(actors.Bob.description).toBe('Bob');
|
||||
expect(actors.get('Alice').description).toBe('Alice');
|
||||
expect(actors.get('Bob').description).toBe('Bob');
|
||||
|
||||
const messages = diagram.db.getMessages();
|
||||
|
||||
@@ -1309,15 +1309,15 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.a.links['Repo']).toBe('https://repo.contoso.com/');
|
||||
expect(actors.b.links['Repo']).toBe(undefined);
|
||||
expect(actors.a.links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.b.links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.a.links['On-Call']).toBe('https://oncall.contoso.com/?svc=alice');
|
||||
expect(actors.c.links['Dashboard']).toBe(undefined);
|
||||
expect(actors.a.links['Endpoint']).toBe('https://alice.contoso.com');
|
||||
expect(actors.a.links['Swagger']).toBe('https://swagger.contoso.com');
|
||||
expect(actors.a.links['Tests']).toBe('https://tests.contoso.com/?svc=alice@contoso.com');
|
||||
expect(actors.get('a').links['Repo']).toBe('https://repo.contoso.com/');
|
||||
expect(actors.get('b').links['Repo']).toBe(undefined);
|
||||
expect(actors.get('a').links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.get('b').links['Dashboard']).toBe('https://dashboard.contoso.com/');
|
||||
expect(actors.get('a').links['On-Call']).toBe('https://oncall.contoso.com/?svc=alice');
|
||||
expect(actors.get('c').links['Dashboard']).toBe(undefined);
|
||||
expect(actors.get('a').links['Endpoint']).toBe('https://alice.contoso.com');
|
||||
expect(actors.get('a').links['Swagger']).toBe('https://swagger.contoso.com');
|
||||
expect(actors.get('a').links['Tests']).toBe('https://tests.contoso.com/?svc=alice@contoso.com');
|
||||
});
|
||||
|
||||
it('should handle properties EXPERIMENTAL: USE WITH CAUTION', async () => {
|
||||
@@ -1333,11 +1333,11 @@ properties b: {"class": "external-service-actor", "icon": "@computer"}
|
||||
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
expect(actors.a.properties['class']).toBe('internal-service-actor');
|
||||
expect(actors.b.properties['class']).toBe('external-service-actor');
|
||||
expect(actors.a.properties['icon']).toBe('@clock');
|
||||
expect(actors.b.properties['icon']).toBe('@computer');
|
||||
expect(actors.c.properties['class']).toBe(undefined);
|
||||
expect(actors.get('a').properties['class']).toBe('internal-service-actor');
|
||||
expect(actors.get('b').properties['class']).toBe('external-service-actor');
|
||||
expect(actors.get('a').properties['icon']).toBe('@clock');
|
||||
expect(actors.get('b').properties['icon']).toBe('@computer');
|
||||
expect(actors.get('c').properties['class']).toBe(undefined);
|
||||
});
|
||||
|
||||
it('should handle box', async () => {
|
||||
@@ -1423,14 +1423,14 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
await mermaidAPI.parse(str);
|
||||
const actors = diagram.db.getActors();
|
||||
const createdActors = diagram.db.getCreatedActors();
|
||||
expect(actors['c'].name).toEqual('c');
|
||||
expect(actors['c'].description).toEqual('c');
|
||||
expect(actors['c'].type).toEqual('participant');
|
||||
expect(createdActors['c']).toEqual(1);
|
||||
expect(actors['d'].name).toEqual('d');
|
||||
expect(actors['d'].description).toEqual('Donald');
|
||||
expect(actors['d'].type).toEqual('actor');
|
||||
expect(createdActors['d']).toEqual(3);
|
||||
expect(actors.get('c').name).toEqual('c');
|
||||
expect(actors.get('c').description).toEqual('c');
|
||||
expect(actors.get('c').type).toEqual('participant');
|
||||
expect(createdActors.get('c')).toEqual(1);
|
||||
expect(actors.get('d').name).toEqual('d');
|
||||
expect(actors.get('d').description).toEqual('Donald');
|
||||
expect(actors.get('d').type).toEqual('actor');
|
||||
expect(createdActors.get('d')).toEqual(3);
|
||||
});
|
||||
it('should handle simple actor destruction', async () => {
|
||||
const str = `
|
||||
@@ -1445,8 +1445,8 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
`;
|
||||
await mermaidAPI.parse(str);
|
||||
const destroyedActors = diagram.db.getDestroyedActors();
|
||||
expect(destroyedActors['a']).toEqual(1);
|
||||
expect(destroyedActors['c']).toEqual(3);
|
||||
expect(destroyedActors.get('a')).toEqual(1);
|
||||
expect(destroyedActors.get('c')).toEqual(3);
|
||||
});
|
||||
it('should handle the creation and destruction of the same actor', async () => {
|
||||
const str = `
|
||||
@@ -1461,8 +1461,8 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com
|
||||
await mermaidAPI.parse(str);
|
||||
const createdActors = diagram.db.getCreatedActors();
|
||||
const destroyedActors = diagram.db.getDestroyedActors();
|
||||
expect(createdActors['c']).toEqual(1);
|
||||
expect(destroyedActors['c']).toEqual(3);
|
||||
expect(createdActors.get('c')).toEqual(1);
|
||||
expect(destroyedActors.get('c')).toEqual(3);
|
||||
});
|
||||
});
|
||||
describe('when checking the bounds in a sequenceDiagram', function () {
|
||||
@@ -1668,7 +1668,7 @@ participant Alice
|
||||
await mermaidAPI.parse(str);
|
||||
|
||||
const actors = diagram.db.getActors();
|
||||
expect(Object.keys(actors)).toEqual(['Alice']);
|
||||
expect([...actors.keys()]).toEqual(['Alice']);
|
||||
});
|
||||
it('should handle one actor and a centered note', async () => {
|
||||
const str = `
|
||||
@@ -2033,4 +2033,12 @@ participant Alice`;
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it.each(['__proto__', 'constructor'])('should allow %s as an actor name', function (prop) {
|
||||
expect(
|
||||
mermaidAPI.parse(`
|
||||
sequenceDiagram
|
||||
${prop}-->>A: Hello, how are you?`)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -144,7 +144,7 @@ export const bounds = {
|
||||
this.updateBounds(_startx, _starty, _stopx, _stopy);
|
||||
},
|
||||
newActivation: function (message, diagram, actors) {
|
||||
const actorRect = actors[message.from];
|
||||
const actorRect = actors.get(message.from);
|
||||
const stackedSize = actorActivations(message.from).length || 0;
|
||||
const x = actorRect.x + actorRect.width / 2 + ((stackedSize - 1) * conf.activationWidth) / 2;
|
||||
this.activations.push({
|
||||
@@ -488,7 +488,7 @@ const drawMessage = async function (diagram, msgModel, lineStartY: number, diagO
|
||||
const addActorRenderingData = async function (
|
||||
diagram,
|
||||
actors,
|
||||
createdActors,
|
||||
createdActors: Map<string, any>,
|
||||
actorKeys,
|
||||
verticalPos,
|
||||
messages,
|
||||
@@ -500,7 +500,7 @@ const addActorRenderingData = async function (
|
||||
let maxHeight = 0;
|
||||
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
const box = actor.box;
|
||||
|
||||
// end of box
|
||||
@@ -528,7 +528,7 @@ const addActorRenderingData = async function (
|
||||
maxHeight = common.getMax(maxHeight, actor.height);
|
||||
|
||||
// if the actor is created by a message, widen margin
|
||||
if (createdActors[actor.name]) {
|
||||
if (createdActors.get(actor.name)) {
|
||||
prevMargin += actor.width / 2;
|
||||
}
|
||||
|
||||
@@ -558,7 +558,7 @@ const addActorRenderingData = async function (
|
||||
export const drawActors = async function (diagram, actors, actorKeys, isFooter) {
|
||||
if (!isFooter) {
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
// Draw the box with the attached line
|
||||
await svgDraw.drawActor(diagram, actor, conf, false);
|
||||
}
|
||||
@@ -566,7 +566,7 @@ export const drawActors = async function (diagram, actors, actorKeys, isFooter)
|
||||
let maxHeight = 0;
|
||||
bounds.bumpVerticalPos(conf.boxMargin * 2);
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
if (!actor.stopy) {
|
||||
actor.stopy = bounds.getVerticalPos();
|
||||
}
|
||||
@@ -581,7 +581,7 @@ export const drawActorsPopup = function (diagram, actors, actorKeys, doc) {
|
||||
let maxHeight = 0;
|
||||
let maxWidth = 0;
|
||||
for (const actorKey of actorKeys) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
const minMenuWidth = getRequiredPopupWidth(actor);
|
||||
const menuDimensions = svgDraw.drawPopup(
|
||||
diagram,
|
||||
@@ -624,7 +624,7 @@ const actorActivations = function (actor) {
|
||||
|
||||
const activationBounds = function (actor, actors) {
|
||||
// handle multiple stacked activations for same actor
|
||||
const actorObj = actors[actor];
|
||||
const actorObj = actors.get(actor);
|
||||
const activations = actorActivations(actor);
|
||||
|
||||
const left = activations.reduce(
|
||||
@@ -682,7 +682,7 @@ function adjustCreatedDestroyedData(
|
||||
destroyedActors
|
||||
) {
|
||||
function receiverAdjustment(actor, adjustment) {
|
||||
if (actor.x < actors[msg.from].x) {
|
||||
if (actor.x < actors.get(msg.from).x) {
|
||||
bounds.insert(
|
||||
msgModel.stopx - adjustment,
|
||||
msgModel.starty,
|
||||
@@ -702,7 +702,7 @@ function adjustCreatedDestroyedData(
|
||||
}
|
||||
|
||||
function senderAdjustment(actor, adjustment) {
|
||||
if (actor.x < actors[msg.to].x) {
|
||||
if (actor.x < actors.get(msg.to).x) {
|
||||
bounds.insert(
|
||||
msgModel.startx - adjustment,
|
||||
msgModel.starty,
|
||||
@@ -722,16 +722,16 @@ function adjustCreatedDestroyedData(
|
||||
}
|
||||
|
||||
// if it is a create message
|
||||
if (createdActors[msg.to] == index) {
|
||||
const actor = actors[msg.to];
|
||||
if (createdActors.get(msg.to) == index) {
|
||||
const actor = actors.get(msg.to);
|
||||
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 + 3 : actor.width / 2 + 3;
|
||||
receiverAdjustment(actor, adjustment);
|
||||
actor.starty = lineStartY - actor.height / 2;
|
||||
bounds.bumpVerticalPos(actor.height / 2);
|
||||
}
|
||||
// if it is a destroy sender message
|
||||
else if (destroyedActors[msg.from] == index) {
|
||||
const actor = actors[msg.from];
|
||||
else if (destroyedActors.get(msg.from) == index) {
|
||||
const actor = actors.get(msg.from);
|
||||
if (conf.mirrorActors) {
|
||||
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 : actor.width / 2;
|
||||
senderAdjustment(actor, adjustment);
|
||||
@@ -740,8 +740,8 @@ function adjustCreatedDestroyedData(
|
||||
bounds.bumpVerticalPos(actor.height / 2);
|
||||
}
|
||||
// if it is a destroy receiver message
|
||||
else if (destroyedActors[msg.to] == index) {
|
||||
const actor = actors[msg.to];
|
||||
else if (destroyedActors.get(msg.to) == index) {
|
||||
const actor = actors.get(msg.to);
|
||||
if (conf.mirrorActors) {
|
||||
const adjustment = actor.type == 'actor' ? ACTOR_TYPE_WIDTH / 2 + 3 : actor.width / 2 + 3;
|
||||
receiverAdjustment(actor, adjustment);
|
||||
@@ -1132,15 +1132,15 @@ export const draw = async function (_text: string, id: string, _version: string,
|
||||
* @returns The max message width of each actor.
|
||||
*/
|
||||
async function getMaxMessageWidthPerActor(
|
||||
actors: { [id: string]: any },
|
||||
actors: Map<string, any>,
|
||||
messages: any[],
|
||||
diagObj: Diagram
|
||||
): Promise<{ [id: string]: number }> {
|
||||
const maxMessageWidthPerActor = {};
|
||||
|
||||
for (const msg of messages) {
|
||||
if (actors[msg.to] && actors[msg.from]) {
|
||||
const actor = actors[msg.to];
|
||||
if (actors.get(msg.to) && actors.get(msg.from)) {
|
||||
const actor = actors.get(msg.to);
|
||||
|
||||
// If this is the first actor, and the message is left of it, no need to calculate the margin
|
||||
if (msg.placement === diagObj.db.PLACEMENT.LEFTOF && !actor.prevActor) {
|
||||
@@ -1258,13 +1258,13 @@ const getRequiredPopupWidth = function (actor) {
|
||||
* @param boxes - The boxes around the actors if any
|
||||
*/
|
||||
async function calculateActorMargins(
|
||||
actors: { [id: string]: any },
|
||||
actors: Map<string, any>,
|
||||
actorToMessageWidth: Awaited<ReturnType<typeof getMaxMessageWidthPerActor>>,
|
||||
boxes
|
||||
) {
|
||||
let maxHeight = 0;
|
||||
for (const prop of Object.keys(actors)) {
|
||||
const actor = actors[prop];
|
||||
for (const prop of actors.keys()) {
|
||||
const actor = actors.get(prop);
|
||||
if (actor.wrap) {
|
||||
actor.description = utils.wrapLabel(
|
||||
actor.description,
|
||||
@@ -1285,13 +1285,13 @@ async function calculateActorMargins(
|
||||
}
|
||||
|
||||
for (const actorKey in actorToMessageWidth) {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
|
||||
if (!actor) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const nextActor = actors[actor.nextActor];
|
||||
const nextActor = actors.get(actor.nextActor);
|
||||
|
||||
// No need to space out an actor that doesn't have a next link
|
||||
if (!nextActor) {
|
||||
@@ -1311,7 +1311,7 @@ async function calculateActorMargins(
|
||||
boxes.forEach((box) => {
|
||||
const textFont = messageFont(conf);
|
||||
let totalWidth = box.actorKeys.reduce((total, aKey) => {
|
||||
return (total += actors[aKey].width + (actors[aKey].margin || 0));
|
||||
return (total += actors.get(aKey).width + (actors.get(aKey).margin || 0));
|
||||
}, 0);
|
||||
|
||||
totalWidth -= 2 * conf.boxTextMargin;
|
||||
@@ -1334,8 +1334,10 @@ async function calculateActorMargins(
|
||||
}
|
||||
|
||||
const buildNoteModel = async function (msg, actors, diagObj) {
|
||||
const startx = actors[msg.from].x;
|
||||
const stopx = actors[msg.to].x;
|
||||
const fromActor = actors.get(msg.from);
|
||||
const toActor = actors.get(msg.to);
|
||||
const startx = fromActor.x;
|
||||
const stopx = toActor.x;
|
||||
const shouldWrap = msg.wrap && msg.message;
|
||||
|
||||
let textDimensions: { width: number; height: number; lineHeight?: number } = hasKatex(msg.message)
|
||||
@@ -1349,7 +1351,7 @@ const buildNoteModel = async function (msg, actors, diagObj) {
|
||||
? conf.width
|
||||
: common.getMax(conf.width, textDimensions.width + 2 * conf.noteMargin),
|
||||
height: 0,
|
||||
startx: actors[msg.from].x,
|
||||
startx: fromActor.x,
|
||||
stopx: 0,
|
||||
starty: 0,
|
||||
stopy: 0,
|
||||
@@ -1359,45 +1361,36 @@ const buildNoteModel = async function (msg, actors, diagObj) {
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, textDimensions.width)
|
||||
: common.getMax(
|
||||
actors[msg.from].width / 2 + actors[msg.to].width / 2,
|
||||
fromActor.width / 2 + toActor.width / 2,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx + (actors[msg.from].width + conf.actorMargin) / 2;
|
||||
noteModel.startx = startx + (fromActor.width + conf.actorMargin) / 2;
|
||||
} else if (msg.placement === diagObj.db.PLACEMENT.LEFTOF) {
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, textDimensions.width + 2 * conf.noteMargin)
|
||||
: common.getMax(
|
||||
actors[msg.from].width / 2 + actors[msg.to].width / 2,
|
||||
fromActor.width / 2 + toActor.width / 2,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx - noteModel.width + (actors[msg.from].width - conf.actorMargin) / 2;
|
||||
noteModel.startx = startx - noteModel.width + (fromActor.width - conf.actorMargin) / 2;
|
||||
} else if (msg.to === msg.from) {
|
||||
textDimensions = utils.calculateTextDimensions(
|
||||
shouldWrap
|
||||
? utils.wrapLabel(
|
||||
msg.message,
|
||||
common.getMax(conf.width, actors[msg.from].width),
|
||||
noteFont(conf)
|
||||
)
|
||||
? utils.wrapLabel(msg.message, common.getMax(conf.width, fromActor.width), noteFont(conf))
|
||||
: msg.message,
|
||||
noteFont(conf)
|
||||
);
|
||||
noteModel.width = shouldWrap
|
||||
? common.getMax(conf.width, actors[msg.from].width)
|
||||
: common.getMax(
|
||||
actors[msg.from].width,
|
||||
conf.width,
|
||||
textDimensions.width + 2 * conf.noteMargin
|
||||
);
|
||||
noteModel.startx = startx + (actors[msg.from].width - noteModel.width) / 2;
|
||||
? common.getMax(conf.width, fromActor.width)
|
||||
: common.getMax(fromActor.width, conf.width, textDimensions.width + 2 * conf.noteMargin);
|
||||
noteModel.startx = startx + (fromActor.width - noteModel.width) / 2;
|
||||
} else {
|
||||
noteModel.width =
|
||||
Math.abs(startx + actors[msg.from].width / 2 - (stopx + actors[msg.to].width / 2)) +
|
||||
conf.actorMargin;
|
||||
Math.abs(startx + fromActor.width / 2 - (stopx + toActor.width / 2)) + conf.actorMargin;
|
||||
noteModel.startx =
|
||||
startx < stopx
|
||||
? startx + actors[msg.from].width / 2 - conf.actorMargin / 2
|
||||
: stopx + actors[msg.to].width / 2 - conf.actorMargin / 2;
|
||||
? startx + fromActor.width / 2 - conf.actorMargin / 2
|
||||
: stopx + toActor.width / 2 - conf.actorMargin / 2;
|
||||
}
|
||||
if (shouldWrap) {
|
||||
noteModel.message = utils.wrapLabel(
|
||||
@@ -1545,7 +1538,7 @@ const calculateLoopBounds = async function (messages, actors, _maxWidthPerActor,
|
||||
break;
|
||||
case diagObj.db.LINETYPE.ACTIVE_START:
|
||||
{
|
||||
const actorRect = actors[msg.from ? msg.from : msg.to.actor];
|
||||
const actorRect = actors.get(msg.from ? msg.from : msg.to.actor);
|
||||
const stackedSize = actorActivations(msg.from ? msg.from : msg.to.actor).length;
|
||||
const x =
|
||||
actorRect.x + actorRect.width / 2 + ((stackedSize - 1) * conf.activationWidth) / 2;
|
||||
@@ -1585,8 +1578,8 @@ const calculateLoopBounds = async function (messages, actors, _maxWidthPerActor,
|
||||
stack.forEach((stk) => {
|
||||
current = stk;
|
||||
if (msgModel.startx === msgModel.stopx) {
|
||||
const from = actors[msg.from];
|
||||
const to = actors[msg.to];
|
||||
const from = actors.get(msg.from);
|
||||
const to = actors.get(msg.to);
|
||||
current.from = common.getMin(
|
||||
from.x - msgModel.width / 2,
|
||||
from.x - from.width / 2,
|
||||
|
||||
@@ -307,7 +307,7 @@ export const fixLifeLineHeights = (diagram, actors, actorKeys, conf) => {
|
||||
return;
|
||||
}
|
||||
actorKeys.forEach((actorKey) => {
|
||||
const actor = actors[actorKey];
|
||||
const actor = actors.get(actorKey);
|
||||
const actorDOM = diagram.select('#actor' + actor.actorCnt);
|
||||
if (!conf.mirrorActors && actor.stopy) {
|
||||
actorDOM.attr('y2', actor.stopy + actor.height / 2);
|
||||
|
||||
394
packages/mermaid/src/diagrams/state/dataFetcher.js
Normal file
394
packages/mermaid/src/diagrams/state/dataFetcher.js
Normal file
@@ -0,0 +1,394 @@
|
||||
import { getConfig } from '../../diagram-api/diagramAPI.js';
|
||||
import { log } from '../../logger.js';
|
||||
import common from '../common/common.js';
|
||||
import {
|
||||
CSS_DIAGRAM_CLUSTER,
|
||||
CSS_DIAGRAM_CLUSTER_ALT,
|
||||
CSS_DIAGRAM_NOTE,
|
||||
CSS_DIAGRAM_STATE,
|
||||
CSS_EDGE,
|
||||
CSS_EDGE_NOTE_EDGE,
|
||||
DEFAULT_NESTED_DOC_DIR,
|
||||
DEFAULT_STATE_TYPE,
|
||||
DIVIDER_TYPE,
|
||||
DOMID_STATE,
|
||||
DOMID_TYPE_SPACER,
|
||||
G_EDGE_ARROWHEADSTYLE,
|
||||
G_EDGE_LABELPOS,
|
||||
G_EDGE_LABELTYPE,
|
||||
G_EDGE_STYLE,
|
||||
G_EDGE_THICKNESS,
|
||||
NOTE,
|
||||
NOTE_ID,
|
||||
PARENT,
|
||||
PARENT_ID,
|
||||
SHAPE_DIVIDER,
|
||||
SHAPE_END,
|
||||
SHAPE_GROUP,
|
||||
SHAPE_NOTE,
|
||||
SHAPE_NOTEGROUP,
|
||||
SHAPE_START,
|
||||
SHAPE_STATE,
|
||||
SHAPE_STATE_WITH_DESC,
|
||||
STMT_RELATION,
|
||||
STMT_STATE,
|
||||
} from './stateCommon.js';
|
||||
|
||||
// List of nodes created from the parsed diagram statement items
|
||||
let nodeDb = {};
|
||||
|
||||
let graphItemCount = 0; // used to construct ids, etc.
|
||||
|
||||
/**
|
||||
* Create a standard string for the dom ID of an item.
|
||||
* If a type is given, insert that before the counter, preceded by the type spacer
|
||||
*
|
||||
* @param itemId
|
||||
* @param counter
|
||||
* @param {string | null} type
|
||||
* @param typeSpacer
|
||||
* @returns {string}
|
||||
*/
|
||||
export function stateDomId(itemId = '', counter = 0, type = '', typeSpacer = DOMID_TYPE_SPACER) {
|
||||
const typeStr = type !== null && type.length > 0 ? `${typeSpacer}${type}` : '';
|
||||
return `${DOMID_STATE}-${itemId}${typeStr}-${counter}`;
|
||||
}
|
||||

const setupDoc = (parentParsedItem, doc, diagramStates, nodes, edges, altFlag, useRough) => {
// graphItemCount = 0;
log.trace('items', doc);
doc.forEach((item) => {
switch (item.stmt) {
case STMT_STATE:
dataFetcher(parentParsedItem, item, diagramStates, nodes, edges, altFlag, useRough);
break;
case DEFAULT_STATE_TYPE:
dataFetcher(parentParsedItem, item, diagramStates, nodes, edges, altFlag, useRough);
break;
case STMT_RELATION:
{
dataFetcher(
parentParsedItem,
item.state1,
diagramStates,
nodes,
edges,
altFlag,
useRough
);
dataFetcher(
parentParsedItem,
item.state2,
diagramStates,
nodes,
edges,
altFlag,
useRough
);
const edgeData = {
id: 'edge' + graphItemCount,
start: item.state1.id,
end: item.state2.id,
arrowhead: 'normal',
arrowTypeEnd: 'arrow_barb',
style: G_EDGE_STYLE,
labelStyle: '',
label: common.sanitizeText(item.description, getConfig()),
arrowheadStyle: G_EDGE_ARROWHEADSTYLE,
labelpos: G_EDGE_LABELPOS,
labelType: G_EDGE_LABELTYPE,
thickness: G_EDGE_THICKNESS,
classes: CSS_EDGE,
useRough,
};
edges.push(edgeData);
//g.setEdge(item.state1.id, item.state2.id, edgeData, graphItemCount);
graphItemCount++;
}
break;
}
});
};

/**
* Get the direction from the statement items.
* Look through all of the documents (docs) in the parsedItems
* Because is a _document_ direction, the default direction is not necessarily the same as the overall default _diagram_ direction.
* @param {object[]} parsedItem - the parsed statement item to look through
* @param [defaultDir] - the direction to use if none is found
* @returns {string}
*/
const getDir = (parsedItem, defaultDir = DEFAULT_NESTED_DOC_DIR) => {
let dir = defaultDir;
if (parsedItem.doc) {
for (let i = 0; i < parsedItem.doc.length; i++) {
const parsedItemDoc = parsedItem.doc[i];
if (parsedItemDoc.stmt === 'dir') {
dir = parsedItemDoc.value;
}
}
}
return dir;
};
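Editor's note: an illustrative sketch of how the document direction is picked up if getDir were exercised directly; the parsed shapes below are assumptions modelled on the loop above (a nested `direction LR` statement is parsed into a doc entry of the form { stmt: 'dir', value: 'LR' }).

const withDirection = { doc: [{ stmt: 'dir', value: 'LR' }, { stmt: 'state', id: 'A' }] };
const withoutDirection = { doc: [{ stmt: 'state', id: 'A' }] };

getDir(withDirection); // 'LR'
getDir(withoutDirection); // falls back to DEFAULT_NESTED_DOC_DIR
getDir(withoutDirection, 'RL'); // 'RL' - the caller-supplied default wins when no dir statement exists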
/**
* Returns a new list of classes.
* In the future, this can be replaced with a class common to all diagrams.
* ClassDef information = { id: id, styles: [], textStyles: [] }
*
* @returns {{}}
*/
function newClassesList() {
return {};
}

// let direction = DEFAULT_DIAGRAM_DIRECTION;
// let rootDoc = [];
let cssClasses = newClassesList(); // style classes defined by a classDef

/**
*
* @param nodes
* @param nodeData
*/
function insertOrUpdateNode(nodes, nodeData) {
if (!nodeData.id || nodeData.id === '</join></fork>' || nodeData.id === '</choice>') {
return;
}

//Populate node style attributes if nodeData has classes defined
if (nodeData.cssClasses) {
nodeData.cssClasses.split(' ').forEach((cssClass) => {
if (cssClasses[cssClass]) {
cssClasses[cssClass].styles.forEach((style) => {
// Populate nodeData with style attributes specifically to be used by rough.js
if (style && style.startsWith('fill:')) {
nodeData.backgroundColor = style.replace('fill:', '');
}
if (style && style.startsWith('stroke:')) {
nodeData.borderColor = style.replace('stroke:', '');
}
if (style && style.startsWith('stroke-width:')) {
nodeData.borderWidth = style.replace('stroke-width:', '');
}

nodeData.cssStyles += style + ';';
});
cssClasses[cssClass].textStyles.forEach((style) => {
nodeData.labelStyle += style + ';';
if (style && style.startsWith('fill:')) {
nodeData.labelTextColor = style.replace('fill:', '');
}
});
}
});
}
const existingNodeData = nodes.find((node) => node.id === nodeData.id);
if (existingNodeData) {
//update the existing nodeData
Object.assign(existingNodeData, nodeData);
} else {
nodes.push(nodeData);
}
}
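Editor's note: the helper above merges by id rather than blindly appending. A small illustrative sketch of the observable behaviour (node shapes are made up for the example; the function itself is module-internal):

const nodes = [];
insertOrUpdateNode(nodes, { id: 'state1', shape: 'rect', cssStyles: '', labelStyle: '' });
insertOrUpdateNode(nodes, { id: 'state1', shape: 'rectWithTitle', cssStyles: '', labelStyle: '' });
// nodes.length === 1 - the second call merged into the first entry via Object.assign
insertOrUpdateNode(nodes, { id: '</choice>' }); // ignored: the guarded sentinel ids are skipped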
/**
* Get classes from the db for the info item.
* If there aren't any or if dbInfoItem isn't defined, return an empty string.
* Else create 1 string from the list of classes found
*
* @param {undefined | null | object} dbInfoItem
* @returns {string}
*/
function getClassesFromDbInfo(dbInfoItem) {
if (dbInfoItem === undefined || dbInfoItem === null) {
return '';
} else {
if (dbInfoItem.cssClasses) {
return dbInfoItem.cssClasses.join(' ');
} else {
return '';
}
}
}
export const dataFetcher = (parent, parsedItem, diagramStates, nodes, edges, altFlag, useRough) => {
const itemId = parsedItem.id;
const classStr = getClassesFromDbInfo(diagramStates[itemId]);

if (itemId !== 'root') {
let shape = SHAPE_STATE;
if (parsedItem.start === true) {
shape = SHAPE_START;
}
if (parsedItem.start === false) {
shape = SHAPE_END;
}
if (parsedItem.type !== DEFAULT_STATE_TYPE) {
shape = parsedItem.type;
}

// Add the node to our list (nodeDb)
if (!nodeDb[itemId]) {
nodeDb[itemId] = {
id: itemId,
shape,
description: common.sanitizeText(itemId, getConfig()),
cssClasses: `${classStr} ${CSS_DIAGRAM_STATE}`,
};
}

const newNode = nodeDb[itemId];

// Save data for description and group so that for instance a statement without description overwrites
// one with description @todo TODO What does this mean? If important, add a test for it

// Build of the array of description strings
if (parsedItem.description) {
if (Array.isArray(newNode.description)) {
// There already is an array of strings,add to it
newNode.shape = SHAPE_STATE_WITH_DESC;
newNode.description.push(parsedItem.description);
} else {
if (newNode.description?.length > 0) {
// if there is a description already transform it to an array
newNode.shape = SHAPE_STATE_WITH_DESC;
if (newNode.description === itemId) {
// If the previous description was this, remove it
newNode.description = [parsedItem.description];
} else {
newNode.description = [newNode.description, parsedItem.description];
}
} else {
newNode.shape = SHAPE_STATE;
newNode.description = parsedItem.description;
}
}
newNode.description = common.sanitizeTextOrArray(newNode.description, getConfig());
}

// If there's only 1 description entry, just use a regular state shape
if (newNode.description?.length === 1 && newNode.shape === SHAPE_STATE_WITH_DESC) {
newNode.shape = SHAPE_STATE;
}

// group
if (!newNode.type && parsedItem.doc) {
log.info('Setting cluster for XCX', itemId, getDir(parsedItem));
newNode.type = 'group';
newNode.isGroup = true;
newNode.dir = getDir(parsedItem);
newNode.shape = parsedItem.type === DIVIDER_TYPE ? SHAPE_DIVIDER : SHAPE_GROUP;
newNode.cssClasses =
newNode.cssClasses +
' ' +
CSS_DIAGRAM_CLUSTER +
' ' +
(altFlag ? CSS_DIAGRAM_CLUSTER_ALT : '');
}

// This is what will be added to the graph
const nodeData = {
labelStyle: '',
shape: newNode.shape,
label: newNode.description,
cssClasses: newNode.cssClasses,
cssStyles: '',
id: itemId,
dir: newNode.dir,
domId: stateDomId(itemId, graphItemCount),
type: newNode.type,
isGroup: newNode.type === 'group',
padding: 8,
rx: 10,
ry: 10,
useRough,
};

// Clear the label for dividers who have no description
if (nodeData.shape === SHAPE_DIVIDER) {
nodeData.label = '';
}

if (parent && parent.id !== 'root') {
log.trace('Setting node ', itemId, ' to be child of its parent ', parent.id);
nodeData.parentId = parent.id;
}

nodeData.centerLabel = true;

if (parsedItem.note) {
// Todo: set random id
const noteData = {
labelStyle: '',
shape: SHAPE_NOTE,
label: parsedItem.note.text,
cssClasses: CSS_DIAGRAM_NOTE,
// useHtmlLabels: false,
cssStyles: '', // styles.style,
id: itemId + NOTE_ID + '-' + graphItemCount,
domId: stateDomId(itemId, graphItemCount, NOTE),
type: newNode.type,
isGroup: newNode.type === 'group',
padding: 0, //getConfig().flowchart.padding
useRough,
};
const groupData = {
labelStyle: '',
shape: SHAPE_NOTEGROUP,
label: parsedItem.note.text,
cssClasses: newNode.cssClasses,
cssStyles: '', // styles.style,
id: itemId + PARENT_ID,
domId: stateDomId(itemId, graphItemCount, PARENT),
type: 'group',
isGroup: true,
padding: 16, //getConfig().flowchart.padding
useRough,
};
graphItemCount++;

const parentNodeId = itemId + PARENT_ID;

//add parent id to groupData
groupData.id = parentNodeId;
//add parent id to noteData
noteData.parentId = parentNodeId;

//insert groupData
insertOrUpdateNode(nodes, groupData);
//insert noteData
insertOrUpdateNode(nodes, noteData);
//insert nodeData
insertOrUpdateNode(nodes, nodeData);

let from = itemId;
let to = noteData.id;

if (parsedItem.note.position === 'left of') {
from = noteData.id;
to = itemId;
}

edges.push({
id: from + '-' + to,
start: from,
end: to,
arrowhead: 'none',
arrowTypeEnd: '',
style: G_EDGE_STYLE,
labelStyle: '',
classes: CSS_EDGE_NOTE_EDGE,
arrowheadStyle: G_EDGE_ARROWHEADSTYLE,
labelpos: G_EDGE_LABELPOS,
labelType: G_EDGE_LABELTYPE,
thickness: G_EDGE_THICKNESS,
useRough,
});
} else {
insertOrUpdateNode(nodes, nodeData);
}
}
if (parsedItem.doc) {
log.trace('Adding nodes children ');
setupDoc(parsedItem, parsedItem.doc, diagramStates, nodes, edges, !altFlag, useRough);
}
};
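Editor's note: taken together, dataFetcher walks the parsed document and fills flat nodes/edges arrays for the unified renderer. A hedged sketch of feeding one relation through it; in production the arguments come from the stateDb (parsedItem = getRootDocV2(), diagramStates = getStates()), and the literal below is a hand-written stand-in whose stmt values are assumed to mirror the STMT_* constants:

// Illustrative only - not part of the diff.
const nodes = [];
const edges = [];
dataFetcher(
  undefined,
  {
    id: 'root',
    doc: [
      {
        stmt: 'relation', // assumption: value of STMT_RELATION
        state1: { id: 'A', type: 'default' },
        state2: { id: 'B', type: 'default' },
        description: 'go',
      },
    ],
  },
  new Map(), // diagramStates - normally stateDb.getStates()
  nodes,
  edges,
  true, // altFlag
  false // useRough
);
// nodes now holds one entry per state ('A', 'B'); edges holds one transition shaped like
// { id: 'edge<N>', start: 'A', end: 'B', label: 'go', classes: 'transition', ... }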
@@ -21,8 +21,8 @@ describe('state parser can parse...', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const states = stateDiagram.parser.yy.getStates();
expect(states['namedState1']).not.toBeUndefined();
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
expect(states.get('namedState1')).not.toBeUndefined();
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
});
});

@@ -34,8 +34,8 @@ describe('state parser can parse...', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const states = stateDiagram.parser.yy.getStates();
expect(states['namedState1']).not.toBeUndefined();
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
expect(states.get('namedState1')).not.toBeUndefined();
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
});

it('no spaces before and after the colon', () => {
@@ -45,8 +45,8 @@ describe('state parser can parse...', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const states = stateDiagram.parser.yy.getStates();
expect(states['namedState1']).not.toBeUndefined();
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');
expect(states.get('namedState1')).not.toBeUndefined();
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');
});
});
});
@@ -62,8 +62,8 @@ describe('state parser can parse...', () => {
stateDiagram.parser.parse(diagramText);
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
const states = stateDiagram.parser.yy.getStates();
expect(states['assemble']).not.toBeUndefined();
expect(states['assemblies']).not.toBeUndefined();
expect(states.get('assemble')).not.toBeUndefined();
expect(states.get('assemblies')).not.toBeUndefined();
});

it('state "as" as as', function () {
@@ -73,8 +73,8 @@ describe('state parser can parse...', () => {
stateDiagram.parser.parse(diagramText);
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
const states = stateDiagram.parser.yy.getStates();
expect(states['as']).not.toBeUndefined();
expect(states['as'].descriptions.join(' ')).toEqual('as');
expect(states.get('as')).not.toBeUndefined();
expect(states.get('as').descriptions.join(' ')).toEqual('as');
});
});

@@ -99,12 +99,12 @@ describe('state parser can parse...', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const states = stateDiagram.parser.yy.getStates();
expect(states['namedState1']).not.toBeUndefined();
expect(states['bigState1']).not.toBeUndefined();
expect(states['bigState1'].doc[0].id).toEqual('bigState1InternalState');
expect(states['namedState2']).not.toBeUndefined();
expect(states['bigState2']).not.toBeUndefined();
expect(states['bigState2'].doc[0].id).toEqual('bigState2InternalState');
expect(states.get('namedState1')).not.toBeUndefined();
expect(states.get('bigState1')).not.toBeUndefined();
expect(states.get('bigState1').doc[0].id).toEqual('bigState1InternalState');
expect(states.get('namedState2')).not.toBeUndefined();
expect(states.get('bigState2')).not.toBeUndefined();
expect(states.get('bigState2').doc[0].id).toEqual('bigState2InternalState');
const relationships = stateDiagram.parser.yy.getRelations();
expect(relationships[0].id1).toEqual('namedState1');
expect(relationships[0].id2).toEqual('bigState1');
@@ -123,11 +123,23 @@ describe('state parser can parse...', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const states = stateDiagram.parser.yy.getStates();
expect(states['bigState1']).not.toBeUndefined();
expect(states['bigState1'].doc[0].id).toEqual('inner1');
expect(states['bigState1'].doc[0].description).toEqual('inner state 1');
expect(states['bigState1'].doc[1].id).toEqual('inner2');
expect(states['bigState1'].doc[1].description).toEqual('inner state 2');
expect(states.get('bigState1')).not.toBeUndefined();
expect(states.get('bigState1').doc[0].id).toEqual('inner1');
expect(states.get('bigState1').doc[0].description).toEqual('inner state 1');
expect(states.get('bigState1').doc[1].id).toEqual('inner2');
expect(states.get('bigState1').doc[1].description).toEqual('inner state 2');
});
});

describe('unsafe properties as state names', () => {
it.each(['__proto__', 'constructor'])('should allow %s as a state name', function (prop) {
stateDiagram.parser.parse(`
stateDiagram-v2
[*] --> ${prop}
${prop} --> [*]`);
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
const states = stateDiagram.parser.yy.getStates();
expect(states.get(prop)).not.toBeUndefined();
});
});
});

@@ -19,8 +19,8 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const styleClasses = stateDb.getClasses();
expect(styleClasses['exampleClass'].styles.length).toEqual(1);
expect(styleClasses['exampleClass'].styles[0]).toEqual('background:#bbb');
expect(styleClasses.get('exampleClass').styles.length).toEqual(1);
expect(styleClasses.get('exampleClass').styles[0]).toEqual('background:#bbb');
});

it('can define multiple attributes separated by commas', function () {
@@ -30,10 +30,10 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const styleClasses = stateDb.getClasses();
expect(styleClasses['exampleClass'].styles.length).toEqual(3);
expect(styleClasses['exampleClass'].styles[0]).toEqual('background:#bbb');
expect(styleClasses['exampleClass'].styles[1]).toEqual('font-weight:bold');
expect(styleClasses['exampleClass'].styles[2]).toEqual('font-style:italic');
expect(styleClasses.get('exampleClass').styles.length).toEqual(3);
expect(styleClasses.get('exampleClass').styles[0]).toEqual('background:#bbb');
expect(styleClasses.get('exampleClass').styles[1]).toEqual('font-weight:bold');
expect(styleClasses.get('exampleClass').styles[2]).toEqual('font-style:italic');
});

// need to look at what the lexer is doing
@@ -44,9 +44,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const classes = stateDiagram.parser.yy.getClasses();
expect(classes['exampleStyleClass'].styles.length).toBe(2);
expect(classes['exampleStyleClass'].styles[0]).toBe('background:#bbb');
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1.5px solid red');
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
expect(classes.get('exampleStyleClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1.5px solid red');
});

it('an attribute can have a space in the style', function () {
@@ -56,9 +56,19 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const classes = stateDiagram.parser.yy.getClasses();
expect(classes['exampleStyleClass'].styles.length).toBe(2);
expect(classes['exampleStyleClass'].styles[0]).toBe('background: #bbb');
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1.5px solid red');
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
expect(classes.get('exampleStyleClass').styles[0]).toBe('background: #bbb');
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1.5px solid red');
});

it('can have __proto__ or constructor as a class name', function () {
stateDiagram.parser.parse(
'stateDiagram-v2\n classDef __proto__ background:#bbb,border:1.5px solid red;\n classDef constructor background:#bbb,border:1.5px solid red;'
);
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());
const classes = stateDiagram.parser.yy.getClasses();
expect(classes.get('__proto__').styles.length).toBe(2);
expect(classes.get('constructor').styles.length).toBe(2);
});
});

@@ -74,9 +84,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const classes = stateDb.getClasses();
expect(classes['exampleStyleClass'].styles.length).toEqual(2);
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');

const state_a = stateDb.getState('a');
expect(state_a.classes.length).toEqual(1);
@@ -95,9 +105,9 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const classes = stateDiagram.parser.yy.getClasses();
expect(classes['exampleStyleClass'].styles.length).toBe(2);
expect(classes['exampleStyleClass'].styles[0]).toBe('background:#bbb');
expect(classes['exampleStyleClass'].styles[1]).toBe('border:1px solid red');
expect(classes.get('exampleStyleClass').styles.length).toBe(2);
expect(classes.get('exampleStyleClass').styles[0]).toBe('background:#bbb');
expect(classes.get('exampleStyleClass').styles[1]).toBe('border:1px solid red');

const state_a_a = stateDiagram.parser.yy.getState('a_a');
expect(state_a_a.classes.length).toEqual(1);
@@ -117,11 +127,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
const states = stateDiagram.parser.yy.getStates();
const classes = stateDiagram.parser.yy.getClasses();

expect(classes['exampleStyleClass'].styles.length).toEqual(2);
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');

expect(states['b'].classes[0]).toEqual('exampleStyleClass');
expect(states.get('b').classes[0]).toEqual('exampleStyleClass');
});

it('can be applied to a [*] state', () => {
@@ -136,11 +146,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
const states = stateDiagram.parser.yy.getStates();
const classes = stateDiagram.parser.yy.getClasses();

expect(classes['exampleStyleClass'].styles.length).toEqual(2);
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');

expect(states['root_start'].classes[0]).toEqual('exampleStyleClass');
expect(states.get('root_start').classes[0]).toEqual('exampleStyleClass');
});

it('can be applied to a comma separated list of states', function () {
@@ -155,11 +165,11 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
let classes = stateDiagram.parser.yy.getClasses();
let states = stateDiagram.parser.yy.getStates();

expect(classes['exampleStyleClass'].styles.length).toEqual(2);
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
expect(states['a'].classes[0]).toEqual('exampleStyleClass');
expect(states['b'].classes[0]).toEqual('exampleStyleClass');
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');
expect(states.get('a').classes[0]).toEqual('exampleStyleClass');
expect(states.get('b').classes[0]).toEqual('exampleStyleClass');
});

it('a comma separated list of states may or may not have spaces after commas', function () {
@@ -174,13 +184,13 @@ describe('ClassDefs and classes when parsing a State diagram', () => {
const classes = stateDiagram.parser.yy.getClasses();
const states = stateDiagram.parser.yy.getStates();

expect(classes['exampleStyleClass'].styles.length).toEqual(2);
expect(classes['exampleStyleClass'].styles[0]).toEqual('background:#bbb');
expect(classes['exampleStyleClass'].styles[1]).toEqual('border:1px solid red');
expect(classes.get('exampleStyleClass').styles.length).toEqual(2);
expect(classes.get('exampleStyleClass').styles[0]).toEqual('background:#bbb');
expect(classes.get('exampleStyleClass').styles[1]).toEqual('border:1px solid red');

const statesList = ['a', 'b', 'c', 'd', 'e'];
statesList.forEach((stateId) => {
expect(states[stateId].classes[0]).toEqual('exampleStyleClass');
expect(states.get(stateId).classes[0]).toEqual('exampleStyleClass');
});
});
});
@@ -202,7 +212,7 @@ describe('ClassDefs and classes when parsing a State diagram', () => {

const states = stateDiagram.parser.yy.getStates();

expect(states['Moving'].doc.length).toEqual(1);
expect(states.get('Moving').doc.length).toEqual(1);
});
});
});
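Editor's note: every spec update above follows the same mechanical pattern, because the db now hands back Map instances instead of plain objects. In condensed form (taken directly from the hunks above):

// Before: getStates()/getClasses() returned plain objects, so tests used bracket lookups.
const states = stateDiagram.parser.yy.getStates();
expect(states['namedState1'].descriptions.join(' ')).toEqual('Small State 1');

// After: they return Map instances, so lookups go through .get().
expect(states.get('namedState1').descriptions.join(' ')).toEqual('Small State 1');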
@@ -20,6 +20,44 @@ export const STMT_APPLYCLASS = 'applyClass';
export const DEFAULT_STATE_TYPE = 'default';
export const DIVIDER_TYPE = 'divider';

// Graph edge settings
export const G_EDGE_STYLE = 'fill:none';
export const G_EDGE_ARROWHEADSTYLE = 'fill: #333';
export const G_EDGE_LABELPOS = 'c';
export const G_EDGE_LABELTYPE = 'text';
export const G_EDGE_THICKNESS = 'normal';

export const SHAPE_STATE = 'rect';
export const SHAPE_STATE_WITH_DESC = 'rectWithTitle';
export const SHAPE_START = 'stateStart';
export const SHAPE_END = 'stateEnd';
export const SHAPE_DIVIDER = 'divider';
export const SHAPE_GROUP = 'roundedWithTitle';
export const SHAPE_NOTE = 'note';
export const SHAPE_NOTEGROUP = 'noteGroup';

// CSS classes
export const CSS_DIAGRAM = 'statediagram';
export const CSS_STATE = 'state';
export const CSS_DIAGRAM_STATE = `${CSS_DIAGRAM}-${CSS_STATE}`;
export const CSS_EDGE = 'transition';
export const CSS_NOTE = 'note';
export const CSS_NOTE_EDGE = 'note-edge';
export const CSS_EDGE_NOTE_EDGE = `${CSS_EDGE} ${CSS_NOTE_EDGE}`;
export const CSS_DIAGRAM_NOTE = `${CSS_DIAGRAM}-${CSS_NOTE}`;
export const CSS_CLUSTER = 'cluster';
export const CSS_DIAGRAM_CLUSTER = `${CSS_DIAGRAM}-${CSS_CLUSTER}`;
export const CSS_CLUSTER_ALT = 'cluster-alt';
export const CSS_DIAGRAM_CLUSTER_ALT = `${CSS_DIAGRAM}-${CSS_CLUSTER_ALT}`;

export const PARENT = 'parent';
export const NOTE = 'note';
export const DOMID_STATE = 'state';
export const DOMID_TYPE_SPACER = '----';
export const NOTE_ID = `${DOMID_TYPE_SPACER}${NOTE}`;
export const PARENT_ID = `${DOMID_TYPE_SPACER}${PARENT}`;
// --------------------------------------

export default {
DEFAULT_DIAGRAM_DIRECTION,
DEFAULT_NESTED_DOC_DIR,
@@ -29,4 +67,35 @@ export default {
STMT_APPLYCLASS,
DEFAULT_STATE_TYPE,
DIVIDER_TYPE,
G_EDGE_STYLE,
G_EDGE_ARROWHEADSTYLE,
G_EDGE_LABELPOS,
G_EDGE_LABELTYPE,
G_EDGE_THICKNESS,
CSS_EDGE,
CSS_DIAGRAM,
SHAPE_STATE,
SHAPE_STATE_WITH_DESC,
SHAPE_START,
SHAPE_END,
SHAPE_DIVIDER,
SHAPE_GROUP,
SHAPE_NOTE,
SHAPE_NOTEGROUP,
CSS_STATE,
CSS_DIAGRAM_STATE,
CSS_NOTE,
CSS_NOTE_EDGE,
CSS_EDGE_NOTE_EDGE,
CSS_DIAGRAM_NOTE,
CSS_CLUSTER,
CSS_DIAGRAM_CLUSTER,
CSS_CLUSTER_ALT,
CSS_DIAGRAM_CLUSTER_ALT,
PARENT,
NOTE,
DOMID_STATE,
DOMID_TYPE_SPACER,
NOTE_ID,
PARENT_ID,
};
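Editor's note: several of the new constants are composed from one another, so their resolved values may be easier to read spelled out. A runnable sanity-check sketch (computed from the definitions above, not part of the diff):

import { CSS_DIAGRAM_STATE, CSS_EDGE_NOTE_EDGE, CSS_DIAGRAM_NOTE, NOTE_ID, PARENT_ID } from './stateCommon.js';

console.assert(CSS_DIAGRAM_STATE === 'statediagram-state');
console.assert(CSS_EDGE_NOTE_EDGE === 'transition note-edge');
console.assert(CSS_DIAGRAM_NOTE === 'statediagram-note');
console.assert(NOTE_ID === '----note');
console.assert(PARENT_ID === '----parent');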
@@ -11,6 +11,7 @@ import {
setDiagramTitle,
getDiagramTitle,
} from '../common/commonDb.js';
import { dataFetcher } from './dataFetcher.js';

import {
DEFAULT_DIAGRAM_DIRECTION,
@@ -20,7 +21,34 @@ import {
STMT_APPLYCLASS,
DEFAULT_STATE_TYPE,
DIVIDER_TYPE,
G_EDGE_STYLE,
G_EDGE_ARROWHEADSTYLE,
G_EDGE_LABELPOS,
G_EDGE_LABELTYPE,
G_EDGE_THICKNESS,
CSS_EDGE,
DEFAULT_NESTED_DOC_DIR,
SHAPE_DIVIDER,
SHAPE_GROUP,
CSS_DIAGRAM_CLUSTER,
CSS_DIAGRAM_CLUSTER_ALT,
CSS_DIAGRAM_STATE,
SHAPE_STATE_WITH_DESC,
SHAPE_STATE,
SHAPE_START,
SHAPE_END,
SHAPE_NOTE,
SHAPE_NOTEGROUP,
CSS_DIAGRAM_NOTE,
DOMID_TYPE_SPACER,
DOMID_STATE,
NOTE_ID,
PARENT_ID,
NOTE,
PARENT,
CSS_EDGE_NOTE_EDGE,
} from './stateCommon.js';
import { node } from 'stylis';

const START_NODE = '[*]';
const START_TYPE = 'start';
@@ -37,20 +65,23 @@ const STYLECLASS_SEP = ',';
* In the future, this can be replaced with a class common to all diagrams.
* ClassDef information = { id: id, styles: [], textStyles: [] }
*
* @returns {{}}
* @returns {Map<string, any>}
*/
function newClassesList() {
return {};
return new Map();
}

let direction = DEFAULT_DIAGRAM_DIRECTION;
let rootDoc = [];
let classes = newClassesList(); // style classes defined by a classDef

// --------------------------------------

const newDoc = () => {
return {
/** @type {{ id1: string, id2: string, relationTitle: string }[]} */
relations: [],
states: {},
states: new Map(),
documents: {},
};
};
@@ -217,9 +248,9 @@ export const addState = function (
) {
const trimmedId = id?.trim();
// add the state if needed
if (currentDocument.states[trimmedId] === undefined) {
if (!currentDocument.states.has(trimmedId)) {
log.info('Adding state ', trimmedId, descr);
currentDocument.states[trimmedId] = {
currentDocument.states.set(trimmedId, {
id: trimmedId,
descriptions: [],
type,
@@ -228,13 +259,13 @@ export const addState = function (
classes: [],
styles: [],
textStyles: [],
};
});
} else {
if (!currentDocument.states[trimmedId].doc) {
currentDocument.states[trimmedId].doc = doc;
if (!currentDocument.states.get(trimmedId).doc) {
currentDocument.states.get(trimmedId).doc = doc;
}
if (!currentDocument.states[trimmedId].type) {
currentDocument.states[trimmedId].type = type;
if (!currentDocument.states.get(trimmedId).type) {
currentDocument.states.get(trimmedId).type = type;
}
}

@@ -250,11 +281,9 @@ export const addState = function (
}

if (note) {
currentDocument.states[trimmedId].note = note;
currentDocument.states[trimmedId].note.text = common.sanitizeText(
currentDocument.states[trimmedId].note.text,
getConfig()
);
const doc2 = currentDocument.states.get(trimmedId);
doc2.note = note;
doc2.note.text = common.sanitizeText(doc2.note.text, getConfig());
}

if (classes) {
@@ -291,7 +320,7 @@ export const clear = function (saveCommon) {
};

export const getState = function (id) {
return currentDocument.states[id];
return currentDocument.states.get(id);
};

export const getStates = function () {
@@ -429,7 +458,7 @@ export const addRelation = function (item1, item2, title) {
};

export const addDescription = function (id, descr) {
const theState = currentDocument.states[id];
const theState = currentDocument.states.get(id);
const _descr = descr.startsWith(':') ? descr.replace(':', '').trim() : descr;
theState.descriptions.push(common.sanitizeText(_descr, getConfig()));
};
@@ -456,10 +485,10 @@ const getDividerId = () => {
*/
export const addStyleClass = function (id, styleAttributes = '') {
// create a new style class object with this id
if (classes[id] === undefined) {
classes[id] = { id: id, styles: [], textStyles: [] }; // This is a classDef
if (!classes.has(id)) {
classes.set(id, { id: id, styles: [], textStyles: [] }); // This is a classDef
}
const foundClass = classes[id];
const foundClass = classes.get(id);
if (styleAttributes !== undefined && styleAttributes !== null) {
styleAttributes.split(STYLECLASS_SEP).forEach((attrib) => {
// remove any trailing ;
@@ -541,8 +570,28 @@ const setDirection = (dir) => {

const trimColon = (str) => (str && str[0] === ':' ? str.substr(1).trim() : str.trim());

export const getData = () => {
const nodes = [];
const edges = [];

// for (const key in currentDocument.states) {
// if (currentDocument.states.hasOwnProperty(key)) {
// nodes.push({...currentDocument.states[key]});
// }
// }
extract(getRootDocV2());
const diagramStates = getStates();
const config = getConfig();
const useRough = config.look === 'handdrawn';
dataFetcher(undefined, getRootDocV2(), diagramStates, nodes, edges, true, useRough);

console.log('State Nodes XDX:', nodes);
return { nodes, edges, other: {}, config };
};

export default {
getConfig: () => getConfig().state,
getData,
addState,
clear,
getState,
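Editor's note: getData() is the new bridge between the stateDb and the unified renderer: it re-extracts the root document, lets dataFetcher flatten it into nodes and edges, and returns those together with the active config. A hedged consumer sketch (only valid after the jison parser has populated the db; field lists are abbreviated):

// Illustrative only - mirrors what the unified renderer does with the result.
import stateDb from './stateDb.js';

const { nodes, edges, config } = stateDb.getData();
// nodes: flat array of node/cluster objects (id, shape, label, cssClasses, parentId, ...)
// edges: flat array of transition objects (id, start, end, label, classes: 'transition', ...)
// config: the current Mermaid config; config.look === 'handdrawn' is what sets the useRough flag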
@@ -12,10 +12,10 @@ describe('State Diagram stateDb', () => {

stateDb.addStyleClass(newStyleClassId, newStyleClassAttribs);
const styleClasses = stateDb.getClasses();
expect(styleClasses[newStyleClassId].id).toEqual(newStyleClassId);
expect(styleClasses[newStyleClassId].styles.length).toEqual(2);
expect(styleClasses[newStyleClassId].styles[0]).toEqual('font-weight:bold');
expect(styleClasses[newStyleClassId].styles[1]).toEqual('border:blue');
expect(styleClasses.get(newStyleClassId).id).toEqual(newStyleClassId);
expect(styleClasses.get(newStyleClassId).styles.length).toEqual(2);
expect(styleClasses.get(newStyleClassId).styles[0]).toEqual('font-weight:bold');
expect(styleClasses.get(newStyleClassId).styles[1]).toEqual('border:blue');
});
});

@@ -34,15 +34,15 @@ describe('State Diagram stateDb', () => {

stateDb.addDescription(testStateId, restOfTheDescription);
let states = stateDb.getStates();
expect(states[testStateId].descriptions[0]).toEqual(restOfTheDescription);
expect(states.get(testStateId).descriptions[0]).toEqual(restOfTheDescription);

stateDb.addDescription(testStateId, oneLeadingColon);
states = stateDb.getStates();
expect(states[testStateId].descriptions[1]).toEqual(restOfTheDescription);
expect(states.get(testStateId).descriptions[1]).toEqual(restOfTheDescription);

stateDb.addDescription(testStateId, twoLeadingColons);
states = stateDb.getStates();
expect(states[testStateId].descriptions[2]).toEqual(`:${restOfTheDescription}`);
expect(states.get(testStateId).descriptions[2]).toEqual(`:${restOfTheDescription}`);
});

it('adds each description to the array of descriptions', () => {
@@ -51,10 +51,10 @@ describe('State Diagram stateDb', () => {
stateDb.addDescription(testStateId, 'description 2');

let states = stateDb.getStates();
expect(states[testStateId].descriptions.length).toEqual(3);
expect(states[testStateId].descriptions[0]).toEqual('description 0');
expect(states[testStateId].descriptions[1]).toEqual('description 1');
expect(states[testStateId].descriptions[2]).toEqual('description 2');
expect(states.get(testStateId).descriptions.length).toEqual(3);
expect(states.get(testStateId).descriptions[0]).toEqual('description 0');
expect(states.get(testStateId).descriptions[1]).toEqual('description 1');
expect(states.get(testStateId).descriptions[2]).toEqual('description 2');
});

it('sanitizes on the description', () => {
@@ -63,13 +63,13 @@ describe('State Diagram stateDb', () => {
'desc outside the script <script>the description</script>'
);
let states = stateDb.getStates();
expect(states[testStateId].descriptions[0]).toEqual('desc outside the script ');
expect(states.get(testStateId).descriptions[0]).toEqual('desc outside the script ');
});

it('adds the description to the state with the given id', () => {
stateDb.addDescription(testStateId, 'the description');
let states = stateDb.getStates();
expect(states[testStateId].descriptions[0]).toEqual('the description');
expect(states.get(testStateId).descriptions[0]).toEqual('the description');
});
});
});

@@ -405,7 +405,7 @@ describe('state diagram V2, ', function () {
stateDiagram.parser.yy.extract(stateDiagram.parser.yy.getRootDocV2());

const states = stateDb.getStates();
expect(states['Active'].doc[0].id).toEqual('Idle');
expect(states.get('Active').doc[0].id).toEqual('Idle');

const rels = stateDb.getRelations();
const rel_Inactive_Idle = rels.find((rel) => rel.id1 === 'Inactive' && rel.id2 === 'Idle');

@@ -3,7 +3,8 @@ import type { DiagramDefinition } from '../../diagram-api/types.js';
import parser from './parser/stateDiagram.jison';
import db from './stateDb.js';
import styles from './styles.js';
import renderer from './stateRenderer-v2.js';
//import renderer from './stateRenderer-v2.js';
import renderer from './stateRenderer-v3-unified.js';

export const diagram: DiagramDefinition = {
parser,

@@ -0,0 +1,20 @@
import type { DiagramDefinition } from '../../diagram-api/types.js';
// @ts-ignore: JISON doesn't support types
import parser from './parser/stateDiagram.jison';
import db from './stateDb.js';
import styles from './styles.js';
import renderer from './stateRenderer-v3-unified.js';

export const diagram: DiagramDefinition = {
parser,
db,
renderer,
styles,
init: (cnf) => {
if (!cnf.state) {
cnf.state = {};
}
cnf.state.arrowMarkerAbsolute = cnf.arrowMarkerAbsolute;
db.clear();
},
};

@@ -81,7 +81,7 @@ export const setConf = function (cnf) {
*
* @param {string} text - the diagram text to be parsed
* @param diagramObj
* @returns {Record<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles (a Map with keys = strings, values = )
* @returns {Map<string, import('../../diagram-api/types.js').DiagramStyleClassDef>} ClassDef styles (a Map with keys = strings, values = )
*/
export const getClasses = function (text, diagramObj) {
diagramObj.db.extract(diagramObj.db.getRootDocV2());
@@ -129,13 +129,13 @@ export function stateDomId(itemId = '', counter = 0, type = '', typeSpacer = DOM
* @param g - graph
* @param {object} parent
* @param {object} parsedItem - parsed statement item
* @param {object[]} diagramStates - the list of all known states for the diagram
* @param {Map<string, object>} diagramStates - the list of all known states for the diagram
* @param {object} diagramDb
* @param {boolean} altFlag - for clusters, add the "statediagram-cluster-alt" CSS class
*/
const setupNode = (g, parent, parsedItem, diagramStates, diagramDb, altFlag) => {
const itemId = parsedItem.id;
const classStr = getClassesFromDbInfo(diagramStates[itemId]);
const classStr = getClassesFromDbInfo(diagramStates.get(itemId));

if (itemId !== 'root') {
let shape = SHAPE_STATE;
@@ -308,7 +308,7 @@ const setupNode = (g, parent, parsedItem, diagramStates, diagramDb, altFlag) =>
* @param g
* @param parentParsedItem - parsed Item that is the parent of this document (doc)
* @param doc - the document to set up; it is a list of parsed statements
* @param {object[]} diagramStates - the list of all known states for the diagram
* @param {Map<string, object>} diagramStates - the list of all known states for the diagram
* @param diagramDb
* @param {boolean} altFlag
* @todo This duplicates some of what is done in stateDb.js extract method

@@ -0,0 +1,93 @@
import { getConfig } from '../../diagram-api/diagramAPI.js';
import type { DiagramStyleClassDef } from '../../diagram-api/types.js';
import { log } from '../../logger.js';
import { getDiagramElements } from '../../rendering-util/insertElementsForSize.js';
import { render } from '../../rendering-util/render.js';
import { setupViewPortForSVG } from '../../rendering-util/setupViewPortForSVG.js';
import type { LayoutData } from '../../rendering-util/types.js';
import utils from '../../utils.js';
import { CSS_DIAGRAM, DEFAULT_NESTED_DOC_DIR } from './stateCommon.js';

/**
* Get the direction from the statement items.
* Look through all of the documents (docs) in the parsedItems
* Because is a _document_ direction, the default direction is not necessarily the same as the overall default _diagram_ direction.
* @param parsedItem - the parsed statement item to look through
* @param defaultDir - the direction to use if none is found
* @returns The direction to use
*/
const getDir = (parsedItem: any, defaultDir = DEFAULT_NESTED_DOC_DIR) => {
let dir = defaultDir;
if (parsedItem.doc) {
for (let i = 0; i < parsedItem.doc.length; i++) {
const parsedItemDoc = parsedItem.doc[i];
if (parsedItemDoc.stmt === 'dir') {
dir = parsedItemDoc.value;
}
}
}
return dir;
};

export const getClasses = function (
text: string,
diagramObj: any
): Map<string, DiagramStyleClassDef> {
diagramObj.db.extract(diagramObj.db.getRootDocV2());
return diagramObj.db.getClasses();
};

export const draw = async function (text: string, id: string, _version: string, diag: any) {
log.info('REF0:');
log.info('Drawing state diagram (v2)', id);
const { securityLevel, state: conf, layout } = getConfig();
// Extracting the data from the parsed structure into a more usable form
// Not related to the refactoring, but this is the first step in the rendering process
diag.db.extract(diag.db.getRootDocV2());

const DIR = getDir(diag.db.getRootDocV2());

// The getData method provided in all supported diagrams is used to extract the data from the parsed structure
// into the Layout data format
const data4Layout = diag.db.getData() as LayoutData;
// Create the root SVG - the element is the div containing the SVG element
const { element, svg } = getDiagramElements(id, securityLevel);

// // For some diagrams this call is not needed, but in the state diagram it is
// await insertElementsForSize(element, data4Layout);

// console.log('data4Layout:', data4Layout);

// // Now we have layout data with real sizes, we can perform the layout
// const data4Rendering = doLayout(data4Layout, id, _version, 'dagre-wrapper');

// // The performRender method provided in all supported diagrams is used to render the data
// performRender(data4Rendering);

data4Layout.type = diag.type;
data4Layout.layoutAlgorithm = layout;
data4Layout.direction = DIR;

// TODO: Should we move these two to baseConfig? These types are not there in StateConfig.
// @ts-expect-error TODO: Will be fixed after config refactor
data4Layout.nodeSpacing = conf?.nodeSpacing || 50;
// @ts-expect-error TODO: Will be fixed after config refactor
data4Layout.rankSpacing = conf?.rankSpacing || 50;
data4Layout.markers = ['barb'];
data4Layout.diagramId = id;
// console.log('REF1:', data4Layout);
await render(data4Layout, svg, element);
const padding = 8;
utils.insertTitle(
element,
'statediagramTitleText',
conf?.titleTopMargin ?? 25,
diag.db.getDiagramTitle()
);
setupViewPortForSVG(svg, padding, CSS_DIAGRAM, conf?.useMaxWidth ?? true);
};

export default {
getClasses,
draw,
};
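Editor's note: the unified renderer leans entirely on the shared rendering-util pipeline: getData() supplies the LayoutData, the configured layout algorithm positions it, and render() draws it. A hedged end-to-end sketch of driving this from the public API; the config keys are taken from the destructuring in draw() above, and their exact values here are assumptions, not part of the diff:

// Illustrative only: `layout` selects the layout algorithm, `look: 'handdrawn'` is assumed
// to map to the useRough flag threaded through dataFetcher.
import mermaid from 'mermaid';

mermaid.initialize({
  layout: 'dagre', // or an alternative such as 'elk' once its layout package is registered
  look: 'handdrawn',
});

const { svg } = await mermaid.render(
  'stateDiagramExample',
  `stateDiagram-v2
    [*] --> Still
    Still --> Moving : start`
);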
@@ -1,5 +1,9 @@
# Mermaid Chart

The Future of Diagramming & Visual Collaboration

Try the Ultimate AI, Mermaid, and Visual Diagramming Suite by creating an account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

<br />

<a href="https://www.producthunt.com/posts/mermaid-chart?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-mermaid-chart" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=416671&theme=light" alt="Mermaid Chart - A smarter way to create diagrams | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
@@ -12,22 +16,26 @@

- **Editor** - A web based editor for creating and editing Mermaid diagrams.

- **Presentation** - A presentation mode for viewing Mermaid diagrams in a slideshow format.
- **Visual Editor** - The Visual Editor enables users of all skill levels to create diagrams easily and efficiently, with both GUI and code-based editing options.

- **Collaboration** - A web based collaboration feature for multi-user editing on Mermaid diagrams in real-time (Pro plan).
- **AI Chat** - Use our embedded AI Chat to generate diagrams from natural language descriptions.

- **Plugins** - A plugin system for extending the functionality of Mermaid.

Plugins are available for:
Official Mermaid Chart plugins:

- [ChatGPT](https://docs.mermaidchart.com/plugins/chatgpt)
- [Mermaid Chart GPT](https://chat.openai.com/g/g-1IRFKwq4G-mermaid-chart)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)
- [JetBrains IDE](https://plugins.jetbrains.com/plugin/23043-mermaid-chart)
- [Microsoft PowerPoint and Word](https://appsource.microsoft.com/en-us/product/office/WA200006214?tab=Overview)
- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=MermaidChart.vscode-mermaid-chart)

- **AI diagramming** - A feature for generating Mermaid diagrams from text using AI (Pro plan).
Visit our [Plugins](https://www.mermaidchart.com/plugins) page for more information.

- **More** - To learn more, visit our [Product](https://www.mermaidchart.com/product) page.
- **Collaboration** - A web based collaboration feature for multi-user editing on Mermaid diagrams in real-time (Pro and Enterprise plans).

- **Comments** - Enhance collaboration by adding comments to diagrams.

- **Presentations** - A presentation mode for viewing Mermaid diagrams in a slideshow format.

## Plans

@@ -37,11 +45,9 @@

- **Enterprise** - A paid plan for enterprise use that includes all Pro features, and more.

## Access
To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.

Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

Mermaid Chart is currently offering a 14-day free trial of our newly-launched Pro tier. To learn more, visit our [Pricing](https://mermaidchart.com/pricing) page.
Mermaid Chart is currently offering a 14-day free trial on our Pro and Enterprise tiers. Sign up for a free account at [Mermaid Chart](https://www.mermaidchart.com/app/sign-up).

## Mermaid JS contributions

Some files were not shown because too many files have changed in this diff.