Mirror of https://github.com/mermaid-js/mermaid.git, synced 2025-10-18 05:29:40 +02:00
Commit: Merge
@@ -1,12 +1,11 @@
/* eslint-env jasmine */
import * as configApi from './config';

describe('when working with site config', function() {
describe('when working with site config', function () {
beforeEach(() => {
// Resets the site config to default config
configApi.setSiteConfig({});
});
it('should set site config and config properly', function() {
it('should set site config and config properly', function () {
let config_0 = { foo: 'bar', bar: 0 };
configApi.setSiteConfig(config_0);
let config_1 = configApi.getSiteConfig();
@@ -15,18 +14,22 @@ describe('when working with site config', function() {
expect(config_1.bar).toEqual(config_0.bar);
expect(config_1).toEqual(config_2);
});
it('should respect secure keys when applying directives', function() {
let config_0 = { foo: 'bar', bar: 'cant-be-changed', secure: [...configApi.defaultConfig.secure, 'bar'] };
it('should respect secure keys when applying directives', function () {
let config_0 = {
foo: 'bar',
bar: 'cant-be-changed',
secure: [...configApi.defaultConfig.secure, 'bar'],
};
configApi.setSiteConfig(config_0);
const directive = { foo: 'baf', bar: 'should-not-be-allowed'};
const cfg = configApi.updateCurrentConfig(config_0,[directive]);
const directive = { foo: 'baf', bar: 'should-not-be-allowed' };
const cfg = configApi.updateCurrentConfig(config_0, [directive]);
expect(cfg.foo).toEqual(directive.foo);
expect(cfg.bar).toBe(config_0.bar)
expect(cfg.bar).toBe(config_0.bar);
});
it('should set reset config properly', function() {
let config_0 = { foo: 'bar', bar: 0};
it('should set reset config properly', function () {
let config_0 = { foo: 'bar', bar: 0 };
configApi.setSiteConfig(config_0);
let config_1 = { foo: 'baf'};
let config_1 = { foo: 'baf' };
configApi.setConfig(config_1);
let config_2 = configApi.getConfig();
expect(config_2.foo).toEqual(config_1.foo);
@@ -36,14 +39,14 @@ describe('when working with site config', function() {
let config_4 = configApi.getSiteConfig();
expect(config_4.foo).toEqual(config_0.foo);
});
it('should set global reset config properly', function() {
let config_0 = { foo: 'bar', bar: 0};
it('should set global reset config properly', function () {
let config_0 = { foo: 'bar', bar: 0 };
configApi.setSiteConfig(config_0);
let config_1 = configApi.getSiteConfig();
expect(config_1.foo).toEqual(config_0.foo);
let config_2 = configApi.getConfig();
expect(config_2.foo).toEqual(config_0.foo);
configApi.setConfig({ foobar: 'bar0' })
configApi.setConfig({ foobar: 'bar0' });
let config_3 = configApi.getConfig();
expect(config_3.foobar).toEqual('bar0');
configApi.reset();
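The directive test above relies on the `secure` list: keys named there cannot be overridden by a directive. A minimal standalone sketch of that merge rule (not Mermaid's actual implementation; `applyDirective` is a hypothetical helper):

const applyDirective = (currentConfig, directive, secureKeys) => {
  const next = { ...currentConfig };
  Object.keys(directive).forEach((key) => {
    // keys listed in `secure` keep their current value
    if (!secureKeys.includes(key)) {
      next[key] = directive[key];
    }
  });
  return next;
};

applyDirective(
  { foo: 'bar', bar: 'cant-be-changed' },
  { foo: 'baf', bar: 'should-not-be-allowed' },
  ['bar']
);
// -> { foo: 'baf', bar: 'cant-be-changed' }, matching the expectations in the test above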
@@ -5,61 +5,58 @@ describe('Graphlib decorations', () => {
|
||||
let node;
|
||||
beforeEach(function () {
|
||||
setLogLevel(1);
|
||||
node = { x:171, y:100, width: 210, height: 184};
|
||||
node = { x: 171, y: 100, width: 210, height: 184 };
|
||||
});
|
||||
|
||||
describe('intersection', function () {
|
||||
it('case 1 - intersection on left edge of box', function () {
|
||||
const o = {x: 31, y: 143.2257070163421};
|
||||
const i = {x: 99.3359375, y: 100}
|
||||
const o = { x: 31, y: 143.2257070163421 };
|
||||
const i = { x: 99.3359375, y: 100 };
|
||||
const int = intersection(node, o, i);
|
||||
expect(int.x).toBe(66)
|
||||
expect(int.y).toBeCloseTo(122.139)
|
||||
expect(int.x).toBe(66);
|
||||
expect(int.y).toBeCloseTo(122.139);
|
||||
});
|
||||
|
||||
it('case 2 - intersection on left edge of box', function () {
|
||||
const o = {x: 310.2578125, y: 169.88002060631462};
|
||||
const i = {x: 127.96875, y: 100};
|
||||
const o = { x: 310.2578125, y: 169.88002060631462 };
|
||||
const i = { x: 127.96875, y: 100 };
|
||||
const node2 = {
|
||||
height: 337.5,
|
||||
width: 184.4609375,
|
||||
x: 100.23046875,
|
||||
y: 176.75
|
||||
}
|
||||
y: 176.75,
|
||||
};
|
||||
const int = intersection(node2, o, i);
|
||||
expect(int.x).toBeCloseTo(192.4609375)
|
||||
expect(int.y).toBeCloseTo(145.15711441743503)
|
||||
|
||||
expect(int.x).toBeCloseTo(192.4609375);
|
||||
expect(int.y).toBeCloseTo(145.15711441743503);
|
||||
});
|
||||
it('case 3 - intersection on otop of box outside point greater then inside point', function () {
|
||||
const o = {x: 157, y: 39};
|
||||
const i = {x: 104, y: 105};
|
||||
const o = { x: 157, y: 39 };
|
||||
const i = { x: 104, y: 105 };
|
||||
const node2 = {
|
||||
width: 212,
|
||||
x: 114,
|
||||
y: 164,
|
||||
height: 176
|
||||
}
|
||||
height: 176,
|
||||
};
|
||||
const int = intersection(node2, o, i);
|
||||
expect(int.x).toBeCloseTo(133.71)
|
||||
expect(int.y).toBeCloseTo(76)
|
||||
expect(int.x).toBeCloseTo(133.71);
|
||||
expect(int.y).toBeCloseTo(76);
|
||||
// expect(int.y).toBeCloseTo(67.833)
|
||||
|
||||
});
|
||||
it('case 4 - intersection on top of box inside point greater then inside point', function () {
|
||||
const o = {x: 144, y: 38};
|
||||
const i = {x: 198, y: 105};
|
||||
const node2 = {
|
||||
width: 212,
|
||||
x: 114,
|
||||
y: 164,
|
||||
height: 176
|
||||
}
|
||||
const int = intersection(node2, o, i);
|
||||
expect(int.x).toBeCloseTo(174.626 )
|
||||
expect(int.y).toBeCloseTo(76)
|
||||
it('case 4 - intersection on top of box inside point greater then inside point', function () {
|
||||
const o = { x: 144, y: 38 };
|
||||
const i = { x: 198, y: 105 };
|
||||
const node2 = {
|
||||
width: 212,
|
||||
x: 114,
|
||||
y: 164,
|
||||
height: 176,
|
||||
};
|
||||
const int = intersection(node2, o, i);
|
||||
expect(int.x).toBeCloseTo(174.626);
|
||||
expect(int.y).toBeCloseTo(76);
|
||||
// expect(int.y).toBeCloseTo(67.833)
|
||||
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
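The intersection cases above locate where the line between an outside point and an inside point crosses the node's bounding box. For orientation, this is the classic center-based variant of that calculation (as in dagre-d3's intersect-rect); a simplified sketch only, not the exact two-point helper under test:

function intersectRectFromCenter(node, point) {
  // node.x / node.y is the box centre, width / height its full size
  const dx = point.x - node.x;
  const dy = point.y - node.y;
  const w = node.width / 2;
  const h = node.height / 2;
  let sx, sy;
  if (Math.abs(dy) * w > Math.abs(dx) * h) {
    // the ray leaves through the top or bottom edge
    sy = dy < 0 ? -h : h;
    sx = dy === 0 ? 0 : (sy * dx) / dy;
  } else {
    // the ray leaves through the left or right edge
    sx = dx < 0 ? -w : w;
    sy = dx === 0 ? 0 : (sx * dy) / dx;
  }
  return { x: node.x + sx, y: node.y + sy };
}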
@@ -1,6 +1,11 @@
|
||||
import graphlib from 'graphlib';
|
||||
import dagre from 'dagre';
|
||||
import { validate, adjustClustersAndEdges, extractDecendants, sortNodesByHierarchy } from './mermaid-graphlib';
|
||||
import {
|
||||
validate,
|
||||
adjustClustersAndEdges,
|
||||
extractDecendants,
|
||||
sortNodesByHierarchy,
|
||||
} from './mermaid-graphlib';
|
||||
import { setLogLevel, log } from '../logger';
|
||||
|
||||
describe('Graphlib decorations', () => {
|
||||
@@ -9,17 +14,17 @@ describe('Graphlib decorations', () => {
|
||||
setLogLevel(1);
|
||||
g = new graphlib.Graph({
|
||||
multigraph: true,
|
||||
compound: true
|
||||
compound: true,
|
||||
});
|
||||
g.setGraph({
|
||||
rankdir: 'TB',
|
||||
nodesep: 10,
|
||||
ranksep: 10,
|
||||
marginx: 8,
|
||||
marginy: 8
|
||||
marginy: 8,
|
||||
});
|
||||
g.setDefaultEdgeLabel(function () {
|
||||
return {};
|
||||
return {};
|
||||
});
|
||||
});
|
||||
|
||||
@@ -34,7 +39,7 @@ describe('Graphlib decorations', () => {
|
||||
end
|
||||
C1 --> C2
|
||||
*/
|
||||
g.setNode('a', { data:1});
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setParent('a', 'C1');
|
||||
@@ -65,7 +70,7 @@ describe('Graphlib decorations', () => {
|
||||
g.setEdge('C1', 'C2');
|
||||
|
||||
adjustClustersAndEdges(g);
|
||||
log.info(g.edges())
|
||||
log.info(g.edges());
|
||||
expect(validate(g)).toBe(true);
|
||||
});
|
||||
|
||||
@@ -92,7 +97,7 @@ describe('Graphlib decorations', () => {
|
||||
g.setEdge('C1', 'c', { name: 'C1-external-link' });
|
||||
|
||||
adjustClustersAndEdges(g);
|
||||
log.info(g.nodes())
|
||||
log.info(g.nodes());
|
||||
expect(g.nodes().length).toBe(2);
|
||||
expect(validate(g)).toBe(true);
|
||||
});
|
||||
@@ -114,69 +119,69 @@ describe('Graphlib decorations', () => {
|
||||
// g.setEdge('a', 'b', { name: 'C1-internal-link' });
|
||||
g.setEdge('C1', 'C2', { name: 'C1-external-link' });
|
||||
|
||||
log.info(g.nodes())
|
||||
log.info(g.nodes());
|
||||
adjustClustersAndEdges(g);
|
||||
log.info(g.nodes())
|
||||
log.info(g.nodes());
|
||||
expect(g.nodes().length).toBe(2);
|
||||
expect(validate(g)).toBe(true);
|
||||
});
|
||||
it('adjustClustersAndEdges GLB6', function () {
|
||||
/*
|
||||
it('adjustClustersAndEdges GLB6', function () {
|
||||
/*
|
||||
subgraph C1
|
||||
a
|
||||
end
|
||||
C1 --> b
|
||||
*/
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('C1', { data: 3 });
|
||||
g.setParent('a', 'C1');
|
||||
g.setEdge('C1', 'b', { data: 'link1' }, '1');
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('C1', { data: 3 });
|
||||
g.setParent('a', 'C1');
|
||||
g.setEdge('C1', 'b', { data: 'link1' }, '1');
|
||||
|
||||
// log.info(g.edges())
|
||||
adjustClustersAndEdges(g);
|
||||
log.info(g.edges())
|
||||
expect(g.nodes()).toEqual(['b', 'C1']);
|
||||
expect(g.edges().length).toBe(1);
|
||||
expect(validate(g)).toBe(true);
|
||||
expect(g.node('C1').clusterNode).toBe(true);
|
||||
// log.info(g.edges())
|
||||
adjustClustersAndEdges(g);
|
||||
log.info(g.edges());
|
||||
expect(g.nodes()).toEqual(['b', 'C1']);
|
||||
expect(g.edges().length).toBe(1);
|
||||
expect(validate(g)).toBe(true);
|
||||
expect(g.node('C1').clusterNode).toBe(true);
|
||||
|
||||
const C1Graph = g.node('C1').graph;
|
||||
expect(C1Graph.nodes()).toEqual(['a']);
|
||||
});
|
||||
it('adjustClustersAndEdges GLB7', function () {
|
||||
/*
|
||||
const C1Graph = g.node('C1').graph;
|
||||
expect(C1Graph.nodes()).toEqual(['a']);
|
||||
});
|
||||
it('adjustClustersAndEdges GLB7', function () {
|
||||
/*
|
||||
subgraph C1
|
||||
a
|
||||
end
|
||||
C1 --> b
|
||||
C1 --> c
|
||||
*/
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setParent('a', 'C1');
|
||||
g.setNode('C1', { data: 4 });
|
||||
g.setEdge('C1', 'b', { data: 'link1' }, '1');
|
||||
g.setEdge('C1', 'c', { data: 'link2' }, '2');
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setParent('a', 'C1');
|
||||
g.setNode('C1', { data: 4 });
|
||||
g.setEdge('C1', 'b', { data: 'link1' }, '1');
|
||||
g.setEdge('C1', 'c', { data: 'link2' }, '2');
|
||||
|
||||
log.info(g.node('C1'))
|
||||
adjustClustersAndEdges(g);
|
||||
log.info(g.edges())
|
||||
expect(g.nodes()).toEqual(['b', 'c', 'C1']);
|
||||
expect(g.nodes().length).toBe(3);
|
||||
expect(g.edges().length).toBe(2);
|
||||
log.info(g.node('C1'));
|
||||
adjustClustersAndEdges(g);
|
||||
log.info(g.edges());
|
||||
expect(g.nodes()).toEqual(['b', 'c', 'C1']);
|
||||
expect(g.nodes().length).toBe(3);
|
||||
expect(g.edges().length).toBe(2);
|
||||
|
||||
expect(g.edges().length).toBe(2);
|
||||
const edgeData = g.edge(g.edges()[1]);
|
||||
expect(edgeData.data).toBe('link2');
|
||||
expect(validate(g)).toBe(true);
|
||||
expect(g.edges().length).toBe(2);
|
||||
const edgeData = g.edge(g.edges()[1]);
|
||||
expect(edgeData.data).toBe('link2');
|
||||
expect(validate(g)).toBe(true);
|
||||
|
||||
const C1Graph = g.node('C1').graph;
|
||||
expect(C1Graph.nodes()).toEqual(['a']);
|
||||
});
|
||||
it('adjustClustersAndEdges GLB8', function () {
|
||||
/*
|
||||
const C1Graph = g.node('C1').graph;
|
||||
expect(C1Graph.nodes()).toEqual(['a']);
|
||||
});
|
||||
it('adjustClustersAndEdges GLB8', function () {
|
||||
/*
|
||||
subgraph A
|
||||
a
|
||||
end
|
||||
@@ -189,32 +194,31 @@ describe('Graphlib decorations', () => {
|
||||
A --> B
|
||||
A --> C
|
||||
*/
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setParent('a', 'A');
|
||||
g.setParent('b', 'B');
|
||||
g.setParent('c', 'C');
|
||||
g.setEdge('A', 'B', { data: 'link1' }, '1');
|
||||
g.setEdge('A', 'C', { data: 'link2' }, '2');
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setParent('a', 'A');
|
||||
g.setParent('b', 'B');
|
||||
g.setParent('c', 'C');
|
||||
g.setEdge('A', 'B', { data: 'link1' }, '1');
|
||||
g.setEdge('A', 'C', { data: 'link2' }, '2');
|
||||
|
||||
// log.info(g.edges())
|
||||
adjustClustersAndEdges(g);
|
||||
expect(g.nodes()).toEqual(['A', 'B', 'C']);
|
||||
expect(g.edges().length).toBe(2);
|
||||
// log.info(g.edges())
|
||||
adjustClustersAndEdges(g);
|
||||
expect(g.nodes()).toEqual(['A', 'B', 'C']);
|
||||
expect(g.edges().length).toBe(2);
|
||||
|
||||
expect(g.edges().length).toBe(2);
|
||||
const edgeData = g.edge(g.edges()[1]);
|
||||
expect(edgeData.data).toBe('link2');
|
||||
expect(validate(g)).toBe(true);
|
||||
expect(g.edges().length).toBe(2);
|
||||
const edgeData = g.edge(g.edges()[1]);
|
||||
expect(edgeData.data).toBe('link2');
|
||||
expect(validate(g)).toBe(true);
|
||||
|
||||
const CGraph = g.node('C').graph;
|
||||
expect(CGraph.nodes()).toEqual(['c']);
|
||||
const CGraph = g.node('C').graph;
|
||||
expect(CGraph.nodes()).toEqual(['c']);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct data GLB10', function () {
|
||||
/*
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct data GLB10', function () {
|
||||
/*
|
||||
subgraph C
|
||||
subgraph D
|
||||
d
|
||||
@@ -222,29 +226,29 @@ describe('Graphlib decorations', () => {
|
||||
end
|
||||
*/
|
||||
|
||||
g.setNode('C', { data: 1 });
|
||||
g.setNode('D', { data: 2 });
|
||||
g.setNode('d', { data: 3 });
|
||||
g.setParent('d', 'D');
|
||||
g.setParent('D', 'C');
|
||||
g.setNode('C', { data: 1 });
|
||||
g.setNode('D', { data: 2 });
|
||||
g.setNode('d', { data: 3 });
|
||||
g.setParent('d', 'D');
|
||||
g.setParent('D', 'C');
|
||||
|
||||
// log.info('Graph before', g.node('D'))
|
||||
// log.info('Graph before', graphlib.json.write(g))
|
||||
adjustClustersAndEdges(g);
|
||||
// log.info('Graph after', graphlib.json.write(g), g.node('C').graph)
|
||||
// log.info('Graph before', g.node('D'))
|
||||
// log.info('Graph before', graphlib.json.write(g))
|
||||
adjustClustersAndEdges(g);
|
||||
// log.info('Graph after', graphlib.json.write(g), g.node('C').graph)
|
||||
|
||||
const CGraph = g.node('C').graph;
|
||||
const DGraph = CGraph.node('D').graph;
|
||||
const CGraph = g.node('C').graph;
|
||||
const DGraph = CGraph.node('D').graph;
|
||||
|
||||
expect(CGraph.nodes()).toEqual(['D']);
|
||||
expect(DGraph.nodes()).toEqual(['d']);
|
||||
expect(CGraph.nodes()).toEqual(['D']);
|
||||
expect(DGraph.nodes()).toEqual(['d']);
|
||||
|
||||
expect(g.nodes()).toEqual(['C']);
|
||||
expect(g.nodes().length).toBe(1);
|
||||
});
|
||||
expect(g.nodes()).toEqual(['C']);
|
||||
expect(g.nodes().length).toBe(1);
|
||||
});
|
||||
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct data GLB11', function () {
|
||||
/*
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct data GLB11', function () {
|
||||
/*
|
||||
subgraph A
|
||||
a
|
||||
end
|
||||
@@ -260,86 +264,86 @@ describe('Graphlib decorations', () => {
|
||||
A --> C
|
||||
*/
|
||||
|
||||
g.setNode('C', { data: 1 });
|
||||
g.setNode('D', { data: 2 });
|
||||
g.setNode('d', { data: 3 });
|
||||
g.setNode('B', { data: 4 });
|
||||
g.setNode('b', { data: 5 });
|
||||
g.setNode('A', { data: 6 });
|
||||
g.setNode('a', { data: 7 });
|
||||
g.setParent('a', 'A');
|
||||
g.setParent('b', 'B');
|
||||
g.setParent('d', 'D');
|
||||
g.setParent('D', 'C');
|
||||
g.setEdge('A', 'B', { data: 'link1' }, '1');
|
||||
g.setEdge('A', 'C', { data: 'link2' }, '2');
|
||||
g.setNode('C', { data: 1 });
|
||||
g.setNode('D', { data: 2 });
|
||||
g.setNode('d', { data: 3 });
|
||||
g.setNode('B', { data: 4 });
|
||||
g.setNode('b', { data: 5 });
|
||||
g.setNode('A', { data: 6 });
|
||||
g.setNode('a', { data: 7 });
|
||||
g.setParent('a', 'A');
|
||||
g.setParent('b', 'B');
|
||||
g.setParent('d', 'D');
|
||||
g.setParent('D', 'C');
|
||||
g.setEdge('A', 'B', { data: 'link1' }, '1');
|
||||
g.setEdge('A', 'C', { data: 'link2' }, '2');
|
||||
|
||||
log.info('Graph before', g.node('D'))
|
||||
log.info('Graph before', graphlib.json.write(g))
|
||||
adjustClustersAndEdges(g);
|
||||
log.trace('Graph after', graphlib.json.write(g))
|
||||
expect(g.nodes()).toEqual(['C', 'B', 'A']);
|
||||
expect(g.nodes().length).toBe(3);
|
||||
expect(g.edges().length).toBe(2);
|
||||
log.info('Graph before', g.node('D'));
|
||||
log.info('Graph before', graphlib.json.write(g));
|
||||
adjustClustersAndEdges(g);
|
||||
log.trace('Graph after', graphlib.json.write(g));
|
||||
expect(g.nodes()).toEqual(['C', 'B', 'A']);
|
||||
expect(g.nodes().length).toBe(3);
|
||||
expect(g.edges().length).toBe(2);
|
||||
|
||||
const AGraph = g.node('A').graph;
|
||||
const BGraph = g.node('B').graph;
|
||||
const CGraph = g.node('C').graph;
|
||||
// log.info(CGraph.nodes());
|
||||
const DGraph = CGraph.node('D').graph;
|
||||
// log.info('DG', CGraph.children('D'));
|
||||
const AGraph = g.node('A').graph;
|
||||
const BGraph = g.node('B').graph;
|
||||
const CGraph = g.node('C').graph;
|
||||
// log.info(CGraph.nodes());
|
||||
const DGraph = CGraph.node('D').graph;
|
||||
// log.info('DG', CGraph.children('D'));
|
||||
|
||||
log.info('A', AGraph.nodes());
|
||||
expect(AGraph.nodes().length).toBe(1);
|
||||
expect(AGraph.nodes()).toEqual(['a']);
|
||||
log.trace('Nodes', BGraph.nodes())
|
||||
expect(BGraph.nodes().length).toBe(1);
|
||||
expect(BGraph.nodes()).toEqual(['b']);
|
||||
expect(CGraph.nodes()).toEqual(['D']);
|
||||
expect(CGraph.nodes().length).toEqual(1);
|
||||
log.info('A', AGraph.nodes());
|
||||
expect(AGraph.nodes().length).toBe(1);
|
||||
expect(AGraph.nodes()).toEqual(['a']);
|
||||
log.trace('Nodes', BGraph.nodes());
|
||||
expect(BGraph.nodes().length).toBe(1);
|
||||
expect(BGraph.nodes()).toEqual(['b']);
|
||||
expect(CGraph.nodes()).toEqual(['D']);
|
||||
expect(CGraph.nodes().length).toEqual(1);
|
||||
|
||||
expect(AGraph.edges().length).toBe(0);
|
||||
expect(BGraph.edges().length).toBe(0);
|
||||
expect(CGraph.edges().length).toBe(0);
|
||||
expect(DGraph.nodes()).toEqual(['d']);
|
||||
expect(DGraph.edges().length).toBe(0);
|
||||
// expect(CGraph.node('D')).toEqual({ data: 2 });
|
||||
expect(g.edges().length).toBe(2);
|
||||
expect(AGraph.edges().length).toBe(0);
|
||||
expect(BGraph.edges().length).toBe(0);
|
||||
expect(CGraph.edges().length).toBe(0);
|
||||
expect(DGraph.nodes()).toEqual(['d']);
|
||||
expect(DGraph.edges().length).toBe(0);
|
||||
// expect(CGraph.node('D')).toEqual({ data: 2 });
|
||||
expect(g.edges().length).toBe(2);
|
||||
|
||||
// expect(g.edges().length).toBe(2);
|
||||
// const edgeData = g.edge(g.edges()[1]);
|
||||
// expect(edgeData.data).toBe('link2');
|
||||
// expect(validate(g)).toBe(true);
|
||||
});
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct links GLB20', function () {
|
||||
/*
|
||||
// expect(g.edges().length).toBe(2);
|
||||
// const edgeData = g.edge(g.edges()[1]);
|
||||
// expect(edgeData.data).toBe('link2');
|
||||
// expect(validate(g)).toBe(true);
|
||||
});
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct links GLB20', function () {
|
||||
/*
|
||||
a --> b
|
||||
subgraph b [Test]
|
||||
c --> d -->e
|
||||
end
|
||||
*/
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setNode('d', { data: 3 });
|
||||
g.setNode('e', { data: 3 });
|
||||
g.setParent('c', 'b');
|
||||
g.setParent('d', 'b');
|
||||
g.setParent('e', 'b');
|
||||
g.setEdge('a', 'b', { data: 'link1' }, '1');
|
||||
g.setEdge('c', 'd', { data: 'link2' }, '2');
|
||||
g.setEdge('d', 'e', { data: 'link2' }, '2');
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setNode('d', { data: 3 });
|
||||
g.setNode('e', { data: 3 });
|
||||
g.setParent('c', 'b');
|
||||
g.setParent('d', 'b');
|
||||
g.setParent('e', 'b');
|
||||
g.setEdge('a', 'b', { data: 'link1' }, '1');
|
||||
g.setEdge('c', 'd', { data: 'link2' }, '2');
|
||||
g.setEdge('d', 'e', { data: 'link2' }, '2');
|
||||
|
||||
log.info('Graph before', graphlib.json.write(g))
|
||||
adjustClustersAndEdges(g);
|
||||
const bGraph = g.node('b').graph;
|
||||
// log.trace('Graph after', graphlib.json.write(g))
|
||||
log.info('Graph after', graphlib.json.write(bGraph));
|
||||
expect(bGraph.nodes().length).toBe(3);
|
||||
expect(bGraph.edges().length).toBe(2);
|
||||
});
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct links GLB21', function () {
|
||||
/*
|
||||
log.info('Graph before', graphlib.json.write(g));
|
||||
adjustClustersAndEdges(g);
|
||||
const bGraph = g.node('b').graph;
|
||||
// log.trace('Graph after', graphlib.json.write(g))
|
||||
log.info('Graph after', graphlib.json.write(bGraph));
|
||||
expect(bGraph.nodes().length).toBe(3);
|
||||
expect(bGraph.edges().length).toBe(2);
|
||||
});
|
||||
it('adjustClustersAndEdges the extracted graphs shall contain the correct links GLB21', function () {
|
||||
/*
|
||||
state a {
|
||||
state b {
|
||||
state c {
|
||||
@@ -348,28 +352,27 @@ describe('Graphlib decorations', () => {
|
||||
}
|
||||
}
|
||||
*/
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setNode('e', { data: 3 });
|
||||
g.setParent('b', 'a');
|
||||
g.setParent('c', 'b');
|
||||
g.setParent('e', 'c');
|
||||
g.setNode('a', { data: 1 });
|
||||
g.setNode('b', { data: 2 });
|
||||
g.setNode('c', { data: 3 });
|
||||
g.setNode('e', { data: 3 });
|
||||
g.setParent('b', 'a');
|
||||
g.setParent('c', 'b');
|
||||
g.setParent('e', 'c');
|
||||
|
||||
log.info('Graph before', graphlib.json.write(g))
|
||||
adjustClustersAndEdges(g);
|
||||
const aGraph = g.node('a').graph;
|
||||
const bGraph = aGraph.node('b').graph;
|
||||
log.info('Graph after', graphlib.json.write(aGraph));
|
||||
const cGraph = bGraph.node('c').graph;
|
||||
// log.trace('Graph after', graphlib.json.write(g))
|
||||
expect(aGraph.nodes().length).toBe(1);
|
||||
expect(bGraph.nodes().length).toBe(1);
|
||||
expect(cGraph.nodes().length).toBe(1);
|
||||
expect(bGraph.edges().length).toBe(0);
|
||||
log.info('Graph before', graphlib.json.write(g));
|
||||
adjustClustersAndEdges(g);
|
||||
const aGraph = g.node('a').graph;
|
||||
const bGraph = aGraph.node('b').graph;
|
||||
log.info('Graph after', graphlib.json.write(aGraph));
|
||||
const cGraph = bGraph.node('c').graph;
|
||||
// log.trace('Graph after', graphlib.json.write(g))
|
||||
expect(aGraph.nodes().length).toBe(1);
|
||||
expect(bGraph.nodes().length).toBe(1);
|
||||
expect(cGraph.nodes().length).toBe(1);
|
||||
expect(bGraph.edges().length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
it('adjustClustersAndEdges should handle nesting GLB77', function () {
|
||||
/*
|
||||
flowchart TB
|
||||
@@ -382,10 +385,12 @@ flowchart TB
|
||||
end
|
||||
*/
|
||||
|
||||
const exportedGraph = JSON.parse('{"options":{"directed":true,"multigraph":true,"compound":true},"nodes":[{"v":"A","value":{"labelStyle":"","shape":"rect","labelText":"A","rx":0,"ry":0,"class":"default","style":"","id":"A","width":500,"type":"group","padding":15}},{"v":"B","value":{"labelStyle":"","shape":"rect","labelText":"B","rx":0,"ry":0,"class":"default","style":"","id":"B","width":500,"type":"group","padding":15},"parent":"A"},{"v":"b","value":{"labelStyle":"","shape":"rect","labelText":"b","rx":0,"ry":0,"class":"default","style":"","id":"b","padding":15},"parent":"A"},{"v":"c","value":{"labelStyle":"","shape":"rect","labelText":"c","rx":0,"ry":0,"class":"default","style":"","id":"c","padding":15},"parent":"B"},{"v":"a","value":{"labelStyle":"","shape":"rect","labelText":"a","rx":0,"ry":0,"class":"default","style":"","id":"a","padding":15},"parent":"A"}],"edges":[{"v":"b","w":"B","name":"1","value":{"minlen":1,"arrowhead":"normal","arrowTypeStart":"arrow_open","arrowTypeEnd":"arrow_point","thickness":"normal","pattern":"solid","style":"fill:none","labelStyle":"","arrowheadStyle":"fill: #333","labelpos":"c","labelType":"text","label":"","id":"L-b-B","classes":"flowchart-link LS-b LE-B"}},{"v":"a","w":"c","name":"2","value":{"minlen":1,"arrowhead":"normal","arrowTypeStart":"arrow_open","arrowTypeEnd":"arrow_point","thickness":"normal","pattern":"solid","style":"fill:none","labelStyle":"","arrowheadStyle":"fill: #333","labelpos":"c","labelType":"text","label":"","id":"L-a-c","classes":"flowchart-link LS-a LE-c"}}],"value":{"rankdir":"TB","nodesep":50,"ranksep":50,"marginx":8,"marginy":8}}');
|
||||
const gr = graphlib.json.read(exportedGraph)
|
||||
const exportedGraph = JSON.parse(
|
||||
'{"options":{"directed":true,"multigraph":true,"compound":true},"nodes":[{"v":"A","value":{"labelStyle":"","shape":"rect","labelText":"A","rx":0,"ry":0,"class":"default","style":"","id":"A","width":500,"type":"group","padding":15}},{"v":"B","value":{"labelStyle":"","shape":"rect","labelText":"B","rx":0,"ry":0,"class":"default","style":"","id":"B","width":500,"type":"group","padding":15},"parent":"A"},{"v":"b","value":{"labelStyle":"","shape":"rect","labelText":"b","rx":0,"ry":0,"class":"default","style":"","id":"b","padding":15},"parent":"A"},{"v":"c","value":{"labelStyle":"","shape":"rect","labelText":"c","rx":0,"ry":0,"class":"default","style":"","id":"c","padding":15},"parent":"B"},{"v":"a","value":{"labelStyle":"","shape":"rect","labelText":"a","rx":0,"ry":0,"class":"default","style":"","id":"a","padding":15},"parent":"A"}],"edges":[{"v":"b","w":"B","name":"1","value":{"minlen":1,"arrowhead":"normal","arrowTypeStart":"arrow_open","arrowTypeEnd":"arrow_point","thickness":"normal","pattern":"solid","style":"fill:none","labelStyle":"","arrowheadStyle":"fill: #333","labelpos":"c","labelType":"text","label":"","id":"L-b-B","classes":"flowchart-link LS-b LE-B"}},{"v":"a","w":"c","name":"2","value":{"minlen":1,"arrowhead":"normal","arrowTypeStart":"arrow_open","arrowTypeEnd":"arrow_point","thickness":"normal","pattern":"solid","style":"fill:none","labelStyle":"","arrowheadStyle":"fill: #333","labelpos":"c","labelType":"text","label":"","id":"L-a-c","classes":"flowchart-link LS-a LE-c"}}],"value":{"rankdir":"TB","nodesep":50,"ranksep":50,"marginx":8,"marginy":8}}'
|
||||
);
|
||||
const gr = graphlib.json.read(exportedGraph);
|
||||
|
||||
log.info('Graph before', graphlib.json.write(gr))
|
||||
log.info('Graph before', graphlib.json.write(gr));
|
||||
adjustClustersAndEdges(gr);
|
||||
const aGraph = gr.node('A').graph;
|
||||
const bGraph = aGraph.node('B').graph;
|
||||
@@ -394,7 +399,6 @@ flowchart TB
|
||||
expect(aGraph.parent('c')).toBe('B');
|
||||
expect(aGraph.parent('B')).toBe(undefined);
|
||||
});
|
||||
|
||||
});
|
||||
describe('extractDecendants', function () {
|
||||
let g;
|
||||
@@ -402,14 +406,14 @@ describe('extractDecendants', function () {
|
||||
setLogLevel(1);
|
||||
g = new graphlib.Graph({
|
||||
multigraph: true,
|
||||
compound: true
|
||||
compound: true,
|
||||
});
|
||||
g.setGraph({
|
||||
rankdir: 'TB',
|
||||
nodesep: 10,
|
||||
ranksep: 10,
|
||||
marginx: 8,
|
||||
marginy: 8
|
||||
marginy: 8,
|
||||
});
|
||||
g.setDefaultEdgeLabel(function () {
|
||||
return {};
|
||||
@@ -439,9 +443,9 @@ describe('extractDecendants', function () {
|
||||
g.setEdge('A', 'C', { data: 'link2' }, '2');
|
||||
|
||||
// log.info(g.edges())
|
||||
const d1 = extractDecendants('A',g)
|
||||
const d2 = extractDecendants('B',g)
|
||||
const d3 = extractDecendants('C',g)
|
||||
const d1 = extractDecendants('A', g);
|
||||
const d2 = extractDecendants('B', g);
|
||||
const d3 = extractDecendants('C', g);
|
||||
|
||||
expect(d1).toEqual(['a']);
|
||||
expect(d2).toEqual(['b']);
|
||||
@@ -454,14 +458,14 @@ describe('sortNodesByHierarchy', function () {
|
||||
setLogLevel(1);
|
||||
g = new graphlib.Graph({
|
||||
multigraph: true,
|
||||
compound: true
|
||||
compound: true,
|
||||
});
|
||||
g.setGraph({
|
||||
rankdir: 'TB',
|
||||
nodesep: 10,
|
||||
ranksep: 10,
|
||||
marginx: 8,
|
||||
marginy: 8
|
||||
marginy: 8,
|
||||
});
|
||||
g.setDefaultEdgeLabel(function () {
|
||||
return {};
|
||||
@@ -484,7 +488,7 @@ describe('sortNodesByHierarchy', function () {
|
||||
g.setEdge('a', 'b', '1');
|
||||
expect(sortNodesByHierarchy(g)).toEqual(['a', 'A', 'B', 'b']);
|
||||
});
|
||||
it('it should sort proper en nodes are in correct order', function () {
|
||||
it('it should sort proper en nodes are in correct order', function () {
|
||||
/*
|
||||
a -->b
|
||||
subgraph B
|
||||
|
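Taken together, the GLB cases above pin down what adjustClustersAndEdges does when a subgraph is itself an edge endpoint: the cluster is collapsed into a single top-level node flagged clusterNode, and its former children are moved into a nested graph stored on that node. Recapping GLB6 (values copied from the assertions above, not new behaviour):

// subgraph C1 contains a; C1 --> b
g.setNode('a', { data: 1 });
g.setNode('b', { data: 2 });
g.setNode('C1', { data: 3 });
g.setParent('a', 'C1');
g.setEdge('C1', 'b', { data: 'link1' }, '1');

adjustClustersAndEdges(g);

g.nodes();                  // ['b', 'C1']  - 'a' is no longer a top-level node
g.node('C1').clusterNode;   // true
g.node('C1').graph.nodes(); // ['a']        - the cluster's contents live in the nested graph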
@@ -313,6 +313,8 @@ const rect = (parent, node) => {
|
||||
// add the rect
|
||||
const rect = shapeSvg.insert('rect', ':first-child');
|
||||
|
||||
const totalWidth = bbox.width + node.padding;
|
||||
const totalHeight = bbox.height + node.padding;
|
||||
rect
|
||||
.attr('class', 'basic label-container')
|
||||
.attr('style', node.style)
|
||||
@@ -320,8 +322,19 @@ const rect = (parent, node) => {
|
||||
.attr('ry', node.ry)
|
||||
.attr('x', -bbox.width / 2 - halfPadding)
|
||||
.attr('y', -bbox.height / 2 - halfPadding)
|
||||
.attr('width', bbox.width + node.padding)
|
||||
.attr('height', bbox.height + node.padding);
|
||||
.attr('width', totalWidth)
|
||||
.attr('height', totalHeight);
|
||||
|
||||
if (node.props) {
|
||||
const propKeys = new Set(Object.keys(node.props));
|
||||
if (node.props.borders) {
|
||||
applyNodePropertyBorders(rect, node.props.borders, totalWidth, totalHeight);
|
||||
propKeys.delete('borders');
|
||||
}
|
||||
propKeys.forEach((propKey) => {
|
||||
log.warn(`Unknown node property ${propKey}`);
|
||||
});
|
||||
}
|
||||
|
||||
updateNodeBounds(node, rect);
|
||||
|
||||
@@ -332,6 +345,43 @@ const rect = (parent, node) => {
|
||||
return shapeSvg;
|
||||
};
|
||||
|
||||
function applyNodePropertyBorders(rect, borders, totalWidth, totalHeight) {
|
||||
const strokeDashArray = [];
|
||||
const addBorder = (length) => {
|
||||
strokeDashArray.push(length);
|
||||
strokeDashArray.push(0);
|
||||
};
|
||||
const skipBorder = (length) => {
|
||||
strokeDashArray.push(0);
|
||||
strokeDashArray.push(length);
|
||||
};
|
||||
if (borders.includes('t')) {
|
||||
log.debug('add top border');
|
||||
addBorder(totalWidth);
|
||||
} else {
|
||||
skipBorder(totalWidth);
|
||||
}
|
||||
if (borders.includes('r')) {
|
||||
log.debug('add right border');
|
||||
addBorder(totalHeight);
|
||||
} else {
|
||||
skipBorder(totalHeight);
|
||||
}
|
||||
if (borders.includes('b')) {
|
||||
log.debug('add bottom border');
|
||||
addBorder(totalWidth);
|
||||
} else {
|
||||
skipBorder(totalWidth);
|
||||
}
|
||||
if (borders.includes('l')) {
|
||||
log.debug('add left border');
|
||||
addBorder(totalHeight);
|
||||
} else {
|
||||
skipBorder(totalHeight);
|
||||
}
|
||||
rect.attr('stroke-dasharray', strokeDashArray.join(' '));
|
||||
}
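applyNodePropertyBorders walks the rect's perimeter in the order the SVG outline is drawn (top, right, bottom, left) and builds a stroke-dasharray in which addBorder emits a visible segment and skipBorder an invisible one of the same length. A worked example derived directly from the function above:

// borders: 'tb' on a node whose padded size is 100 x 50
// top:    addBorder(100)  -> 100 0
// right:  skipBorder(50)  -> 0 50
// bottom: addBorder(100)  -> 100 0
// left:   skipBorder(50)  -> 0 50
// rect.attr('stroke-dasharray', '100 0 0 50 100 0 0 50');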
|
||||
|
||||
const rectWithTitle = (parent, node) => {
|
||||
// const { shapeSvg, bbox, halfPadding } = labelHelper(parent, node, 'node ' + node.classes);
|
||||
|
||||
|
@@ -19,11 +19,13 @@ export const labelHelper = (parent, node, _classes, isNode) => {
|
||||
// Create the label and insert it after the rect
|
||||
const label = shapeSvg.insert('g').attr('class', 'label').attr('style', node.labelStyle);
|
||||
|
||||
const labelText = typeof node.labelText === 'string' ? node.labelText : node.labelText[0];
|
||||
|
||||
const text = label
|
||||
.node()
|
||||
.appendChild(
|
||||
createLabel(
|
||||
sanitizeText(decodeEntities(node.labelText), getConfig()),
|
||||
sanitizeText(decodeEntities(labelText), getConfig()),
|
||||
node.labelStyle,
|
||||
false,
|
||||
isNode
|
||||
|
@@ -149,7 +149,7 @@ export const addMembers = function (className, members) {

export const cleanupLabel = function (label) {
if (label.substring(0, 1) === ':') {
return label.substr(1).trim();
return common.sanitizeText(label.substr(1).trim(), configApi.getConfig());
} else {
return label.trim();
}
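The change to cleanupLabel keeps the existing behaviour (strip a leading ':' and trim) but now routes the result through common.sanitizeText with the current config, so member labels get the same securityLevel handling as other user input. An illustrative call, assuming the default sanitizer:

cleanupLabel(':  someMethod()  ');
// before: 'someMethod()'
// after:  common.sanitizeText('someMethod()', configApi.getConfig()) - same text here,
//         but HTML or script content in a label would now be neutralised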
@@ -1,15 +1,14 @@
|
||||
/* eslint-env jasmine */
|
||||
import { parser } from './parser/classDiagram';
|
||||
import classDb from './classDb';
|
||||
|
||||
describe('class diagram, ', function() {
|
||||
describe('when parsing data from a classDiagram it', function() {
|
||||
beforeEach(function() {
|
||||
describe('class diagram, ', function () {
|
||||
describe('when parsing data from a classDiagram it', function () {
|
||||
beforeEach(function () {
|
||||
parser.yy = classDb;
|
||||
parser.yy.clear();
|
||||
});
|
||||
|
||||
it('should be possible to apply a css class to a class directly', function() {
|
||||
it('should be possible to apply a css class to a class directly', function () {
|
||||
const str = 'classDiagram\n' + 'class Class01:::exClass';
|
||||
|
||||
parser.parse(str);
|
||||
@@ -32,7 +31,7 @@ describe('class diagram, ', function() {
|
||||
expect(testClass.cssClasses[0]).toBe('exClass');
|
||||
});
|
||||
|
||||
it('should be possible to apply a css class to a class with relations', function() {
|
||||
it('should be possible to apply a css class to a class with relations', function () {
|
||||
const str = 'classDiagram\n' + 'Class01 <|-- Class02\ncssClass "Class01" exClass';
|
||||
|
||||
parser.parse(str);
|
||||
@@ -40,7 +39,7 @@ describe('class diagram, ', function() {
|
||||
expect(parser.yy.getClass('Class01').cssClasses[0]).toBe('exClass');
|
||||
});
|
||||
|
||||
it('should be possible to apply a cssClass to a class', function() {
|
||||
it('should be possible to apply a cssClass to a class', function () {
|
||||
const str = 'classDiagram\n' + 'class Class01\n cssClass "Class01" exClass';
|
||||
|
||||
parser.parse(str);
|
||||
@@ -48,8 +47,9 @@ describe('class diagram, ', function() {
|
||||
expect(parser.yy.getClass('Class01').cssClasses[0]).toBe('exClass');
|
||||
});
|
||||
|
||||
it('should be possible to apply a cssClass to a comma separated list of classes', function() {
|
||||
const str = 'classDiagram\n' + 'class Class01\n class Class02\n cssClass "Class01,Class02" exClass';
|
||||
it('should be possible to apply a cssClass to a comma separated list of classes', function () {
|
||||
const str =
|
||||
'classDiagram\n' + 'class Class01\n class Class02\n cssClass "Class01,Class02" exClass';
|
||||
|
||||
parser.parse(str);
|
||||
|
||||
|
@@ -1,4 +1,3 @@
|
||||
/* eslint-env jasmine */
|
||||
import { parser } from './parser/classDiagram';
|
||||
import classDb from './classDb';
|
||||
|
||||
@@ -10,10 +9,8 @@ describe('class diagram, ', function () {
|
||||
parser.yy = classDb;
|
||||
});
|
||||
|
||||
it('should handle backquoted class names', function() {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class `Car`';
|
||||
it('should handle backquoted class names', function () {
|
||||
const str = 'classDiagram\n' + 'class `Car`';
|
||||
|
||||
parser.parse(str);
|
||||
});
|
||||
@@ -64,7 +61,7 @@ describe('class diagram, ', function () {
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle visibility for methods and members', function() {
|
||||
it('should handle visibility for methods and members', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class TestClass\n' +
|
||||
@@ -78,7 +75,7 @@ describe('class diagram, ', function () {
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle generic class', function() {
|
||||
it('should handle generic class', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Car~T~\n' +
|
||||
@@ -89,7 +86,7 @@ describe('class diagram, ', function () {
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle generic class with a literal name', function() {
|
||||
it('should handle generic class with a literal name', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class `Car`~T~\n' +
|
||||
@@ -100,7 +97,7 @@ describe('class diagram, ', function () {
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should break when another `{`is encountered before closing the first one while defining generic class with brackets', function() {
|
||||
it('should break when another `{`is encountered before closing the first one while defining generic class with brackets', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Dummy_Class~T~ {\n' +
|
||||
@@ -113,17 +110,17 @@ describe('class diagram, ', function () {
|
||||
' flightNumber : Integer\n' +
|
||||
' departureTime : Date\n' +
|
||||
'}';
|
||||
let testPased =false;
|
||||
try{
|
||||
let testPased = false;
|
||||
try {
|
||||
parser.parse(str);
|
||||
}catch (error){
|
||||
} catch (error) {
|
||||
console.log(error.name);
|
||||
testPased = true;
|
||||
}
|
||||
expect(testPased).toBe(true);
|
||||
});
|
||||
|
||||
it('should break when EOF is encountered before closing the first `{` while defining generic class with brackets', function() {
|
||||
it('should break when EOF is encountered before closing the first `{` while defining generic class with brackets', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Dummy_Class~T~ {\n' +
|
||||
@@ -132,17 +129,17 @@ describe('class diagram, ', function () {
|
||||
'}\n' +
|
||||
'\n' +
|
||||
'class Dummy_Class {\n';
|
||||
let testPased =false;
|
||||
try{
|
||||
let testPased = false;
|
||||
try {
|
||||
parser.parse(str);
|
||||
}catch (error){
|
||||
} catch (error) {
|
||||
console.log(error.name);
|
||||
testPased = true;
|
||||
}
|
||||
expect(testPased).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle generic class with brackets', function() {
|
||||
it('should handle generic class with brackets', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Dummy_Class~T~ {\n' +
|
||||
@@ -155,10 +152,10 @@ describe('class diagram, ', function () {
|
||||
' departureTime : Date\n' +
|
||||
'}';
|
||||
|
||||
parser.parse(str);
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle generic class with brackets and a literal name', function() {
|
||||
it('should handle generic class with brackets and a literal name', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class `Dummy_Class`~T~ {\n' +
|
||||
@@ -171,10 +168,10 @@ describe('class diagram, ', function () {
|
||||
' departureTime : Date\n' +
|
||||
'}';
|
||||
|
||||
parser.parse(str);
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle class definitions', function() {
|
||||
it('should handle class definitions', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Car\n' +
|
||||
@@ -290,8 +287,7 @@ describe('class diagram, ', function () {
|
||||
});
|
||||
|
||||
it('should handle comments at the start', function () {
|
||||
const str =
|
||||
`%% Comment
|
||||
const str = `%% Comment
|
||||
classDiagram
|
||||
class Class1 {
|
||||
int : test
|
||||
@@ -303,8 +299,7 @@ describe('class diagram, ', function () {
|
||||
});
|
||||
|
||||
it('should handle comments at the end', function () {
|
||||
const str =
|
||||
`classDiagram
|
||||
const str = `classDiagram
|
||||
class Class1 {
|
||||
int : test
|
||||
string : foo
|
||||
@@ -319,8 +314,7 @@ foo()
|
||||
});
|
||||
|
||||
it('should handle comments at the end no trailing newline', function () {
|
||||
const str =
|
||||
`classDiagram
|
||||
const str = `classDiagram
|
||||
class Class1 {
|
||||
int : test
|
||||
string : foo
|
||||
@@ -333,8 +327,7 @@ foo()
|
||||
});
|
||||
|
||||
it('should handle a comment with multiple line feeds', function () {
|
||||
const str =
|
||||
`classDiagram
|
||||
const str = `classDiagram
|
||||
|
||||
|
||||
%% Comment
|
||||
@@ -350,8 +343,7 @@ foo()
|
||||
});
|
||||
|
||||
it('should handle a comment with mermaid class diagram code in them', function () {
|
||||
const str =
|
||||
`classDiagram
|
||||
const str = `classDiagram
|
||||
%% Comment Class01 <|-- Class02
|
||||
class Class1 {
|
||||
int : test
|
||||
@@ -393,19 +385,19 @@ foo()
|
||||
});
|
||||
|
||||
it('should handle click statement with click and href link', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 href "google.com" ';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 href "google.com" ';
|
||||
|
||||
parser.parse(str);
|
||||
});
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle click statement with link and tooltip', function () {
|
||||
const str =
|
||||
@@ -422,21 +414,20 @@ foo()
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
|
||||
it('should handle click statement with click and href link and tooltip', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 href "google.com" "A Tooltip" ';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 href "google.com" "A Tooltip" ';
|
||||
|
||||
parser.parse(str);
|
||||
});
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle click statement with callback', function () {
|
||||
const str =
|
||||
@@ -454,19 +445,19 @@ foo()
|
||||
});
|
||||
|
||||
it('should handle click statement with click and call callback', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 call functionCall() ';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 call functionCall() ';
|
||||
|
||||
parser.parse(str);
|
||||
});
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle click statement with callback and tooltip', function () {
|
||||
const str =
|
||||
@@ -484,19 +475,19 @@ foo()
|
||||
});
|
||||
|
||||
it('should handle click statement with click and call callback and tooltip', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 call functionCall() "A Tooltip" ';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1 {\n' +
|
||||
'%% Comment Class01 <|-- Class02\n' +
|
||||
'int : test\n' +
|
||||
'string : foo\n' +
|
||||
'test()\n' +
|
||||
'foo()\n' +
|
||||
'}\n' +
|
||||
'click Class01 call functionCall() "A Tooltip" ';
|
||||
|
||||
parser.parse(str);
|
||||
});
|
||||
parser.parse(str);
|
||||
});
|
||||
|
||||
it('should handle dashed relation definition of different types and directions', function () {
|
||||
const str =
|
||||
@@ -522,12 +513,12 @@ foo()
|
||||
|
||||
it('should handle generic types in members in class with brackets', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Car {\n' +
|
||||
'List~Wheel~ wheels\n' +
|
||||
'classDiagram\n' +
|
||||
'class Car {\n' +
|
||||
'List~Wheel~ wheels\n' +
|
||||
'setWheels(List~Wheel~ wheels)\n' +
|
||||
'+getWheels() List~Wheel~\n' +
|
||||
'}';
|
||||
'}';
|
||||
|
||||
parser.parse(str);
|
||||
});
|
||||
@@ -747,51 +738,71 @@ foo()
|
||||
});
|
||||
|
||||
it('should associate link and css appropriately', function () {
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'link Class1 "google.com"';
|
||||
parser.parse(str);
|
||||
|
||||
const testClass = parser.yy.getClass('Class1');
|
||||
expect(testClass.link).toBe('about:blank');//('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.cssClasses.length).toBe(1);
|
||||
expect(testClass.cssClasses[0]).toBe('clickable');
|
||||
});
|
||||
|
||||
it('should associate click and href link and css appropriately', function () {
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 href "google.com"';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'link Class1 "google.com"';
|
||||
parser.parse(str);
|
||||
|
||||
const testClass = parser.yy.getClass('Class1');
|
||||
expect(testClass.link).toBe('about:blank');//('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.link).toBe('about:blank'); //('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.cssClasses.length).toBe(1);
|
||||
expect(testClass.cssClasses[0]).toBe('clickable');
|
||||
});
|
||||
|
||||
it('should associate click and href link and css appropriately', function () {
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 href "google.com"';
|
||||
parser.parse(str);
|
||||
|
||||
const testClass = parser.yy.getClass('Class1');
|
||||
expect(testClass.link).toBe('about:blank'); //('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.cssClasses.length).toBe(1);
|
||||
expect(testClass.cssClasses[0]).toBe('clickable');
|
||||
});
|
||||
|
||||
it('should associate link with tooltip', function () {
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'link Class1 "google.com" "A tooltip"';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'link Class1 "google.com" "A tooltip"';
|
||||
parser.parse(str);
|
||||
|
||||
const testClass = parser.yy.getClass('Class1');
|
||||
expect(testClass.link).toBe('about:blank');//('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.link).toBe('about:blank'); //('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.tooltip).toBe('A tooltip');
|
||||
expect(testClass.cssClasses.length).toBe(1);
|
||||
expect(testClass.cssClasses[0]).toBe('clickable');
|
||||
});
|
||||
|
||||
it('should associate click and href link with tooltip', function () {
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 href "google.com" "A tooltip"';
|
||||
parser.parse(str);
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 href "google.com" "A tooltip"';
|
||||
parser.parse(str);
|
||||
|
||||
const testClass = parser.yy.getClass('Class1');
|
||||
expect(testClass.link).toBe('about:blank');//('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.tooltip).toBe('A tooltip');
|
||||
expect(testClass.cssClasses.length).toBe(1);
|
||||
expect(testClass.cssClasses[0]).toBe('clickable');
|
||||
});
|
||||
const testClass = parser.yy.getClass('Class1');
|
||||
expect(testClass.link).toBe('about:blank'); //('google.com'); security needs to be set to 'loose' for this to work right
|
||||
expect(testClass.tooltip).toBe('A tooltip');
|
||||
expect(testClass.cssClasses.length).toBe(1);
|
||||
expect(testClass.cssClasses[0]).toBe('clickable');
|
||||
});
|
||||
|
||||
it('should associate click and href link with tooltip and target appropriately', function () {
|
||||
spyOn(classDb, 'setLink');
|
||||
spyOn(classDb, 'setTooltip');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 href "google.com" "A tooltip" _self';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 href "google.com" "A tooltip" _self';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setLink).toHaveBeenCalledWith('Class1', 'google.com', '_self');
|
||||
@@ -800,7 +811,11 @@ foo()
|
||||
|
||||
it('should associate click and href link appropriately', function () {
|
||||
spyOn(classDb, 'setLink');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 href "google.com"';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 href "google.com"';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setLink).toHaveBeenCalledWith('Class1', 'google.com');
|
||||
@@ -808,7 +823,11 @@ foo()
|
||||
|
||||
it('should associate click and href link with target appropriately', function () {
|
||||
spyOn(classDb, 'setLink');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 href "google.com" _self';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 href "google.com" _self';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setLink).toHaveBeenCalledWith('Class1', 'google.com', '_self');
|
||||
@@ -817,7 +836,11 @@ foo()
|
||||
it('should associate link appropriately', function () {
|
||||
spyOn(classDb, 'setLink');
|
||||
spyOn(classDb, 'setTooltip');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'link Class1 "google.com" "A tooltip" _self';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'link Class1 "google.com" "A tooltip" _self';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setLink).toHaveBeenCalledWith('Class1', 'google.com', '_self');
|
||||
@@ -826,16 +849,23 @@ foo()
|
||||
|
||||
it('should associate callback appropriately', function () {
|
||||
spyOn(classDb, 'setClickEvent');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'callback Class1 "functionCall"';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'callback Class1 "functionCall"';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setClickEvent).toHaveBeenCalledWith('Class1', 'functionCall');
|
||||
});
|
||||
|
||||
|
||||
it('should associate click and call callback appropriately', function () {
|
||||
spyOn(classDb, 'setClickEvent');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 call functionCall()';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 call functionCall()';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setClickEvent).toHaveBeenCalledWith('Class1', 'functionCall');
|
||||
@@ -843,16 +873,28 @@ foo()
|
||||
|
||||
it('should associate callback appropriately with an arbitrary number of args', function () {
|
||||
spyOn(classDb, 'setClickEvent');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 call functionCall("test0", test1, test2)';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 call functionCall("test0", test1, test2)';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setClickEvent).toHaveBeenCalledWith('Class1', 'functionCall','"test0", test1, test2');
|
||||
expect(classDb.setClickEvent).toHaveBeenCalledWith(
|
||||
'Class1',
|
||||
'functionCall',
|
||||
'"test0", test1, test2'
|
||||
);
|
||||
});
|
||||
|
||||
it('should associate callback with tooltip', function () {
|
||||
spyOn(classDb, 'setClickEvent');
|
||||
spyOn(classDb, 'setTooltip');
|
||||
const str = 'classDiagram\n' + 'class Class1\n' + 'Class1 : someMethod()\n' + 'click Class1 call functionCall() "A tooltip"';
|
||||
const str =
|
||||
'classDiagram\n' +
|
||||
'class Class1\n' +
|
||||
'Class1 : someMethod()\n' +
|
||||
'click Class1 call functionCall() "A tooltip"';
|
||||
parser.parse(str);
|
||||
|
||||
expect(classDb.setClickEvent).toHaveBeenCalledWith('Class1', 'functionCall');
|
||||
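The click/link cases above all exercise the same classDiagram interaction syntax; the spec only asserts which classDb setters are called with which arguments. Assembled from the strings used in those tests, the diagram text handed to the parser looks like this:

classDiagram
  class Class1
  Class1 : someMethod()
  click Class1 href "google.com" "A tooltip" _self
  %% or, for a callback instead of a link:
  %% click Class1 call functionCall("test0", test1, test2) "A tooltip"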
|
@@ -1,12 +1,11 @@
/* eslint-env jasmine */
const fs = require("fs");
const fs = require('fs');

import { LALRGenerator } from "jison";
import { LALRGenerator } from 'jison';

describe('class diagram grammar', function () {
it("should introduce no new conflicts", function() {
const file = require.resolve("./parser/classDiagram.jison");
const grammarSource = fs.readFileSync(file, "utf8");
it('should introduce no new conflicts', function () {
const file = require.resolve('./parser/classDiagram.jison');
const grammarSource = fs.readFileSync(file, 'utf8');
const grammarParser = new LALRGenerator(grammarSource, {});
expect(grammarParser.conflicts < 16).toBe(true);
});
@@ -1,4 +1,3 @@
|
||||
/* eslint-env jasmine */
|
||||
import svgDraw from './svgDraw';
|
||||
|
||||
describe('class member Renderer, ', function () {
|
||||
@@ -78,7 +77,7 @@ describe('class member Renderer, ', function () {
|
||||
it('should handle simple method declaration with parameters', function () {
|
||||
const str = 'foo(int id)';
|
||||
let actual = svgDraw.parseMember(str);
|
||||
|
||||
|
||||
expect(actual.displayText).toBe('foo(int id)');
|
||||
expect(actual.cssStyle).toBe('');
|
||||
});
|
||||
@@ -86,7 +85,7 @@ describe('class member Renderer, ', function () {
|
||||
it('should handle simple method declaration with multiple parameters', function () {
|
||||
const str = 'foo(int id, object thing)';
|
||||
let actual = svgDraw.parseMember(str);
|
||||
|
||||
|
||||
expect(actual.displayText).toBe('foo(int id, object thing)');
|
||||
expect(actual.cssStyle).toBe('');
|
||||
});
|
||||
|
@@ -1,7 +1,7 @@
import { removeScript } from './common';

describe('when securityLevel is antiscript, all script must be removed', function() {
it('should remove all script block, script inline.', function() {
describe('when securityLevel is antiscript, all script must be removed', function () {
it('should remove all script block, script inline.', function () {
const labelString = `1
Act1: Hello 1<script src="http://abc.com/script1.js"></script>1
<b>Act2</b>:
@@ -10,17 +10,17 @@ describe('when securityLevel is antiscript, all script must be removed', functio
</script>1
1`;

const result = removeScript(labelString);
const hasScript = (result.indexOf("script") >= 0);
const result = removeScript(labelString);
const hasScript = result.indexOf('script') >= 0;
expect(hasScript).toEqual(false);

const exactlyString = `1
const exactlyString = `1
Act1: Hello 11
<b>Act2</b>:
11
1`;

const isEqual = (result == exactlyString);
const isEqual = result == exactlyString;
expect(isEqual).toEqual(true);
});
});
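A condensed view of what the assertions above establish: removeScript drops the whole script element while the surrounding text is preserved. Reduced from the labelString / exactlyString pair in the test:

removeScript('Act1: Hello 1<script src="http://abc.com/script1.js"></script>1');
// -> 'Act1: Hello 11'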
@@ -3,11 +3,10 @@ import erDb from '../erDb';
|
||||
import erDiagram from './erDiagram';
|
||||
|
||||
setConfig({
|
||||
securityLevel: 'strict'
|
||||
securityLevel: 'strict',
|
||||
});
|
||||
|
||||
describe('when parsing ER diagram it...', function () {
|
||||
|
||||
beforeEach(function () {
|
||||
erDiagram.parser.yy = erDb;
|
||||
erDiagram.parser.yy.clear();
|
||||
@@ -78,7 +77,9 @@ describe('when parsing ER diagram it...', function () {
|
||||
const attribute2 = 'string author';
|
||||
const attribute3 = 'float price';
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`);
|
||||
erDiagram.parser.parse(
|
||||
`erDiagram\n${entity} {\n${attribute1}\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
});
|
||||
@@ -89,7 +90,9 @@ describe('when parsing ER diagram it...', function () {
|
||||
const attribute2 = 'string author';
|
||||
const attribute3 = 'float price';
|
||||
|
||||
erDiagram.parser.parse(`erDiagram\n${entity} {\n${attribute1}\n}\n${entity} {\n${attribute2}\n${attribute3}\n}`);
|
||||
erDiagram.parser.parse(
|
||||
`erDiagram\n${entity} {\n${attribute1}\n}\n${entity} {\n${attribute2}\n${attribute3}\n}`
|
||||
);
|
||||
const entities = erDb.getEntities();
|
||||
expect(entities[entity].attributes.length).toBe(3);
|
||||
});
|
||||
@@ -196,7 +199,6 @@ describe('when parsing ER diagram it...', function () {
|
||||
/* TODO */
|
||||
});
|
||||
|
||||
|
||||
it('should handle only-one-to-one-or-more relationships', function () {
|
||||
erDiagram.parser.parse('erDiagram\nA ||--|{ B : has');
|
||||
const rels = erDb.getRelationships();
|
||||
@@ -215,7 +217,6 @@ describe('when parsing ER diagram it...', function () {
|
||||
expect(rels.length).toBe(1);
|
||||
expect(rels[0].relSpec.cardA).toBe(erDb.Cardinality.ZERO_OR_MORE);
|
||||
expect(rels[0].relSpec.cardB).toBe(erDb.Cardinality.ONLY_ONE);
|
||||
|
||||
});
|
||||
|
||||
it('should handle zero-or-one-to-zero-or-more relationships', function () {
|
||||
|
@@ -1,16 +1,18 @@
|
||||
import { addToRender } from './flowChartShapes';
|
||||
|
||||
describe('flowchart shapes', function() {
|
||||
describe('flowchart shapes', function () {
|
||||
// rect-based shapes
|
||||
[
|
||||
['stadium', useWidth, useHeight]
|
||||
].forEach(function([shapeType, getW, getH]) {
|
||||
it(`should add a ${shapeType} shape that renders a properly positioned rect element`, function() {
|
||||
[['stadium', useWidth, useHeight]].forEach(function ([shapeType, getW, getH]) {
|
||||
it(`should add a ${shapeType} shape that renders a properly positioned rect element`, function () {
|
||||
const mockRender = MockRender();
|
||||
const mockSvg = MockSvg();
|
||||
addToRender(mockRender);
|
||||
|
||||
[[100, 100], [123, 45], [71, 300]].forEach(function([width, height]) {
|
||||
[
|
||||
[100, 100],
|
||||
[123, 45],
|
||||
[71, 300],
|
||||
].forEach(function ([width, height]) {
|
||||
const shape = mockRender.shapes()[shapeType](mockSvg, { width, height }, {});
|
||||
const w = width + height / 4;
|
||||
const h = height;
|
||||
@@ -24,15 +26,17 @@ describe('flowchart shapes', function() {
|
||||
});
|
||||
|
||||
// path-based shapes
|
||||
[
|
||||
['cylinder', useWidth, useHeight]
|
||||
].forEach(function([shapeType, getW, getH]) {
|
||||
it(`should add a ${shapeType} shape that renders a properly positioned path element`, function() {
|
||||
[['cylinder', useWidth, useHeight]].forEach(function ([shapeType, getW, getH]) {
|
||||
it(`should add a ${shapeType} shape that renders a properly positioned path element`, function () {
|
||||
const mockRender = MockRender();
|
||||
const mockSvg = MockSvg();
|
||||
addToRender(mockRender);
|
||||
|
||||
[[100, 100], [123, 45], [71, 300]].forEach(function([width, height]) {
|
||||
[
|
||||
[100, 100],
|
||||
[123, 45],
|
||||
[71, 300],
|
||||
].forEach(function ([width, height]) {
|
||||
const shape = mockRender.shapes()[shapeType](mockSvg, { width, height }, {});
|
||||
expect(shape.__tag).toEqual('path');
|
||||
expect(shape.__attrs).toHaveProperty('d');
|
||||
@@ -45,20 +49,20 @@ describe('flowchart shapes', function() {
|
||||
[
|
||||
'question',
|
||||
4,
|
||||
function(w, h) {
|
||||
function (w, h) {
|
||||
return (w + h) * 0.9;
|
||||
},
|
||||
function(w, h) {
|
||||
function (w, h) {
|
||||
return (w + h) * 0.9;
|
||||
}
|
||||
},
|
||||
],
|
||||
[
|
||||
'hexagon',
|
||||
6,
|
||||
function(w, h) {
|
||||
function (w, h) {
|
||||
return w + h / 2;
|
||||
},
|
||||
useHeight
|
||||
useHeight,
|
||||
],
|
||||
['rect_left_inv_arrow', 5, useWidth, useHeight],
|
||||
['rect_right_inv_arrow', 5, useWidth, useHeight],
|
||||
@@ -67,13 +71,17 @@ describe('flowchart shapes', function() {
|
||||
['trapezoid', 4, useWidth, useHeight],
|
||||
['inv_trapezoid', 4, useWidth, useHeight],
|
||||
['subroutine', 10, useWidth, useHeight],
|
||||
].forEach(function([shapeType, expectedPointCount, getW, getH]) {
|
||||
it(`should add a ${shapeType} shape that renders a properly translated polygon element`, function() {
|
||||
].forEach(function ([shapeType, expectedPointCount, getW, getH]) {
|
||||
it(`should add a ${shapeType} shape that renders a properly translated polygon element`, function () {
|
||||
const mockRender = MockRender();
|
||||
const mockSvg = MockSvg();
|
||||
addToRender(mockRender);
|
||||
|
||||
[[100, 100], [123, 45], [71, 300]].forEach(function([width, height]) {
|
||||
[
|
||||
[100, 100],
|
||||
[123, 45],
|
||||
[71, 300],
|
||||
].forEach(function ([width, height]) {
|
||||
const shape = mockRender.shapes()[shapeType](mockSvg, { width, height }, {});
|
||||
const dx = -getW(width, height) / 2;
|
||||
const dy = getH(width, height) / 2;
|
||||
@@ -91,7 +99,7 @@ function MockRender() {
|
||||
return {
|
||||
shapes() {
|
||||
return shapes;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -111,7 +119,7 @@ function MockSvg(tag, ...args) {
|
||||
get __attrs() {
|
||||
return attributes;
|
||||
},
|
||||
insert: function(tag, ...args) {
|
||||
insert: function (tag, ...args) {
|
||||
const child = MockSvg(tag, ...args);
|
||||
children.push(child);
|
||||
return child;
|
||||
@@ -119,10 +127,14 @@ function MockSvg(tag, ...args) {
|
||||
attr(name, value) {
|
||||
this.__attrs[name] = value;
|
||||
return this;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param w
|
||||
* @param h
|
||||
*/
|
||||
function useWidth(w, h) {
|
||||
return w;
|
||||
}
|
||||
|
@@ -52,8 +52,9 @@ export const lookUpDomId = function (id) {
|
||||
* @param style
|
||||
* @param classes
|
||||
* @param dir
|
||||
* @param props
|
||||
*/
|
||||
export const addVertex = function (_id, text, type, style, classes, dir) {
|
||||
export const addVertex = function (_id, text, type, style, classes, dir, props = {}) {
|
||||
let txt;
|
||||
let id = _id;
|
||||
if (typeof id === 'undefined') {
|
||||
@@ -109,6 +110,7 @@ export const addVertex = function (_id, text, type, style, classes, dir) {
|
||||
if (typeof dir !== 'undefined') {
|
||||
vertices[id].dir = dir;
|
||||
}
|
||||
vertices[id].props = props;
|
||||
};
|
||||
|
||||
/**
|
||||
|
@@ -2,12 +2,12 @@ import flowDb from './flowDb';
|
||||
|
||||
describe('flow db subgraphs', () => {
|
||||
let subgraphs;
|
||||
beforeEach( ()=>{
|
||||
beforeEach(() => {
|
||||
subgraphs = [
|
||||
{nodes:['a', 'b', 'c', 'e']},
|
||||
{nodes:['f', 'g', 'h']},
|
||||
{nodes:['i', 'j']},
|
||||
{nodes:['k']},
|
||||
{ nodes: ['a', 'b', 'c', 'e'] },
|
||||
{ nodes: ['f', 'g', 'h'] },
|
||||
{ nodes: ['i', 'j'] },
|
||||
{ nodes: ['k'] },
|
||||
];
|
||||
});
|
||||
describe('exist', () => {
|
||||
@@ -23,22 +23,21 @@ describe('flow db subgraphs', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('makeUniq', () => {
|
||||
it('should remove ids from sungraph that already exists in another subgraph even if it gets empty', () => {
|
||||
const subgraph = flowDb.makeUniq({nodes:['i', 'j']}, subgraphs);
|
||||
describe('makeUniq', () => {
|
||||
it('should remove ids from sungraph that already exists in another subgraph even if it gets empty', () => {
|
||||
const subgraph = flowDb.makeUniq({ nodes: ['i', 'j'] }, subgraphs);
|
||||
|
||||
expect(subgraph.nodes).toEqual([]);
|
||||
});
|
||||
it('should remove ids from sungraph that already exists in another subgraph', () => {
|
||||
const subgraph = flowDb.makeUniq({nodes:['i', 'j', 'o']}, subgraphs);
|
||||
|
||||
expect(subgraph.nodes).toEqual(['o']);
|
||||
});
|
||||
it('should not remove ids from subgraph if they are unique', () => {
|
||||
const subgraph = flowDb.makeUniq({nodes:['q', 'r', 's']}, subgraphs);
|
||||
|
||||
expect(subgraph.nodes).toEqual(['q', 'r', 's']);
|
||||
});
|
||||
expect(subgraph.nodes).toEqual([]);
|
||||
});
|
||||
});
|
||||
it('should remove ids from sungraph that already exists in another subgraph', () => {
|
||||
const subgraph = flowDb.makeUniq({ nodes: ['i', 'j', 'o'] }, subgraphs);
|
||||
|
||||
expect(subgraph.nodes).toEqual(['o']);
|
||||
});
|
||||
it('should not remove ids from subgraph if they are unique', () => {
|
||||
const subgraph = flowDb.makeUniq({ nodes: ['q', 'r', 's'] }, subgraphs);
|
||||
|
||||
expect(subgraph.nodes).toEqual(['q', 'r', 's']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@@ -152,6 +152,7 @@ export const addVertices = function (vert, g, svgId) {
|
||||
width: vertex.type === 'group' ? 500 : undefined,
|
||||
dir: vertex.dir,
|
||||
type: vertex.type,
|
||||
props: vertex.props,
|
||||
padding: getConfig().flowchart.padding,
|
||||
});
|
||||
|
||||
@@ -168,6 +169,7 @@ export const addVertices = function (vert, g, svgId) {
|
||||
width: vertex.type === 'group' ? 500 : undefined,
|
||||
type: vertex.type,
|
||||
dir: vertex.dir,
|
||||
props: vertex.props,
|
||||
padding: getConfig().flowchart.padding,
|
||||
});
|
||||
});
|
||||
|
@@ -3,12 +3,12 @@ import { setConfig } from '../../config';
|
||||
|
||||
setConfig({
|
||||
flowchart: {
|
||||
htmlLabels: false
|
||||
}
|
||||
htmlLabels: false,
|
||||
},
|
||||
});
|
||||
|
||||
describe('the flowchart renderer', function() {
|
||||
describe('when adding vertices to a graph', function() {
|
||||
describe('the flowchart renderer', function () {
|
||||
describe('when adding vertices to a graph', function () {
|
||||
[
|
||||
['round', 'rect', 5],
|
||||
['square', 'rect'],
|
||||
@@ -25,14 +25,14 @@ describe('the flowchart renderer', function() {
|
||||
['stadium', 'stadium'],
|
||||
['subroutine', 'subroutine'],
|
||||
['cylinder', 'cylinder'],
|
||||
['group', 'rect']
|
||||
].forEach(function([type, expectedShape, expectedRadios = 0]) {
|
||||
it(`should add the correct shaped node to the graph for vertex type ${type}`, function() {
|
||||
['group', 'rect'],
|
||||
].forEach(function ([type, expectedShape, expectedRadios = 0]) {
|
||||
it(`should add the correct shaped node to the graph for vertex type ${type}`, function () {
|
||||
const addedNodes = [];
|
||||
const mockG = {
|
||||
setNode: function(id, object) {
|
||||
setNode: function (id, object) {
|
||||
addedNodes.push([id, object]);
|
||||
}
|
||||
},
|
||||
};
|
||||
addVertices(
|
||||
{
|
||||
@@ -41,8 +41,8 @@ describe('the flowchart renderer', function() {
|
||||
id: 'my-node-id',
|
||||
classes: [],
|
||||
styles: [],
|
||||
text: 'my vertex text'
|
||||
}
|
||||
text: 'my vertex text',
|
||||
},
|
||||
},
|
||||
mockG,
|
||||
'svg-id'
|
||||
@@ -57,18 +57,15 @@ describe('the flowchart renderer', function() {
|
||||
});
|
||||
});
|
||||
|
||||
[
|
||||
'Multi<br>Line',
|
||||
'Multi<br/>Line',
|
||||
'Multi<br />Line',
|
||||
'Multi<br\t/>Line'
|
||||
].forEach(function(labelText) {
|
||||
it('should handle multiline texts with different line breaks', function() {
|
||||
['Multi<br>Line', 'Multi<br/>Line', 'Multi<br />Line', 'Multi<br\t/>Line'].forEach(function (
|
||||
labelText
|
||||
) {
|
||||
it('should handle multiline texts with different line breaks', function () {
|
||||
const addedNodes = [];
|
||||
const mockG = {
|
||||
setNode: function(id, object) {
|
||||
setNode: function (id, object) {
|
||||
addedNodes.push([id, object]);
|
||||
}
|
||||
},
|
||||
};
|
||||
addVertices(
|
||||
{
|
||||
@@ -77,8 +74,8 @@ describe('the flowchart renderer', function() {
|
||||
id: 'my-node-id',
|
||||
classes: [],
|
||||
styles: [],
|
||||
text: 'Multi<br>Line'
|
||||
}
|
||||
text: 'Multi<br>Line',
|
||||
},
|
||||
},
|
||||
mockG,
|
||||
'svg-id'
|
||||
@@ -98,14 +95,18 @@ describe('the flowchart renderer', function() {
|
||||
[['fill:#fff'], 'fill:#fff;', ''],
|
||||
[['color:#ccc'], '', 'color:#ccc;'],
|
||||
[['fill:#fff', 'color:#ccc'], 'fill:#fff;', 'color:#ccc;'],
|
||||
[['fill:#fff', 'color:#ccc', 'text-align:center'], 'fill:#fff;', 'color:#ccc;text-align:center;']
|
||||
].forEach(function([style, expectedStyle, expectedLabelStyle]) {
|
||||
it(`should add the styles to style and/or labelStyle for style ${style}`, function() {
|
||||
[
|
||||
['fill:#fff', 'color:#ccc', 'text-align:center'],
|
||||
'fill:#fff;',
|
||||
'color:#ccc;text-align:center;',
|
||||
],
|
||||
].forEach(function ([style, expectedStyle, expectedLabelStyle]) {
|
||||
it(`should add the styles to style and/or labelStyle for style ${style}`, function () {
|
||||
const addedNodes = [];
|
||||
const mockG = {
|
||||
setNode: function(id, object) {
|
||||
setNode: function (id, object) {
|
||||
addedNodes.push([id, object]);
|
||||
}
|
||||
},
|
||||
};
|
||||
addVertices(
|
||||
{
|
||||
@@ -114,8 +115,8 @@ describe('the flowchart renderer', function() {
|
||||
id: 'my-node-id',
|
||||
classes: [],
|
||||
styles: style,
|
||||
text: 'my vertex text'
|
||||
}
|
||||
text: 'my vertex text',
|
||||
},
|
||||
},
|
||||
mockG,
|
||||
'svg-id'
|
||||
@@ -129,12 +130,12 @@ describe('the flowchart renderer', function() {
|
||||
});
|
||||
});
|
||||
|
||||
it(`should add default class to all nodes which do not have another class assigned`, function() {
|
||||
it(`should add default class to all nodes which do not have another class assigned`, function () {
|
||||
const addedNodes = [];
|
||||
const mockG = {
|
||||
setNode: function(id, object) {
|
||||
setNode: function (id, object) {
|
||||
addedNodes.push([id, object]);
|
||||
}
|
||||
},
|
||||
};
|
||||
addVertices(
|
||||
{
|
||||
@@ -143,15 +144,15 @@ describe('the flowchart renderer', function() {
|
||||
id: 'defaultNode',
|
||||
classes: [],
|
||||
styles: [],
|
||||
text: 'my vertex text'
|
||||
text: 'my vertex text',
|
||||
},
|
||||
v2: {
|
||||
type: 'rect',
|
||||
id: 'myNode',
|
||||
classes: ['myClass'],
|
||||
styles: [],
|
||||
text: 'my vertex text'
|
||||
}
|
||||
text: 'my vertex text',
|
||||
},
|
||||
},
|
||||
mockG,
|
||||
'svg-id'
|
||||
@@ -164,13 +165,13 @@ describe('the flowchart renderer', function() {
|
||||
});
|
||||
});
|
||||
|
||||
describe('when adding edges to a graph', function() {
|
||||
it('should handle multiline texts and set centered label position', function() {
|
||||
describe('when adding edges to a graph', function () {
|
||||
it('should handle multiline texts and set centered label position', function () {
|
||||
const addedEdges = [];
|
||||
const mockG = {
|
||||
setEdge: function(s, e, data, c) {
|
||||
setEdge: function (s, e, data, c) {
|
||||
addedEdges.push(data);
|
||||
}
|
||||
},
|
||||
};
|
||||
addEdges(
|
||||
[
|
||||
@@ -181,13 +182,13 @@ describe('the flowchart renderer', function() {
|
||||
{ style: ['stroke:DarkGray', 'stroke-width:2px'], text: 'Multi<br>Line' },
|
||||
{ style: ['stroke:DarkGray', 'stroke-width:2px'], text: 'Multi<br/>Line' },
|
||||
{ style: ['stroke:DarkGray', 'stroke-width:2px'], text: 'Multi<br />Line' },
|
||||
{ style: ['stroke:DarkGray', 'stroke-width:2px'], text: 'Multi<br\t/>Line' }
|
||||
{ style: ['stroke:DarkGray', 'stroke-width:2px'], text: 'Multi<br\t/>Line' },
|
||||
],
|
||||
mockG,
|
||||
'svg-id'
|
||||
);
|
||||
|
||||
addedEdges.forEach(function(edge) {
|
||||
addedEdges.forEach(function (edge) {
|
||||
expect(edge).toHaveProperty('label', 'Multi\nLine');
|
||||
expect(edge).toHaveProperty('labelpos', 'c');
|
||||
});
|
||||
@@ -197,22 +198,20 @@ describe('the flowchart renderer', function() {
|
||||
[['stroke:DarkGray'], 'stroke:DarkGray;', ''],
|
||||
[['color:red'], '', 'fill:red;'],
|
||||
[['stroke:DarkGray', 'color:red'], 'stroke:DarkGray;', 'fill:red;'],
|
||||
[['stroke:DarkGray', 'color:red', 'stroke-width:2px'], 'stroke:DarkGray;stroke-width:2px;', 'fill:red;']
|
||||
].forEach(function([style, expectedStyle, expectedLabelStyle]) {
|
||||
it(`should add the styles to style and/or labelStyle for style ${style}`, function() {
|
||||
[
|
||||
['stroke:DarkGray', 'color:red', 'stroke-width:2px'],
|
||||
'stroke:DarkGray;stroke-width:2px;',
|
||||
'fill:red;',
|
||||
],
|
||||
].forEach(function ([style, expectedStyle, expectedLabelStyle]) {
|
||||
it(`should add the styles to style and/or labelStyle for style ${style}`, function () {
|
||||
const addedEdges = [];
|
||||
const mockG = {
|
||||
setEdge: function(s, e, data, c) {
|
||||
setEdge: function (s, e, data, c) {
|
||||
addedEdges.push(data);
|
||||
}
|
||||
},
|
||||
};
|
||||
addEdges(
|
||||
[
|
||||
{ style: style, text: 'styling' }
|
||||
],
|
||||
mockG,
|
||||
'svg-id'
|
||||
);
|
||||
addEdges([{ style: style, text: 'styling' }], mockG, 'svg-id');
|
||||
|
||||
expect(addedEdges).toHaveLength(1);
|
||||
expect(addedEdges[0]).toHaveProperty('style', expectedStyle);
|
||||
|
@@ -3,16 +3,16 @@ import flow from './flow';
|
||||
import { setConfig } from '../../../config';
|
||||
|
||||
setConfig({
|
||||
securityLevel: 'strict'
|
||||
securityLevel: 'strict',
|
||||
});
|
||||
|
||||
describe('[Arrows] when parsing', () => {
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
flow.parser.yy = flowDb;
|
||||
flow.parser.yy.clear();
|
||||
});
|
||||
|
||||
it('should handle a nodes and edges', function() {
|
||||
it('should handle a nodes and edges', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -29,7 +29,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it("should handle angle bracket ' > ' as direction LR", function() {
|
||||
it("should handle angle bracket ' > ' as direction LR", function () {
|
||||
const res = flow.parser.parse('graph >;A-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -49,7 +49,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it("should handle angle bracket ' < ' as direction RL", function() {
|
||||
it("should handle angle bracket ' < ' as direction RL", function () {
|
||||
const res = flow.parser.parse('graph <;A-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -69,7 +69,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it("should handle caret ' ^ ' as direction BT", function() {
|
||||
it("should handle caret ' ^ ' as direction BT", function () {
|
||||
const res = flow.parser.parse('graph ^;A-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -90,7 +90,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it("should handle lower-case 'v' as direction TB", function() {
|
||||
it("should handle lower-case 'v' as direction TB", function () {
|
||||
const res = flow.parser.parse('graph v;A-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -110,7 +110,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a nodes and edges and a space between link and node', function() {
|
||||
it('should handle a nodes and edges and a space between link and node', function () {
|
||||
const res = flow.parser.parse('graph TD;A --> B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -127,7 +127,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle a nodes and edges, a space between link and node and each line ending without semicolon', function() {
|
||||
it('should handle a nodes and edges, a space between link and node and each line ending without semicolon', function () {
|
||||
const res = flow.parser.parse('graph TD\nA --> B\n style e red');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -144,7 +144,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle statements ending without semicolon', function() {
|
||||
it('should handle statements ending without semicolon', function () {
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nB-->C');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -161,9 +161,9 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
describe('it should handle multi directional arrows', function() {
|
||||
describe('point', function() {
|
||||
it('should handle double edged nodes and edges', function() {
|
||||
describe('it should handle multi directional arrows', function () {
|
||||
describe('point', function () {
|
||||
it('should handle double edged nodes and edges', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA<-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -180,7 +180,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text', function() {
|
||||
it('should handle double edged nodes with text', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA<-- text -->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -197,7 +197,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes and edges on thick arrows', function() {
|
||||
it('should handle double edged nodes and edges on thick arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA<==>B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -214,7 +214,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text on thick arrows', function() {
|
||||
it('should handle double edged nodes with text on thick arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA<== text ==>B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -231,7 +231,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes and edges on dotted arrows', function() {
|
||||
it('should handle double edged nodes and edges on dotted arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA<-.->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -248,7 +248,7 @@ describe('[Arrows] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text on dotted arrows', function() {
|
||||
it('should handle double edged nodes with text on dotted arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA<-. text .->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
@@ -3,16 +3,16 @@ import flow from './flow';
|
||||
import { setConfig } from '../../../config';
|
||||
|
||||
setConfig({
|
||||
securityLevel: 'strict'
|
||||
securityLevel: 'strict',
|
||||
});
|
||||
|
||||
describe('[Comments] when parsing', () => {
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
flow.parser.yy = flowDb;
|
||||
flow.parser.yy.clear();
|
||||
});
|
||||
|
||||
it('should handle comments', function() {
|
||||
it('should handle comments', function () {
|
||||
const res = flow.parser.parse('graph TD;\n%% Comment\n A-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -27,7 +27,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle comments at the start', function() {
|
||||
it('should handle comments at the start', function () {
|
||||
const res = flow.parser.parse('%% Comment\ngraph TD;\n A-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -42,7 +42,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle comments at the end', function() {
|
||||
it('should handle comments at the end', function () {
|
||||
const res = flow.parser.parse('graph TD;\n A-->B\n %% Comment at the end\n');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -57,7 +57,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle comments at the end no trailing newline', function() {
|
||||
it('should handle comments at the end no trailing newline', function () {
|
||||
const res = flow.parser.parse('graph TD;\n A-->B\n%% Commento');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -72,7 +72,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle comments at the end many trailing newlines', function() {
|
||||
it('should handle comments at the end many trailing newlines', function () {
|
||||
const res = flow.parser.parse('graph TD;\n A-->B\n%% Commento\n\n\n');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -87,7 +87,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle no trailing newlines', function() {
|
||||
it('should handle no trailing newlines', function () {
|
||||
const res = flow.parser.parse('graph TD;\n A-->B');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -102,7 +102,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle many trailing newlines', function() {
|
||||
it('should handle many trailing newlines', function () {
|
||||
const res = flow.parser.parse('graph TD;\n A-->B\n\n');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -117,7 +117,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle a comment with blank rows in-between', function() {
|
||||
it('should handle a comment with blank rows in-between', function () {
|
||||
const res = flow.parser.parse('graph TD;\n\n\n %% Comment\n A-->B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -132,7 +132,7 @@ describe('[Comments] when parsing', () => {
|
||||
expect(edges[0].text).toBe('');
|
||||
});
|
||||
|
||||
it('should handle a comment with mermaid flowchart code in them', function() {
|
||||
it('should handle a comment with mermaid flowchart code in them', function () {
|
||||
const res = flow.parser.parse(
|
||||
'graph TD;\n\n\n %% Test od>Odd shape]-->|Two line<br>edge comment|ro;\n A-->B;'
|
||||
);
|
||||
|
@@ -8,18 +8,17 @@ import { setConfig } from '../../../config';
|
||||
|
||||
// const clean = DOMPurify.sanitize(dirty);
|
||||
setConfig({
|
||||
securityLevel: 'strict'
|
||||
securityLevel: 'strict',
|
||||
});
|
||||
|
||||
describe('when parsing directions', function() {
|
||||
beforeEach(function() {
|
||||
describe('when parsing directions', function () {
|
||||
beforeEach(function () {
|
||||
flow.parser.yy = flowDb;
|
||||
flow.parser.yy.clear();
|
||||
flow.parser.yy.setGen('gen-2');
|
||||
});
|
||||
|
||||
|
||||
it('should use default direction from top level', function() {
|
||||
it('should use default direction from top level', function () {
|
||||
const res = flow.parser.parse(`flowchart TB
|
||||
subgraph A
|
||||
a --> b
|
||||
@@ -34,7 +33,7 @@ describe('when parsing directions', function() {
|
||||
expect(subgraph.id).toBe('A');
|
||||
expect(subgraph.dir).toBe(undefined);
|
||||
});
|
||||
it('should handle a subgraph with a direction', function() {
|
||||
it('should handle a subgraph with a direction', function () {
|
||||
const res = flow.parser.parse(`flowchart TB
|
||||
subgraph A
|
||||
direction BT
|
||||
@@ -50,7 +49,7 @@ describe('when parsing directions', function() {
|
||||
expect(subgraph.id).toBe('A');
|
||||
expect(subgraph.dir).toBe('BT');
|
||||
});
|
||||
it('should use the last defined direction', function() {
|
||||
it('should use the last defined direction', function () {
|
||||
const res = flow.parser.parse(`flowchart TB
|
||||
subgraph A
|
||||
direction BT
|
||||
@@ -68,7 +67,7 @@ describe('when parsing directions', function() {
|
||||
expect(subgraph.dir).toBe('RL');
|
||||
});
|
||||
|
||||
it('should handle nested subgraphs 1', function() {
|
||||
it('should handle nested subgraphs 1', function () {
|
||||
const res = flow.parser.parse(`flowchart TB
|
||||
subgraph A
|
||||
direction RL
|
||||
@@ -84,8 +83,8 @@ describe('when parsing directions', function() {
|
||||
const subgraphs = flow.parser.yy.getSubGraphs();
|
||||
expect(subgraphs.length).toBe(2);
|
||||
|
||||
const subgraphA = filter(subgraphs,o => o.id === 'A')[0];
|
||||
const subgraphB = filter(subgraphs,o => o.id === 'B')[0];
|
||||
const subgraphA = filter(subgraphs, (o) => o.id === 'A')[0];
|
||||
const subgraphB = filter(subgraphs, (o) => o.id === 'B')[0];
|
||||
|
||||
expect(subgraphB.nodes[0]).toBe('c');
|
||||
expect(subgraphB.dir).toBe('LR');
|
||||
@@ -95,5 +94,4 @@ describe('when parsing directions', function() {
|
||||
expect(subgraphA.nodes).not.toContain('c');
|
||||
expect(subgraphA.dir).toBe('RL');
|
||||
});
|
||||
|
||||
});
|
||||
|
@@ -3,16 +3,16 @@ import flow from './flow';
|
||||
import { setConfig } from '../../../config';
|
||||
|
||||
setConfig({
|
||||
securityLevel: 'strict'
|
||||
securityLevel: 'strict',
|
||||
});
|
||||
|
||||
describe('[Edges] when parsing', () => {
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
flow.parser.yy = flowDb;
|
||||
flow.parser.yy.clear();
|
||||
});
|
||||
|
||||
it('should handle open ended edges', function() {
|
||||
it('should handle open ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A---B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -21,7 +21,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].type).toBe('arrow_open');
|
||||
});
|
||||
|
||||
it('should handle cross ended edges', function() {
|
||||
it('should handle cross ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A--xB;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -30,7 +30,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].type).toBe('arrow_cross');
|
||||
});
|
||||
|
||||
it('should handle open ended edges', function() {
|
||||
it('should handle open ended edges', function () {
|
||||
const res = flow.parser.parse('graph TD;A--oB;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -39,8 +39,8 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].type).toBe('arrow_circle');
|
||||
});
|
||||
|
||||
describe('cross', function() {
|
||||
it('should handle double edged nodes and edges', function() {
|
||||
describe('cross', function () {
|
||||
it('should handle double edged nodes and edges', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA x--x B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -57,7 +57,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text', function() {
|
||||
it('should handle double edged nodes with text', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA x-- text --x B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -74,7 +74,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes and edges on thick arrows', function() {
|
||||
it('should handle double edged nodes and edges on thick arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA x==x B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -91,7 +91,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text on thick arrows', function() {
|
||||
it('should handle double edged nodes with text on thick arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA x== text ==x B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -108,7 +108,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes and edges on dotted arrows', function() {
|
||||
it('should handle double edged nodes and edges on dotted arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA x-.-x B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -125,7 +125,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text on dotted arrows', function() {
|
||||
it('should handle double edged nodes with text on dotted arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA x-. text .-x B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -143,8 +143,8 @@ describe('[Edges] when parsing', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('circle', function() {
|
||||
it('should handle double edged nodes and edges', function() {
|
||||
describe('circle', function () {
|
||||
it('should handle double edged nodes and edges', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA o--o B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -161,7 +161,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text', function() {
|
||||
it('should handle double edged nodes with text', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA o-- text --o B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -178,7 +178,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes and edges on thick arrows', function() {
|
||||
it('should handle double edged nodes and edges on thick arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA o==o B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -195,7 +195,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text on thick arrows', function() {
|
||||
it('should handle double edged nodes with text on thick arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA o== text ==o B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -212,7 +212,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes and edges on dotted arrows', function() {
|
||||
it('should handle double edged nodes and edges on dotted arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA o-.-o B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -229,7 +229,7 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[0].length).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle double edged nodes with text on dotted arrows', function() {
|
||||
it('should handle double edged nodes with text on dotted arrows', function () {
|
||||
const res = flow.parser.parse('graph TD;\nA o-. text .-o B;');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -247,7 +247,7 @@ describe('[Edges] when parsing', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multiple edges', function() {
|
||||
it('should handle multiple edges', function () {
|
||||
const res = flow.parser.parse(
|
||||
'graph TD;A---|This is the 123 s text|B;\nA---|This is the second edge|B;'
|
||||
);
|
||||
@@ -271,9 +271,9 @@ describe('[Edges] when parsing', () => {
|
||||
expect(edges[1].length).toBe(1);
|
||||
});
|
||||
|
||||
describe('edge length', function() {
|
||||
describe('edge length', function () {
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle normal edges with length ${length}`, function() {
|
||||
it(`should handle normal edges with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -${'-'.repeat(length)}- B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -292,7 +292,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle normal labelled edges with length ${length}`, function() {
|
||||
it(`should handle normal labelled edges with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -- Label -${'-'.repeat(length)}- B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -311,7 +311,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle normal edges with arrows with length ${length}`, function() {
|
||||
it(`should handle normal edges with arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -${'-'.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -330,7 +330,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle normal labelled edges with arrows with length ${length}`, function() {
|
||||
it(`should handle normal labelled edges with arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -- Label -${'-'.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -349,7 +349,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle normal edges with double arrows with length ${length}`, function() {
|
||||
it(`should handle normal edges with double arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA <-${'-'.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -368,7 +368,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle normal labelled edges with double arrows with length ${length}`, function() {
|
||||
it(`should handle normal labelled edges with double arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA <-- Label -${'-'.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -387,7 +387,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle thick edges with length ${length}`, function() {
|
||||
it(`should handle thick edges with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA =${'='.repeat(length)}= B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -406,7 +406,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle thick labelled edges with length ${length}`, function() {
|
||||
it(`should handle thick labelled edges with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA == Label =${'='.repeat(length)}= B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -425,7 +425,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle thick edges with arrows with length ${length}`, function() {
|
||||
it(`should handle thick edges with arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA =${'='.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -444,7 +444,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle thick labelled edges with arrows with length ${length}`, function() {
|
||||
it(`should handle thick labelled edges with arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA == Label =${'='.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -463,7 +463,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle thick edges with double arrows with length ${length}`, function() {
|
||||
it(`should handle thick edges with double arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA <=${'='.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -482,7 +482,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle thick labelled edges with double arrows with length ${length}`, function() {
|
||||
it(`should handle thick labelled edges with double arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA <== Label =${'='.repeat(length)}> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -501,7 +501,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle dotted edges with length ${length}`, function() {
|
||||
it(`should handle dotted edges with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -${'.'.repeat(length)}- B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -520,7 +520,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle dotted labelled edges with length ${length}`, function() {
|
||||
it(`should handle dotted labelled edges with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -. Label ${'.'.repeat(length)}- B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -539,7 +539,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle dotted edges with arrows with length ${length}`, function() {
|
||||
it(`should handle dotted edges with arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -${'.'.repeat(length)}-> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -558,7 +558,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle dotted labelled edges with arrows with length ${length}`, function() {
|
||||
it(`should handle dotted labelled edges with arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA -. Label ${'.'.repeat(length)}-> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -577,7 +577,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle dotted edges with double arrows with length ${length}`, function() {
|
||||
it(`should handle dotted edges with double arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA <-${'.'.repeat(length)}-> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
@@ -596,7 +596,7 @@ describe('[Edges] when parsing', () => {
|
||||
}
|
||||
|
||||
for (let length = 1; length <= 3; ++length) {
|
||||
it(`should handle dotted edges with double arrows with length ${length}`, function() {
|
||||
it(`should handle dotted edges with double arrows with length ${length}`, function () {
|
||||
const res = flow.parser.parse(`graph TD;\nA <-. Label ${'.'.repeat(length)}-> B;`);
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
|
File diff suppressed because one or more lines are too long
@@ -5,16 +5,16 @@ import { setConfig } from '../../../config';
|
||||
const spyOn = jest.spyOn;
|
||||
|
||||
setConfig({
|
||||
securityLevel: 'strict'
|
||||
securityLevel: 'strict',
|
||||
});
|
||||
|
||||
describe('[Interactions] when parsing', () => {
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
flow.parser.yy = flowDb;
|
||||
flow.parser.yy.clear();
|
||||
});
|
||||
|
||||
it('it should be possible to use click to a callback', function() {
|
||||
it('it should be possible to use click to a callback', function () {
|
||||
spyOn(flowDb, 'setClickEvent');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A callback');
|
||||
|
||||
@@ -24,7 +24,7 @@ describe('[Interactions] when parsing', () => {
|
||||
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||
});
|
||||
|
||||
it('it should be possible to use click to a click and call callback', function() {
|
||||
it('it should be possible to use click to a click and call callback', function () {
|
||||
spyOn(flowDb, 'setClickEvent');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A call callback()');
|
||||
|
||||
@@ -34,7 +34,7 @@ describe('[Interactions] when parsing', () => {
|
||||
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||
});
|
||||
|
||||
it('it should be possible to use click to a callback with toolip', function() {
|
||||
it('it should be possible to use click to a callback with toolip', function () {
|
||||
spyOn(flowDb, 'setClickEvent');
|
||||
spyOn(flowDb, 'setTooltip');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A callback "tooltip"');
|
||||
@@ -43,10 +43,10 @@ describe('[Interactions] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A','tooltip');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||
});
|
||||
|
||||
it('it should be possible to use click to a click and call callback with toolip', function() {
|
||||
it('it should be possible to use click to a click and call callback with toolip', function () {
|
||||
spyOn(flowDb, 'setClickEvent');
|
||||
spyOn(flowDb, 'setTooltip');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A call callback() "tooltip"');
|
||||
@@ -55,20 +55,20 @@ describe('[Interactions] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A','tooltip');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||
});
|
||||
|
||||
it('it should be possible to use click to a callback with an arbitrary number of args', function() {
|
||||
spyOn(flowDb, 'setClickEvent');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A call callback("test0", test1, test2)');
|
||||
it('it should be possible to use click to a callback with an arbitrary number of args', function () {
|
||||
spyOn(flowDb, 'setClickEvent');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A call callback("test0", test1, test2)');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback','"test0", test1, test2');
|
||||
});
|
||||
expect(flowDb.setClickEvent).toHaveBeenCalledWith('A', 'callback', '"test0", test1, test2');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a link', function() {
|
||||
it('should handle interaction - click to a link', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html"');
|
||||
|
||||
@@ -78,7 +78,7 @@ describe('[Interactions] when parsing', () => {
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a click and href link', function() {
|
||||
it('should handle interaction - click to a click and href link', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html"');
|
||||
|
||||
@@ -88,7 +88,7 @@ describe('[Interactions] when parsing', () => {
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a link with tooltip', function() {
|
||||
it('should handle interaction - click to a link with tooltip', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
spyOn(flowDb, 'setTooltip');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html" "tooltip"');
|
||||
@@ -97,10 +97,10 @@ describe('[Interactions] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A','tooltip');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a click and href link with tooltip', function() {
|
||||
it('should handle interaction - click to a click and href link with tooltip', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
spyOn(flowDb, 'setTooltip');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html" "tooltip"');
|
||||
@@ -109,10 +109,10 @@ describe('[Interactions] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A','tooltip');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a link with target', function() {
|
||||
it('should handle interaction - click to a link with target', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html" _blank');
|
||||
|
||||
@@ -122,7 +122,7 @@ describe('[Interactions] when parsing', () => {
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a click and href link with target', function() {
|
||||
it('should handle interaction - click to a click and href link with target', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html" _blank');
|
||||
|
||||
@@ -132,7 +132,7 @@ describe('[Interactions] when parsing', () => {
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a link with tooltip and target', function() {
|
||||
it('should handle interaction - click to a link with tooltip and target', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
spyOn(flowDb, 'setTooltip');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A "click.html" "tooltip" _blank');
|
||||
@@ -141,19 +141,18 @@ describe('[Interactions] when parsing', () => {
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A','tooltip');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||
});
|
||||
|
||||
it('should handle interaction - click to a click and href link with tooltip and target', function() {
|
||||
spyOn(flowDb, 'setLink');
|
||||
spyOn(flowDb, 'setTooltip');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html" "tooltip" _blank');
|
||||
it('should handle interaction - click to a click and href link with tooltip and target', function () {
|
||||
spyOn(flowDb, 'setLink');
|
||||
spyOn(flowDb, 'setTooltip');
|
||||
const res = flow.parser.parse('graph TD\nA-->B\nclick A href "click.html" "tooltip" _blank');
|
||||
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A','tooltip');
|
||||
});
|
||||
const vert = flow.parser.yy.getVertices();
|
||||
const edges = flow.parser.yy.getEdges();
|
||||
|
||||
expect(flowDb.setLink).toHaveBeenCalledWith('A', 'click.html', '_blank');
|
||||
expect(flowDb.setTooltip).toHaveBeenCalledWith('A', 'tooltip');
|
||||
});
|
||||
});
|
||||
|
@@ -3,16 +3,16 @@ import flow from './flow';
|
||||
import { setConfig } from '../../../config';
|
||||
|
||||
setConfig({
|
||||
securityLevel: 'strict'
|
||||
securityLevel: 'strict',
|
||||
});
|
||||
|
||||
describe('[Lines] when parsing', () => {
beforeEach(function() {
beforeEach(function () {
flow.parser.yy = flowDb;
flow.parser.yy.clear();
});

it('should handle line interpolation default definitions', function() {
it('should handle line interpolation default definitions', function () {
const res = flow.parser.parse('graph TD\n' + 'A-->B\n' + 'linkStyle default interpolate basis');

const vert = flow.parser.yy.getVertices();
@@ -21,7 +21,7 @@ describe('[Lines] when parsing', () => {
expect(edges.defaultInterpolate).toBe('basis');
});

it('should handle line interpolation numbered definitions', function() {
it('should handle line interpolation numbered definitions', function () {
const res = flow.parser.parse(
'graph TD\n' +
'A-->B\n' +
@@ -37,7 +37,7 @@ describe('[Lines] when parsing', () => {
expect(edges[1].interpolate).toBe('cardinal');
});

it('should handle line interpolation multi-numbered definitions', function() {
it('should handle line interpolation multi-numbered definitions', function () {
const res = flow.parser.parse(
'graph TD\n' + 'A-->B\n' + 'A-->C\n' + 'linkStyle 0,1 interpolate basis'
);
@@ -49,7 +49,7 @@ describe('[Lines] when parsing', () => {
expect(edges[1].interpolate).toBe('basis');
});

it('should handle line interpolation default with style', function() {
it('should handle line interpolation default with style', function () {
const res = flow.parser.parse(
'graph TD\n' + 'A-->B\n' + 'linkStyle default interpolate basis stroke-width:1px;'
);
@@ -60,7 +60,7 @@ describe('[Lines] when parsing', () => {
expect(edges.defaultInterpolate).toBe('basis');
});

it('should handle line interpolation numbered with style', function() {
it('should handle line interpolation numbered with style', function () {
const res = flow.parser.parse(
'graph TD\n' +
'A-->B\n' +
@@ -76,7 +76,7 @@ describe('[Lines] when parsing', () => {
expect(edges[1].interpolate).toBe('cardinal');
});

it('should handle line interpolation multi-numbered with style', function() {
it('should handle line interpolation multi-numbered with style', function () {
const res = flow.parser.parse(
'graph TD\n' + 'A-->B\n' + 'A-->C\n' + 'linkStyle 0,1 interpolate basis stroke-width:1px;'
);
@@ -88,8 +88,8 @@ describe('[Lines] when parsing', () => {
expect(edges[1].interpolate).toBe('basis');
});

describe('it should handle new line type notation', function() {
it('it should handle regular lines', function() {
describe('it should handle new line type notation', function () {
it('it should handle regular lines', function () {
const res = flow.parser.parse('graph TD;A-->B;');

const vert = flow.parser.yy.getVertices();
@@ -98,7 +98,7 @@ describe('[Lines] when parsing', () => {
expect(edges[0].stroke).toBe('normal');
});

it('it should handle dotted lines', function() {
it('it should handle dotted lines', function () {
const res = flow.parser.parse('graph TD;A-.->B;');

const vert = flow.parser.yy.getVertices();
@@ -107,7 +107,7 @@ describe('[Lines] when parsing', () => {
expect(edges[0].stroke).toBe('dotted');
});

it('it should handle dotted lines', function() {
it('it should handle dotted lines', function () {
const res = flow.parser.parse('graph TD;A==>B;');

const vert = flow.parser.yy.getVertices();

@@ -3,16 +3,16 @@ import flow from './flow';
import { setConfig } from '../../../config';

setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});

describe('[Singlenodes] when parsing', () => {
beforeEach(function() {
beforeEach(function () {
flow.parser.yy = flowDb;
flow.parser.yy.clear();
});

it('should handle a single node', function() {
it('should handle a single node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;A;');

@@ -22,7 +22,7 @@ describe('[Singlenodes] when parsing', () => {
expect(edges.length).toBe(0);
expect(vert['A'].styles.length).toBe(0);
});
it('should handle a single node with white space after it (SN1)', function() {
it('should handle a single node with white space after it (SN1)', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;A ;');

@@ -33,7 +33,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['A'].styles.length).toBe(0);
});

it('should handle a single square node', function() {
it('should handle a single square node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a[A];');

@@ -45,7 +45,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('square');
});

it('should handle a single round square node', function() {
it('should handle a single round square node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a[A];');

@@ -57,7 +57,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('square');
});

it('should handle a single circle node', function() {
it('should handle a single circle node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a((A));');

@@ -68,7 +68,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('circle');
});

it('should handle a single round node', function() {
it('should handle a single round node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a(A);');

@@ -79,7 +79,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('round');
});

it('should handle a single odd node', function() {
it('should handle a single odd node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a>A];');

@@ -90,7 +90,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('odd');
});

it('should handle a single diamond node', function() {
it('should handle a single diamond node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a{A};');

@@ -101,7 +101,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('diamond');
});

it('should handle a single diamond node with whitespace after it', function() {
it('should handle a single diamond node with whitespace after it', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a{A} ;');

@@ -112,7 +112,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('diamond');
});

it('should handle a single diamond node with html in it (SN3)', function() {
it('should handle a single diamond node with html in it (SN3)', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a{A <br> end};');

@@ -124,7 +124,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].text).toBe('A <br> end');
});

it('should handle a single hexagon node', function() {
it('should handle a single hexagon node', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a{{A}};');

@@ -135,7 +135,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].type).toBe('hexagon');
});

it('should handle a single hexagon node with html in it', function() {
it('should handle a single hexagon node with html in it', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a{{A <br> end}};');

@@ -147,7 +147,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].text).toBe('A <br> end');
});

it('should handle a single round node with html in it', function() {
it('should handle a single round node with html in it', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;a(A <br> end);');

@@ -159,7 +159,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['a'].text).toBe('A <br> end');
});

it('should handle a single node with alphanumerics starting on a char', function() {
it('should handle a single node with alphanumerics starting on a char', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;id1;');

@@ -170,7 +170,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['id1'].styles.length).toBe(0);
});

it('should handle a single node with a single digit', function() {
it('should handle a single node with a single digit', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;1;');

@@ -181,7 +181,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['1'].text).toBe('1');
});

it('should handle a single node with a single digit in a subgraph', function() {
it('should handle a single node with a single digit in a subgraph', function () {
// Silly but syntactically correct

const res = flow.parser.parse('graph TD;subgraph "hello";1;end;');
@@ -193,7 +193,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['1'].text).toBe('1');
});

it('should handle a single node with alphanumerics starting on a num', function() {
it('should handle a single node with alphanumerics starting on a num', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;1id;');

@@ -204,7 +204,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['1id'].styles.length).toBe(0);
});

it('should handle a single node with alphanumerics containing a minus sign', function() {
it('should handle a single node with alphanumerics containing a minus sign', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;i-d;');

@@ -215,7 +215,7 @@ describe('[Singlenodes] when parsing', () => {
expect(vert['i-d'].styles.length).toBe(0);
});

it('should handle a single node with alphanumerics containing a underscore sign', function() {
it('should handle a single node with alphanumerics containing a underscore sign', function () {
// Silly but syntactically correct
const res = flow.parser.parse('graph TD;i_d;');

@@ -3,18 +3,18 @@ import flow from './flow';
import { setConfig } from '../../../config';

setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});

describe('[Style] when parsing', () => {
beforeEach(function() {
beforeEach(function () {
flow.parser.yy = flowDb;
flow.parser.yy.clear();
flow.parser.yy.setGen('gen-2');
});

// log.debug(flow.parser.parse('graph TD;style Q background:#fff;'));
it('should handle styles for vertices', function() {
it('should handle styles for vertices', function () {
const res = flow.parser.parse('graph TD;style Q background:#fff;');

const vert = flow.parser.yy.getVertices();
@@ -27,7 +27,7 @@ describe('[Style] when parsing', () => {
});

// log.debug(flow.parser.parse('graph TD;style Q background:#fff;'));
it('should handle styles for edges', function() {
it('should handle styles for edges', function () {
const res = flow.parser.parse('graph TD;a-->b;\nstyle #0 stroke: #f66;');

const edges = flow.parser.yy.getEdges();
@@ -35,7 +35,7 @@ describe('[Style] when parsing', () => {
expect(edges.length).toBe(1);
});

it('should handle multiple styles for a vortex', function() {
it('should handle multiple styles for a vortex', function () {
const res = flow.parser.parse('graph TD;style R background:#fff,border:1px solid red;');

const vert = flow.parser.yy.getVertices();
@@ -46,7 +46,7 @@ describe('[Style] when parsing', () => {
expect(vert['R'].styles[1]).toBe('border:1px solid red');
});

it('should handle multiple styles in a graph', function() {
it('should handle multiple styles in a graph', function () {
const res = flow.parser.parse(
'graph TD;style S background:#aaa;\nstyle T background:#bbb,border:1px solid red;'
);
@@ -61,7 +61,7 @@ describe('[Style] when parsing', () => {
expect(vert['T'].styles[1]).toBe('border:1px solid red');
});

it('should handle styles and graph definitions in a graph', function() {
it('should handle styles and graph definitions in a graph', function () {
const res = flow.parser.parse(
'graph TD;S-->T;\nstyle S background:#aaa;\nstyle T background:#bbb,border:1px solid red;'
);
@@ -76,7 +76,7 @@ describe('[Style] when parsing', () => {
expect(vert['T'].styles[1]).toBe('border:1px solid red');
});

it('should handle styles and graph definitions in a graph', function() {
it('should handle styles and graph definitions in a graph', function () {
const res = flow.parser.parse('graph TD;style T background:#bbb,border:1px solid red;');
// const res = flow.parser.parse('graph TD;style T background: #bbb;');

@@ -87,8 +87,10 @@ describe('[Style] when parsing', () => {
expect(vert['T'].styles[1]).toBe('border:1px solid red');
});

it('should keep node label text (if already defined) when a style is applied', function() {
const res = flow.parser.parse('graph TD;A(( ));B((Test));C;style A background:#fff;style D border:1px solid red;');
it('should keep node label text (if already defined) when a style is applied', function () {
const res = flow.parser.parse(
'graph TD;A(( ));B((Test));C;style A background:#fff;style D border:1px solid red;'
);

const vert = flow.parser.yy.getVertices();

@@ -98,7 +100,7 @@ describe('[Style] when parsing', () => {
expect(vert['D'].text).toBe('D');
});

it('should be possible to declare a class', function() {
it('should be possible to declare a class', function () {
const res = flow.parser.parse(
'graph TD;classDef exClass background:#bbb,border:1px solid red;'
);
@@ -111,7 +113,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
});

it('should be possible to declare a class with a dot in the style', function() {
it('should be possible to declare a class with a dot in the style', function () {
const res = flow.parser.parse(
'graph TD;classDef exClass background:#bbb,border:1.5px solid red;'
);
@@ -123,7 +125,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
});
it('should be possible to declare a class with a space in the style', function() {
it('should be possible to declare a class with a space in the style', function () {
const res = flow.parser.parse(
'graph TD;classDef exClass background: #bbb,border:1.5px solid red;'
);
@@ -135,7 +137,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[0]).toBe('background: #bbb');
expect(classes['exClass'].styles[1]).toBe('border:1.5px solid red');
});
it('should be possible to apply a class to a vertex', function() {
it('should be possible to apply a class to a vertex', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -151,7 +153,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a vertex with an id containing _', function() {
it('should be possible to apply a class to a vertex with an id containing _', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -167,7 +169,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a vertex directly', function() {
it('should be possible to apply a class to a vertex directly', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -184,7 +186,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly : usecase A[text].class ', function() {
it('should be possible to apply a class to a vertex directly : usecase A[text].class ', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -201,7 +203,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly : usecase A[text].class-->B[test2] ', function() {
it('should be possible to apply a class to a vertex directly : usecase A[text].class-->B[test2] ', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -218,7 +220,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
});

it('should be possible to apply a class to a vertex directly 2', function() {
it('should be possible to apply a class to a vertex directly 2', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -234,7 +236,7 @@ describe('[Style] when parsing', () => {
expect(classes['exClass'].styles[0]).toBe('background:#bbb');
expect(classes['exClass'].styles[1]).toBe('border:1px solid red');
});
it('should be possible to apply a class to a comma separated list of vertices', function() {
it('should be possible to apply a class to a comma separated list of vertices', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -254,7 +256,7 @@ describe('[Style] when parsing', () => {
expect(vertices['b'].classes[0]).toBe('exClass');
});

it('should handle style definitions with more then 1 digit in a row', function() {
it('should handle style definitions with more then 1 digit in a row', function () {
const res = flow.parser.parse(
'graph TD\n' +
'A-->B1\n' +
@@ -277,7 +279,7 @@ describe('[Style] when parsing', () => {
expect(edges[0].type).toBe('arrow_point');
});

it('should handle multi-numbered style definitions with more then 1 digit in a row', function() {
it('should handle multi-numbered style definitions with more then 1 digit in a row', function () {
const res = flow.parser.parse(
'graph TD\n' +
'A-->B1\n' +
@@ -301,7 +303,7 @@ describe('[Style] when parsing', () => {
expect(edges[0].type).toBe('arrow_point');
});

it('should handle classDefs with style in classes', function() {
it('should handle classDefs with style in classes', function () {
const res = flow.parser.parse('graph TD\nA-->B\nclassDef exClass font-style:bold;');

const vert = flow.parser.yy.getVertices();
@@ -310,7 +312,7 @@ describe('[Style] when parsing', () => {
expect(edges[0].type).toBe('arrow_point');
});

it('should handle classDefs with % in classes', function() {
it('should handle classDefs with % in classes', function () {
const res = flow.parser.parse(
'graph TD\nA-->B\nclassDef exClass fill:#f96,stroke:#333,stroke-width:4px,font-size:50%,font-style:bold;'
);

@@ -3,17 +3,17 @@ import flow from './flow';
import { setConfig } from '../../../config';

setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});

describe('[Text] when parsing', () => {
beforeEach(function() {
beforeEach(function () {
flow.parser.yy = flowDb;
flow.parser.yy.clear();
});

describe('it should handle text on edges', function() {
it('it should handle text without space', function() {
describe('it should handle text on edges', function () {
it('it should handle text without space', function () {
const res = flow.parser.parse('graph TD;A--x|textNoSpace|B;');

const vert = flow.parser.yy.getVertices();
@@ -22,7 +22,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('should handle with space', function() {
it('should handle with space', function () {
const res = flow.parser.parse('graph TD;A--x|text including space|B;');

const vert = flow.parser.yy.getVertices();
@@ -31,7 +31,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('it should handle text with /', function() {
it('it should handle text with /', function () {
const res = flow.parser.parse('graph TD;A--x|text with / should work|B;');

const vert = flow.parser.yy.getVertices();
@@ -40,7 +40,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].text).toBe('text with / should work');
});

it('it should handle space and space between vertices and link', function() {
it('it should handle space and space between vertices and link', function () {
const res = flow.parser.parse('graph TD;A --x|textNoSpace| B;');

const vert = flow.parser.yy.getVertices();
@@ -49,7 +49,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('should handle space and CAPS', function() {
it('should handle space and CAPS', function () {
const res = flow.parser.parse('graph TD;A--x|text including CAPS space|B;');

const vert = flow.parser.yy.getVertices();
@@ -58,7 +58,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('should handle space and dir', function() {
it('should handle space and dir', function () {
const res = flow.parser.parse('graph TD;A--x|text including URL space|B;');

const vert = flow.parser.yy.getVertices();
@@ -68,7 +68,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].text).toBe('text including URL space');
});

it('should handle space and send', function() {
it('should handle space and send', function () {
const res = flow.parser.parse('graph TD;A--text including URL space and send-->B;');

const vert = flow.parser.yy.getVertices();
@@ -77,7 +77,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_point');
expect(edges[0].text).toBe('text including URL space and send');
});
it('should handle space and send', function() {
it('should handle space and send', function () {
const res = flow.parser.parse('graph TD;A-- text including URL space and send -->B;');

const vert = flow.parser.yy.getVertices();
@@ -87,7 +87,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].text).toBe('text including URL space and send');
});

it('should handle space and dir (TD)', function() {
it('should handle space and dir (TD)', function () {
const res = flow.parser.parse('graph TD;A--x|text including R TD space|B;');

const vert = flow.parser.yy.getVertices();
@@ -96,7 +96,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
expect(edges[0].text).toBe('text including R TD space');
});
it('should handle `', function() {
it('should handle `', function () {
const res = flow.parser.parse('graph TD;A--x|text including `|B;');

const vert = flow.parser.yy.getVertices();
@@ -105,7 +105,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
expect(edges[0].text).toBe('text including `');
});
it('should handle v in node ids only v', function() {
it('should handle v in node ids only v', function () {
// only v
const res = flow.parser.parse('graph TD;A--xv(my text);');

@@ -115,7 +115,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
expect(vert['v'].text).toBe('my text');
});
it('should handle v in node ids v at end', function() {
it('should handle v in node ids v at end', function () {
// v at end
const res = flow.parser.parse('graph TD;A--xcsv(my text);');

@@ -125,7 +125,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
expect(vert['csv'].text).toBe('my text');
});
it('should handle v in node ids v in middle', function() {
it('should handle v in node ids v in middle', function () {
// v in middle
const res = flow.parser.parse('graph TD;A--xava(my text);');

@@ -135,7 +135,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
expect(vert['ava'].text).toBe('my text');
});
it('should handle v in node ids, v at start', function() {
it('should handle v in node ids, v at start', function () {
// v at start
const res = flow.parser.parse('graph TD;A--xva(my text);');

@@ -145,7 +145,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
expect(vert['va'].text).toBe('my text');
});
it('should handle keywords', function() {
it('should handle keywords', function () {
const res = flow.parser.parse('graph TD;A--x|text including graph space|B;');

const vert = flow.parser.yy.getVertices();
@@ -153,19 +153,19 @@ describe('[Text] when parsing', () => {

expect(edges[0].text).toBe('text including graph space');
});
it('should handle keywords', function() {
it('should handle keywords', function () {
const res = flow.parser.parse('graph TD;V-->a[v]');
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
expect(vert['a'].text).toBe('v');
});
it('should handle keywords', function() {
it('should handle keywords', function () {
const res = flow.parser.parse('graph TD;V-->a[v]');
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
expect(vert['a'].text).toBe('v');
});
it('should handle quoted text', function() {
it('should handle quoted text', function () {
const res = flow.parser.parse('graph TD;V-- "test string()" -->a[v]');
const vert = flow.parser.yy.getVertices();
const edges = flow.parser.yy.getEdges();
@@ -174,7 +174,7 @@ describe('[Text] when parsing', () => {
});

describe('it should handle text on lines', () => {
it('it should handle normal text on lines', function() {
it('it should handle normal text on lines', function () {
const res = flow.parser.parse('graph TD;A-- test text with == -->B;');

const vert = flow.parser.yy.getVertices();
@@ -182,7 +182,7 @@ describe('[Text] when parsing', () => {

expect(edges[0].stroke).toBe('normal');
});
it('it should handle dotted text on lines (TD3)', function() {
it('it should handle dotted text on lines (TD3)', function () {
const res = flow.parser.parse('graph TD;A-. test text with == .->B;');

const vert = flow.parser.yy.getVertices();
@@ -190,7 +190,7 @@ describe('[Text] when parsing', () => {

expect(edges[0].stroke).toBe('dotted');
});
it('it should handle thick text on lines', function() {
it('it should handle thick text on lines', function () {
const res = flow.parser.parse('graph TD;A== test text with - ==>B;');

const vert = flow.parser.yy.getVertices();
@@ -200,8 +200,8 @@ describe('[Text] when parsing', () => {
});
});

describe('it should handle text on edges using the new notation', function() {
it('it should handle text without space', function() {
describe('it should handle text on edges using the new notation', function () {
it('it should handle text without space', function () {
const res = flow.parser.parse('graph TD;A-- textNoSpace --xB;');

const vert = flow.parser.yy.getVertices();
@@ -210,7 +210,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('it should handle text with multiple leading space', function() {
it('it should handle text with multiple leading space', function () {
const res = flow.parser.parse('graph TD;A-- textNoSpace --xB;');

const vert = flow.parser.yy.getVertices();
@@ -219,7 +219,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('should handle with space', function() {
it('should handle with space', function () {
const res = flow.parser.parse('graph TD;A-- text including space --xB;');

const vert = flow.parser.yy.getVertices();
@@ -228,7 +228,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('it should handle text with /', function() {
it('it should handle text with /', function () {
const res = flow.parser.parse('graph TD;A -- text with / should work --x B;');

const vert = flow.parser.yy.getVertices();
@@ -237,7 +237,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].text).toBe('text with / should work');
});

it('it should handle space and space between vertices and link', function() {
it('it should handle space and space between vertices and link', function () {
const res = flow.parser.parse('graph TD;A -- textNoSpace --x B;');

const vert = flow.parser.yy.getVertices();
@@ -246,7 +246,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('should handle space and CAPS', function() {
it('should handle space and CAPS', function () {
const res = flow.parser.parse('graph TD;A-- text including CAPS space --xB;');

const vert = flow.parser.yy.getVertices();
@@ -255,7 +255,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
});

it('should handle space and dir', function() {
it('should handle space and dir', function () {
const res = flow.parser.parse('graph TD;A-- text including URL space --xB;');

const vert = flow.parser.yy.getVertices();
@@ -265,7 +265,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].text).toBe('text including URL space');
});

it('should handle space and dir (TD2)', function() {
it('should handle space and dir (TD2)', function () {
const res = flow.parser.parse('graph TD;A-- text including R TD space --xB;');

const vert = flow.parser.yy.getVertices();
@@ -274,7 +274,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].type).toBe('arrow_cross');
expect(edges[0].text).toBe('text including R TD space');
});
it('should handle keywords', function() {
it('should handle keywords', function () {
const res = flow.parser.parse('graph TD;A-- text including graph space and v --xB;');

const vert = flow.parser.yy.getVertices();
@@ -282,7 +282,7 @@ describe('[Text] when parsing', () => {

expect(edges[0].text).toBe('text including graph space and v');
});
it('should handle keywords', function() {
it('should handle keywords', function () {
const res = flow.parser.parse('graph TD;A-- text including graph space and v --xB[blav]');

const vert = flow.parser.yy.getVertices();
@@ -301,8 +301,8 @@ describe('[Text] when parsing', () => {
// });
});

describe('it should handle text in vertices, ', function() {
it('it should handle space', function() {
describe('it should handle text in vertices, ', function () {
it('it should handle space', function () {
const res = flow.parser.parse('graph TD;A-->C(Chimpansen hoppar);');

const vert = flow.parser.yy.getVertices();
@@ -311,7 +311,7 @@ describe('[Text] when parsing', () => {
expect(vert['C'].type).toBe('round');
expect(vert['C'].text).toBe('Chimpansen hoppar');
});
it('it should handle åäö and minus', function() {
it('it should handle åäö and minus', function () {
const res = flow.parser.parse('graph TD;A-->C{Chimpansen hoppar åäö-ÅÄÖ};');

const vert = flow.parser.yy.getVertices();
@@ -321,7 +321,7 @@ describe('[Text] when parsing', () => {
expect(vert['C'].text).toBe('Chimpansen hoppar åäö-ÅÄÖ');
});

it('it should handle with åäö, minus and space and br', function() {
it('it should handle with åäö, minus and space and br', function () {
const res = flow.parser.parse('graph TD;A-->C(Chimpansen hoppar åäö <br> - ÅÄÖ);');

const vert = flow.parser.yy.getVertices();
@@ -339,21 +339,21 @@ describe('[Text] when parsing', () => {
// expect(vert['C'].type).toBe('round');
// expect(vert['C'].text).toBe(' A[Object(foo,bar)]-->B(Thing);');
// });
it('it should handle unicode chars', function() {
it('it should handle unicode chars', function () {
const res = flow.parser.parse('graph TD;A-->C(Начало);');

const vert = flow.parser.yy.getVertices();

expect(vert['C'].text).toBe('Начало');
});
it('it should handle backslask', function() {
it('it should handle backslask', function () {
const res = flow.parser.parse('graph TD;A-->C(c:\\windows);');

const vert = flow.parser.yy.getVertices();

expect(vert['C'].text).toBe('c:\\windows');
});
it('it should handle CAPS', function() {
it('it should handle CAPS', function () {
const res = flow.parser.parse('graph TD;A-->C(some CAPS);');

const vert = flow.parser.yy.getVertices();
@@ -362,7 +362,7 @@ describe('[Text] when parsing', () => {
expect(vert['C'].type).toBe('round');
expect(vert['C'].text).toBe('some CAPS');
});
it('it should handle directions', function() {
it('it should handle directions', function () {
const res = flow.parser.parse('graph TD;A-->C(some URL);');

const vert = flow.parser.yy.getVertices();
@@ -373,7 +373,7 @@ describe('[Text] when parsing', () => {
});
});

it('should handle multi-line text', function() {
it('should handle multi-line text', function () {
const res = flow.parser.parse('graph TD;A--o|text space|B;\n B-->|more text with space|C;');

const vert = flow.parser.yy.getVertices();
@@ -393,7 +393,7 @@ describe('[Text] when parsing', () => {
expect(edges[1].text).toBe('more text with space');
});

it('should handle text in vertices with space', function() {
it('should handle text in vertices with space', function () {
const res = flow.parser.parse('graph TD;A[chimpansen hoppar]-->C;');

const vert = flow.parser.yy.getVertices();
@@ -403,7 +403,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].text).toBe('chimpansen hoppar');
});

it('should handle text in vertices with space with spaces between vertices and link', function() {
it('should handle text in vertices with space with spaces between vertices and link', function () {
const res = flow.parser.parse('graph TD;A[chimpansen hoppar] --> C;');

const vert = flow.parser.yy.getVertices();
@@ -412,7 +412,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].type).toBe('square');
expect(vert['A'].text).toBe('chimpansen hoppar');
});
it('should handle text including _ in vertices', function() {
it('should handle text including _ in vertices', function () {
const res = flow.parser.parse('graph TD;A[chimpansen_hoppar] --> C;');

const vert = flow.parser.yy.getVertices();
@@ -422,7 +422,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].text).toBe('chimpansen_hoppar');
});

it('should handle quoted text in vertices ', function() {
it('should handle quoted text in vertices ', function () {
const res = flow.parser.parse('graph TD;A["chimpansen hoppar ()[]"] --> C;');

const vert = flow.parser.yy.getVertices();
@@ -432,7 +432,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].text).toBe('chimpansen hoppar ()[]');
});

it('should handle text in circle vertices with space', function() {
it('should handle text in circle vertices with space', function () {
const res = flow.parser.parse('graph TD;A((chimpansen hoppar))-->C;');

const vert = flow.parser.yy.getVertices();
@@ -442,7 +442,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].text).toBe('chimpansen hoppar');
});

it('should handle text in ellipse vertices', function() {
it('should handle text in ellipse vertices', function () {
const res = flow.parser.parse('graph TD\nA(-this is an ellipse-)-->B');

const vert = flow.parser.yy.getVertices();
@@ -452,7 +452,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].text).toBe('this is an ellipse');
});

it('should handle text in diamond vertices with space', function() {
it('should handle text in diamond vertices with space', function () {
const res = flow.parser.parse('graph TD;A(chimpansen hoppar)-->C;');

const vert = flow.parser.yy.getVertices();
@@ -462,7 +462,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].text).toBe('chimpansen hoppar');
});

it('should handle text in with ?', function() {
it('should handle text in with ?', function () {
const res = flow.parser.parse('graph TD;A(?)-->|?|C;');

const vert = flow.parser.yy.getVertices();
@@ -471,7 +471,7 @@ describe('[Text] when parsing', () => {
expect(vert['A'].text).toBe('?');
expect(edges[0].text).toBe('?');
});
it('should handle text in with éèêàçô', function() {
it('should handle text in with éèêàçô', function () {
const res = flow.parser.parse('graph TD;A(éèêàçô)-->|éèêàçô|C;');

const vert = flow.parser.yy.getVertices();
@@ -481,7 +481,7 @@ describe('[Text] when parsing', () => {
expect(edges[0].text).toBe('éèêàçô');
});

it('should handle text in with ,.?!+-*', function() {
it('should handle text in with ,.?!+-*', function () {
const res = flow.parser.parse('graph TD;A(,.?!+-*)-->|,.?!+-*|C;');

const vert = flow.parser.yy.getVertices();

@@ -3,17 +3,17 @@ import flow from './flow';
import { setConfig } from '../../../config';

setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});

describe('when parsing flowcharts', function() {
beforeEach(function() {
describe('when parsing flowcharts', function () {
beforeEach(function () {
flow.parser.yy = flowDb;
flow.parser.yy.clear();
flow.parser.yy.setGen('gen-2');
});

it('should handle chaining of vertices', function() {

it('should handle chaining of vertices', function () {
const res = flow.parser.parse(`
graph TD
A-->B-->C;
@@ -35,7 +35,7 @@ describe('when parsing flowcharts', function() {
expect(edges[1].type).toBe('arrow_point');
expect(edges[1].text).toBe('');
});
it('should handle chaining of vertices', function() {
it('should handle chaining of vertices', function () {
const res = flow.parser.parse(`
graph TD
A & B --> C;
@@ -57,7 +57,7 @@ describe('when parsing flowcharts', function() {
expect(edges[1].type).toBe('arrow_point');
expect(edges[1].text).toBe('');
});
it('should multiple vertices in link statement in the begining', function() {
it('should multiple vertices in link statement in the begining', function () {
const res = flow.parser.parse(`
graph TD
A-->B & C;
@@ -79,7 +79,7 @@ describe('when parsing flowcharts', function() {
expect(edges[1].type).toBe('arrow_point');
expect(edges[1].text).toBe('');
});
it('should multiple vertices in link statement at the end', function() {
it('should multiple vertices in link statement at the end', function () {
const res = flow.parser.parse(`
graph TD
A & B--> C & D;
@@ -110,7 +110,7 @@ describe('when parsing flowcharts', function() {
expect(edges[3].type).toBe('arrow_point');
expect(edges[3].text).toBe('');
});
it('should handle chaining of vertices at both ends at once', function() {
it('should handle chaining of vertices at both ends at once', function () {
const res = flow.parser.parse(`
graph TD
A & B--> C & D;
@@ -141,7 +141,7 @@ describe('when parsing flowcharts', function() {
expect(edges[3].type).toBe('arrow_point');
expect(edges[3].text).toBe('');
});
it('should handle chaining and multiple nodes in in link statement FVC ', function() {
it('should handle chaining and multiple nodes in in link statement FVC ', function () {
const res = flow.parser.parse(`
graph TD
A --> B & B2 & C --> D2;
@@ -181,7 +181,7 @@ describe('when parsing flowcharts', function() {
expect(edges[5].type).toBe('arrow_point');
expect(edges[5].text).toBe('');
});
it('should handle chaining and multiple nodes in in link statement with extra info in statements', function() {
it('should handle chaining and multiple nodes in in link statement with extra info in statements', function () {
const res = flow.parser.parse(`
graph TD
A[ h ] -- hello --> B[" test "]:::exClass & C --> D;

@@ -118,6 +118,7 @@ that id.
"])" return 'STADIUMEND';
"[[" return 'SUBROUTINESTART';
"]]" return 'SUBROUTINEEND';
"[|" return 'VERTEX_WITH_PROPS_START';
"[(" return 'CYLINDERSTART';
")]" return 'CYLINDEREND';
\- return 'MINUS';
@@ -380,6 +381,8 @@ vertex: idString SQS text SQE
{$$ = $1;yy.addVertex($1,$3,'stadium');}
| idString SUBROUTINESTART text SUBROUTINEEND
{$$ = $1;yy.addVertex($1,$3,'subroutine');}
| idString VERTEX_WITH_PROPS_START ALPHA COLON ALPHA PIPE text SQE
{$$ = $1;yy.addVertex($1,$7,'rect',undefined,undefined,undefined, Object.fromEntries([[$3, $5]]));}
| idString CYLINDERSTART text CYLINDEREND
{$$ = $1;yy.addVertex($1,$3,'cylinder');}
| idString PS text PE
@@ -559,5 +562,5 @@ alphaNumToken : PUNCTUATION | AMP | UNICODE_TEXT | NUM| ALPHA | COLON | COMMA |

idStringToken : ALPHA|UNDERSCORE |UNICODE_TEXT | NUM| COLON | COMMA | PLUS | MINUS | DOWN |EQUALS | MULT | BRKT | DOT | PUNCTUATION | AMP | DEFAULT;

graphCodeTokens: STADIUMSTART | STADIUMEND | SUBROUTINESTART | SUBROUTINEEND | CYLINDERSTART | CYLINDEREND | TRAPSTART | TRAPEND | INVTRAPSTART | INVTRAPEND | PIPE | PS | PE | SQS | SQE | DIAMOND_START | DIAMOND_STOP | TAGSTART | TAGEND | ARROW_CROSS | ARROW_POINT | ARROW_CIRCLE | ARROW_OPEN | QUOTE | SEMI;
graphCodeTokens: STADIUMSTART | STADIUMEND | SUBROUTINESTART | SUBROUTINEEND | VERTEX_WITH_PROPS_START | CYLINDERSTART | CYLINDEREND | TRAPSTART | TRAPEND | INVTRAPSTART | INVTRAPEND | PIPE | PS | PE | SQS | SQE | DIAMOND_START | DIAMOND_STOP | TAGSTART | TAGEND | ARROW_CROSS | ARROW_POINT | ARROW_CIRCLE | ARROW_OPEN | QUOTE | SEMI;
%%

@@ -3,16 +3,16 @@ import flow from './flow';
import { setConfig } from '../../../config';

setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});

describe('when parsing ', function() {
beforeEach(function() {
describe('when parsing ', function () {
beforeEach(function () {
flow.parser.yy = flowDb;
flow.parser.yy.clear();
});

it('it should handle a trailing whitespaces after statememnts', function() {
it('it should handle a trailing whitespaces after statememnts', function () {
const res = flow.parser.parse('graph TD;\n\n\n %% Comment\n A-->B; \n B-->C;');

const vert = flow.parser.yy.getVertices();
@@ -27,7 +27,7 @@ describe('when parsing ', function() {
expect(edges[0].text).toBe('');
});

it('should handle node names with "end" substring', function() {
it('should handle node names with "end" substring', function () {
const res = flow.parser.parse('graph TD\nendpoint --> sender');

const vert = flow.parser.yy.getVertices();
@@ -39,7 +39,7 @@ describe('when parsing ', function() {
expect(edges[0].end).toBe('sender');
});

it('should handle node names ending with keywords', function() {
it('should handle node names ending with keywords', function () {
const res = flow.parser.parse('graph TD\nblend --> monograph');

const vert = flow.parser.yy.getVertices();
@@ -51,7 +51,7 @@ describe('when parsing ', function() {
expect(edges[0].end).toBe('monograph');
});

it('should allow default in the node name/id', function() {
it('should allow default in the node name/id', function () {
const res = flow.parser.parse('graph TD\ndefault --> monograph');

const vert = flow.parser.yy.getVertices();
@@ -63,8 +63,8 @@ describe('when parsing ', function() {
expect(edges[0].end).toBe('monograph');
});

describe('special characters should be be handled.', function() {
const charTest = function(char, result) {
describe('special characters should be be handled.', function () {
const charTest = function (char, result) {
const res = flow.parser.parse('graph TD;A(' + char + ')-->B;');

const vert = flow.parser.yy.getVertices();
@@ -80,7 +80,7 @@ describe('when parsing ', function() {
flow.parser.yy.clear();
};

it("it should be able to parse a '.'", function() {
it("it should be able to parse a '.'", function () {
charTest('.');
charTest('Start 103a.a1');
});
@@ -89,27 +89,27 @@ describe('when parsing ', function() {
// charTest('_')
// })

it("it should be able to parse a ':'", function() {
it("it should be able to parse a ':'", function () {
charTest(':');
});

it("it should be able to parse a ','", function() {
it("it should be able to parse a ','", function () {
charTest(',');
});

it("it should be able to parse text containing '-'", function() {
it("it should be able to parse text containing '-'", function () {
charTest('a-b');
});

it("it should be able to parse a '+'", function() {
it("it should be able to parse a '+'", function () {
charTest('+');
});

it("it should be able to parse a '*'", function() {
it("it should be able to parse a '*'", function () {
charTest('*');
});

it("it should be able to parse a '<'", function() {
it("it should be able to parse a '<'", function () {
charTest('<', '<');
});

@@ -117,16 +117,15 @@ describe('when parsing ', function() {
// charTest('>', '>');
// });


// it("it should be able to parse a '='", function() {
// charTest('=', '=');
// });
it("it should be able to parse a '&'", function() {
it("it should be able to parse a '&'", function () {
charTest('&');
});
});

it('should be possible to use direction in node ids', function() {
it('should be possible to use direction in node ids', function () {
let statement = '';

statement = statement + 'graph TD;' + '\n';
@@ -138,7 +137,7 @@ describe('when parsing ', function() {
expect(vertices['node1TB'].id).toBe('node1TB');
});

it('should be possible to use direction in node ids', function() {
it('should be possible to use direction in node ids', function () {
let statement = '';

statement = statement + 'graph TD;A--x|text including URL space|B;';
@@ -147,7 +146,7 @@ describe('when parsing ', function() {
const classes = flow.parser.yy.getClasses();
expect(vertices['A'].id).toBe('A');
});
it('should be possible to use numbers as labels', function() {
it('should be possible to use numbers as labels', function () {
let statement = '';

statement = statement + 'graph TB;subgraph "number as labels";1;end;';

@@ -4,16 +4,16 @@ import filter from 'lodash/filter';
import { setConfig } from '../../../config';

setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});

describe('when parsing subgraphs', function() {
beforeEach(function() {
describe('when parsing subgraphs', function () {
beforeEach(function () {
flow.parser.yy = flowDb;
flow.parser.yy.clear();
flow.parser.yy.setGen('gen-2');
});
it('should handle subgraph with tab indentation', function() {
it('should handle subgraph with tab indentation', function () {
const res = flow.parser.parse('graph TB\nsubgraph One\n\ta1-->a2\nend');
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(1);
@@ -25,7 +25,7 @@ describe('when parsing subgraphs', function() {
expect(subgraph.title).toBe('One');
expect(subgraph.id).toBe('One');
});
it('should handle subgraph with chaining nodes indentation', function() {
it('should handle subgraph with chaining nodes indentation', function () {
const res = flow.parser.parse('graph TB\nsubgraph One\n\ta1-->a2-->a3\nend');
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(1);
@@ -38,7 +38,7 @@ describe('when parsing subgraphs', function() {
expect(subgraph.id).toBe('One');
});

it('should handle subgraph with multiple words in title', function() {
it('should handle subgraph with multiple words in title', function () {
const res = flow.parser.parse('graph TB\nsubgraph "Some Title"\n\ta1-->a2\nend');
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(1);
@@ -50,7 +50,7 @@ describe('when parsing subgraphs', function() {
expect(subgraph.id).toBe('subGraph0');
});

it('should handle subgraph with id and title notation', function() {
it('should handle subgraph with id and title notation', function () {
const res = flow.parser.parse('graph TB\nsubgraph some-id[Some Title]\n\ta1-->a2\nend');
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(1);
@@ -62,7 +62,7 @@ describe('when parsing subgraphs', function() {
expect(subgraph.id).toBe('some-id');
});

xit('should handle subgraph without id and space in title', function() {
xit('should handle subgraph without id and space in title', function () {
const res = flow.parser.parse('graph TB\nsubgraph Some Title\n\ta1-->a2\nend');
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(1);
@@ -74,7 +74,7 @@ describe('when parsing subgraphs', function() {
expect(subgraph.id).toBe('some-id');
});

it('should handle subgraph id starting with a number', function() {
it('should handle subgraph id starting with a number', function () {
const res = flow.parser.parse(`graph TD
A[Christmas] -->|Get money| B(Go shopping)
subgraph 1test
@@ -89,7 +89,7 @@ describe('when parsing subgraphs', function() {
expect(subgraph.id).toBe('1test');
});

it('should handle subgraphs1', function() {
it('should handle subgraphs1', function () {
const res = flow.parser.parse('graph TD;A-->B;subgraph myTitle;c-->d;end;');

const vert = flow.parser.yy.getVertices();
@@ -97,7 +97,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle subgraphs with title in quotes', function() {
it('should handle subgraphs with title in quotes', function () {
const res = flow.parser.parse('graph TD;A-->B;subgraph "title in quotes";c-->d;end;');

const vert = flow.parser.yy.getVertices();
@@ -111,7 +111,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle subgraphs in old style that was broken', function() {
it('should handle subgraphs in old style that was broken', function () {
const res = flow.parser.parse('graph TD;A-->B;subgraph old style that is broken;c-->d;end;');

const vert = flow.parser.yy.getVertices();
@@ -125,7 +125,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle subgraphs with dashes in the title', function() {
it('should handle subgraphs with dashes in the title', function () {
const res = flow.parser.parse('graph TD;A-->B;subgraph a-b-c;c-->d;end;');

const vert = flow.parser.yy.getVertices();
@@ -139,7 +139,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle subgraphs with id and title in brackets', function() {
it('should handle subgraphs with id and title in brackets', function () {
const res = flow.parser.parse('graph TD;A-->B;subgraph uid1[text of doom];c-->d;end;');

const vert = flow.parser.yy.getVertices();
@@ -154,7 +154,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle subgraphs with id and title in brackets and quotes', function() {
it('should handle subgraphs with id and title in brackets and quotes', function () {
const res = flow.parser.parse('graph TD;A-->B;subgraph uid2["text of doom"];c-->d;end;');

const vert = flow.parser.yy.getVertices();
@@ -169,7 +169,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle subgraphs with id and title in brackets without spaces', function() {
it('should handle subgraphs with id and title in brackets without spaces', function () {
const res = flow.parser.parse('graph TD;A-->B;subgraph uid2[textofdoom];c-->d;end;');

const vert = flow.parser.yy.getVertices();
@@ -185,7 +185,7 @@ describe('when parsing subgraphs', function() {
expect(edges[0].type).toBe('arrow_point');
});

it('should handle subgraphs2', function() {
it('should handle subgraphs2', function () {
const res = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\n\n c-->d \nend\n');

const vert = flow.parser.yy.getVertices();
@@ -194,7 +194,7 @@ describe('when parsing subgraphs', function() {
expect(edges[0].type).toBe('arrow_point');
});

it('should handle subgraphs3', function() {
it('should handle subgraphs3', function () {
const res = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle \n\n c-->d \nend\n');

const vert = flow.parser.yy.getVertices();
@@ -203,7 +203,7 @@ describe('when parsing subgraphs', function() {
expect(edges[0].type).toBe('arrow_point');
});

it('should handle nested subgraphs', function() {
it('should handle nested subgraphs', function () {
const str =
'graph TD\n' +
'A-->B\n' +
@@ -215,7 +215,7 @@ describe('when parsing subgraphs', function() {
const res = flow.parser.parse(str);
});

it('should handle subgraphs4', function() {
it('should handle subgraphs4', function () {
const res = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\nc-->d\nend;');

const vert = flow.parser.yy.getVertices();
@@ -224,7 +224,7 @@ describe('when parsing subgraphs', function() {
expect(edges[0].type).toBe('arrow_point');
});

it('should handle subgraphs5', function() {
it('should handle subgraphs5', function () {
const res = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\nc-- text -->d\nd-->e\n end;');

const vert = flow.parser.yy.getVertices();
@@ -232,7 +232,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle subgraphs with multi node statements in it', function() {
it('should handle subgraphs with multi node statements in it', function () {
const res = flow.parser.parse('graph TD\nA-->B\nsubgraph myTitle\na & b --> c & e\n end;');

const vert = flow.parser.yy.getVertices();
@@ -240,7 +240,7 @@ describe('when parsing subgraphs', function() {

expect(edges[0].type).toBe('arrow_point');
});
it('should handle nested subgraphs 1', function() {
it('should handle nested subgraphs 1', function () {
const res = flow.parser.parse(`flowchart TB
subgraph A
b-->B
@@ -254,8 +254,8 @@ describe('when parsing subgraphs', function() {
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(2);

const subgraphA = filter(subgraphs,o => o.id === 'A')[0];
const subgraphB = filter(subgraphs,o => o.id === 'B')[0];
const subgraphA = filter(subgraphs, (o) => o.id === 'A')[0];
const subgraphB = filter(subgraphs, (o) => o.id === 'B')[0];

expect(subgraphB.nodes[0]).toBe('c');
expect(subgraphA.nodes).toContain('B');
@@ -263,7 +263,7 @@ describe('when parsing subgraphs', function() {
expect(subgraphA.nodes).toContain('a');
expect(subgraphA.nodes).not.toContain('c');
});
it('should handle nested subgraphs 2', function() {
it('should handle nested subgraphs 2', function () {
const res = flow.parser.parse(`flowchart TB
b-->B
a-->c
@@ -279,8 +279,8 @@ describe('when parsing subgraphs', function() {
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(2);

const subgraphA = filter(subgraphs,o => o.id === 'A')[0];
const subgraphB = filter(subgraphs,o => o.id === 'B')[0];
const subgraphA = filter(subgraphs, (o) => o.id === 'A')[0];
const subgraphB = filter(subgraphs, (o) => o.id === 'B')[0];

expect(subgraphB.nodes[0]).toBe('c');
expect(subgraphA.nodes).toContain('B');
@@ -288,7 +288,7 @@ describe('when parsing subgraphs', function() {
expect(subgraphA.nodes).toContain('a');
expect(subgraphA.nodes).not.toContain('c');
});
it('should handle nested subgraphs 3', function() {
it('should handle nested subgraphs 3', function () {
const res = flow.parser.parse(`flowchart TB
subgraph B
c
@@ -302,8 +302,8 @@ describe('when parsing subgraphs', function() {
const subgraphs = flow.parser.yy.getSubGraphs();
expect(subgraphs.length).toBe(2);

const subgraphA = filter(subgraphs,o => o.id === 'A')[0];
const subgraphB = filter(subgraphs,o => o.id === 'B')[0];
const subgraphA = filter(subgraphs, (o) => o.id === 'A')[0];
const subgraphB = filter(subgraphs, (o) => o.id === 'B')[0];
expect(subgraphB.nodes[0]).toBe('c');
expect(subgraphA.nodes).toContain('B');
expect(subgraphA.nodes).toContain('b');

@@ -1,13 +1,12 @@
/* eslint-env jasmine */
import moment from 'moment-mini';
import ganttDb from './ganttDb';
describe('when using the ganttDb', function() {
beforeEach(function() {
describe('when using the ganttDb', function () {
beforeEach(function () {
ganttDb.clear();
});
describe('when using relative times', function() {
describe('when using relative times', function () {
it.each`
diff | date | expected
${' 1d'} | ${moment('2019-01-01')} | ${moment('2019-01-02').toDate()}
@@ -17,8 +16,8 @@ describe('when using the ganttDb', function() {
});
});
describe('when calling the clear function', function() {
beforeEach(function() {
describe('when calling the clear function', function () {
beforeEach(function () {
ganttDb.setDateFormat('YYYY-MM-DD');
ganttDb.enableInclusiveEndDates();
ganttDb.setTodayMarker('off');
@@ -83,7 +82,7 @@ describe('when using the ganttDb', function() {
expStartDate2,
expEndDate2,
expId2,
expTask2
expTask2,
}) => {
ganttDb.setDateFormat('YYYY-MM-DD');
ganttDb.addSection(section);
@@ -99,7 +98,7 @@ describe('when using the ganttDb', function() {
}
);
it('should handle relative start date based on id regardless of sections', function() {
it('should handle relative start date based on id regardless of sections', function () {
ganttDb.setDateFormat('YYYY-MM-DD');
ganttDb.addSection('testa1');
ganttDb.addTask('test1', 'id1,2013-01-01,2w');
@@ -119,7 +118,7 @@ describe('when using the ganttDb', function() {
expect(tasks[2].startTime).toEqual(new Date(2013, 0, 15));
expect(tasks[2].endTime).toEqual(new Date(2013, 0, 17));
});
it('should ignore weekends', function() {
it('should ignore weekends', function () {
ganttDb.setDateFormat('YYYY-MM-DD');
ganttDb.setExcludes('weekends 2019-02-06,friday');
ganttDb.addSection('weekends skip test');
@@ -176,7 +175,7 @@ describe('when using the ganttDb', function() {
expect(tasks[6].task).toEqual('test7');
});
it('should maintain the order in which tasks are created', function() {
it('should maintain the order in which tasks are created', function () {
ganttDb.setTitle('Project Execution');
ganttDb.setDateFormat('YYYY-MM-DD');
ganttDb.addSection('section A section');
@@ -275,7 +274,9 @@ describe('when using the ganttDb', function() {
expect(tasks[10].task).toEqual('Describe gantt syntax');
expect(tasks[11].startTime).toEqual(moment('2014-01-11', 'YYYY-MM-DD').toDate());
expect(tasks[11].endTime).toEqual(moment('2014-01-11 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate());
expect(tasks[11].endTime).toEqual(
moment('2014-01-11 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate()
);
expect(tasks[11].order).toEqual(11);
expect(tasks[11].id).toEqual('task7');
expect(tasks[11].task).toEqual('Add gantt diagram to demo page');
@@ -294,20 +295,25 @@ describe('when using the ganttDb', function() {
expect(tasks[13].task).toEqual('Describe gantt syntax');
expect(tasks[14].startTime).toEqual(moment('2014-01-16', 'YYYY-MM-DD').toDate());
expect(tasks[14].endTime).toEqual(moment('2014-01-16 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate());
expect(tasks[14].endTime).toEqual(
moment('2014-01-16 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate()
);
expect(tasks[14].order).toEqual(14);
expect(tasks[14].id).toEqual('task9');
expect(tasks[14].task).toEqual('Add gantt diagram to demo page');
expect(tasks[15].startTime).toEqual(moment('2014-01-16 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate());
expect(tasks[15].endTime).toEqual(moment('2014-01-18 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate());
expect(tasks[15].startTime).toEqual(
moment('2014-01-16 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate()
);
expect(tasks[15].endTime).toEqual(
moment('2014-01-18 20:00:00', 'YYYY-MM-DD HH:mm:ss').toDate()
);
expect(tasks[15].order).toEqual(15);
expect(tasks[15].id).toEqual('task10');
expect(tasks[15].task).toEqual('Add another diagram to demo page');
});
it('should work when end date is the 31st', function() {
it('should work when end date is the 31st', function () {
ganttDb.setDateFormat('YYYY-MM-DD');
ganttDb.addSection('Task endTime is on the 31st day of the month');
ganttDb.addTask('test1', 'id1,2019-09-30,11d');
@@ -327,14 +333,14 @@ describe('when using the ganttDb', function() {
expect(tasks[1].task).toEqual('test2');
});
describe('when setting inclusive end dates', function() {
beforeEach(function() {
describe('when setting inclusive end dates', function () {
beforeEach(function () {
ganttDb.setDateFormat('YYYY-MM-DD');
ganttDb.enableInclusiveEndDates();
ganttDb.addTask('test1', 'id1,2019-02-01,1d');
ganttDb.addTask('test2', 'id2,2019-02-01,2019-02-03');
});
it('should automatically add one day to all end dates', function() {
it('should automatically add one day to all end dates', function () {
const tasks = ganttDb.getTasks();
expect(tasks[0].startTime).toEqual(moment('2019-02-01', 'YYYY-MM-DD').toDate());
expect(tasks[0].endTime).toEqual(moment('2019-02-02', 'YYYY-MM-DD').toDate());
@@ -1,44 +1,42 @@
/* eslint-env jasmine */
/* eslint-disable no-eval */
import { parser } from './gantt';
import ganttDb from '../ganttDb';
const spyOn = jest.spyOn;
const parserFnConstructor = str => {
const parserFnConstructor = (str) => {
return () => {
parser.parse(str);
};
};
describe('when parsing a gantt diagram it', function() {
beforeEach(function() {
describe('when parsing a gantt diagram it', function () {
beforeEach(function () {
parser.yy = ganttDb;
parser.yy.clear();
});
it('should handle a dateFormat definition', function() {
it('should handle a dateFormat definition', function () {
const str = 'gantt\ndateFormat yyyy-mm-dd';
expect(parserFnConstructor(str)).not.toThrow();
});
it('should handle a inclusive end date definition', function() {
it('should handle a inclusive end date definition', function () {
const str = 'gantt\ndateFormat yyyy-mm-dd\ninclusiveEndDates';
expect(parserFnConstructor(str)).not.toThrow();
});
it('should handle a title definition', function() {
it('should handle a title definition', function () {
const str = 'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid';
expect(parserFnConstructor(str)).not.toThrow();
});
it('should handle an excludes definition', function() {
it('should handle an excludes definition', function () {
const str =
'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid\nexcludes weekdays 2019-02-01';
expect(parserFnConstructor(str)).not.toThrow();
});
it('should handle a todayMarker definition', function() {
it('should handle a todayMarker definition', function () {
spyOn(ganttDb, 'setTodayMarker');
const str =
'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid\nexcludes weekdays 2019-02-01\ntodayMarker off';
@@ -46,7 +44,7 @@ describe('when parsing a gantt diagram it', function() {
expect(parserFnConstructor(str)).not.toThrow();
expect(ganttDb.setTodayMarker).toHaveBeenCalledWith('off');
});
it('should handle a section definition', function() {
it('should handle a section definition', function () {
const str =
'gantt\n' +
'dateFormat yyyy-mm-dd\n' +
@@ -56,7 +54,7 @@ describe('when parsing a gantt diagram it', function() {
expect(parserFnConstructor(str)).not.toThrow();
});
it('should handle multiline section titles with different line breaks', function() {
it('should handle multiline section titles with different line breaks', function () {
const str =
'gantt\n' +
'dateFormat yyyy-mm-dd\n' +
@@ -68,16 +66,16 @@ describe('when parsing a gantt diagram it', function() {
});
/**
* Beslutsflöde inligt nedan. Obs bla bla bla
* ```
* graph TD
* A[Hard pledge] -- text on link -->B(Round edge)
* B --> C{to do or not to do}
* C -->|Too| D[Result one]
* C -->|Doo| E[Result two]
```
* params bapa - a unique bapap
*
* graph TD
* A[Hard pledge] -- text on link -->B(Round edge)
* B --> C{to do or not to do}
* C -->|Too| D[Result one]
* C -->|Doo| E[Result two]
*
* Params bapa - a unique bapap
*/
it('should handle a task definition', function() {
it('should handle a task definition', function () {
const str =
'gantt\n' +
'dateFormat YYYY-MM-DD\n' +
@@ -117,7 +115,7 @@ describe('when parsing a gantt diagram it', function() {
const tasks = parser.yy.getTasks();
allowedTags.forEach(function(t) {
allowedTags.forEach(function (t) {
if (eval(t)) {
expect(tasks[0][t]).toBeTruthy();
} else {
@@ -125,7 +123,7 @@ describe('when parsing a gantt diagram it', function() {
}
});
});
it('should parse callback specifier with no args', function() {
it('should parse callback specifier with no args', function () {
spyOn(ganttDb, 'setClickEvent');
const str =
'gantt\n' +
@@ -139,7 +137,7 @@ describe('when parsing a gantt diagram it', function() {
expect(parserFnConstructor(str)).not.toThrow();
expect(ganttDb.setClickEvent).toHaveBeenCalledWith('cl2', 'ganttTestClick', null);
});
it('should parse callback specifier with arbitrary number of args', function() {
it('should parse callback specifier with arbitrary number of args', function () {
spyOn(ganttDb, 'setClickEvent');
const str =
'gantt\n' +
@@ -152,7 +150,10 @@ describe('when parsing a gantt diagram it', function() {
expect(parserFnConstructor(str)).not.toThrow();
const args = '"test1", "test2", "test3"';
expect(ganttDb.setClickEvent).toHaveBeenCalledWith('cl2', 'ganttTestClick', '"test0", test1, test2');
expect(ganttDb.setClickEvent).toHaveBeenCalledWith(
'cl2',
'ganttTestClick',
'"test0", test1, test2'
);
});
});
@@ -1,4 +1,4 @@
/* eslint-env jasmine */
// Todo reintroduce without cryptoRandomString
import gitGraphAst from './gitGraphAst';
import { parser } from './parser/gitGraph';
@@ -1,12 +1,11 @@
/* eslint-env jasmine */
describe('when parsing an info graph it', function() {
describe('when parsing an info graph it', function () {
var ex;
beforeEach(function() {
beforeEach(function () {
ex = require('./parser/info').parser;
ex.yy = require('./infoDb');
});
it('should handle an info definition', function() {
it('should handle an info definition', function () {
var str = `info
showInfo`;
@@ -1,18 +1,17 @@
/* eslint-env jasmine */
import pieDb from '../pieDb';
import pie from './pie';
import { setConfig } from '../../../config';
setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});
describe('when parsing pie', function() {
beforeEach(function() {
describe('when parsing pie', function () {
beforeEach(function () {
pie.parser.yy = pieDb;
pie.parser.yy.clear();
});
it('should handle very simple pie', function() {
it('should handle very simple pie', function () {
const res = pie.parser.parse(`pie
"ash" : 100
`);
@@ -20,7 +19,7 @@ describe('when parsing pie', function() {
const section1 = sections['ash'];
expect(section1).toBe(100);
});
it('should handle simple pie', function() {
it('should handle simple pie', function () {
const res = pie.parser.parse(`pie
"ash" : 60
"bat" : 40
@@ -29,7 +28,7 @@ describe('when parsing pie', function() {
const section1 = sections['ash'];
expect(section1).toBe(60);
});
it('should handle simple pie with comments', function() {
it('should handle simple pie with comments', function () {
const res = pie.parser.parse(`pie
%% comments
"ash" : 60
@@ -40,7 +39,7 @@ describe('when parsing pie', function() {
expect(section1).toBe(60);
});
it('should handle simple pie with a directive', function() {
it('should handle simple pie with a directive', function () {
const res = pie.parser.parse(`%%{init: {'logLevel':0}}%%
pie
"ash" : 60
@@ -51,7 +50,7 @@ pie
expect(section1).toBe(60);
});
it('should handle simple pie with a title', function() {
it('should handle simple pie with a title', function () {
const res = pie.parser.parse(`pie title a 60/40 pie
"ash" : 60
"bat" : 40
@@ -63,7 +62,7 @@ pie
expect(title).toBe('a 60/40 pie');
});
it('should handle simple pie with positive decimal', function() {
it('should handle simple pie with positive decimal', function () {
const res = pie.parser.parse(`pie
"ash" : 60.67
"bat" : 40
@@ -73,7 +72,7 @@ pie
expect(section1).toBe(60.67);
});
it('should handle simple pie with negative decimal', function() {
it('should handle simple pie with negative decimal', function () {
expect(() => {
pie.parser.parse(`pie
"ash" : 60.67
@@ -3,20 +3,19 @@ import requirementDb from '../requirementDb';
import reqDiagram from './requirementDiagram';
setConfig({
securityLevel: 'strict'
securityLevel: 'strict',
});
describe('when parsing requirement diagram it...', function() {
beforeEach(function() {
describe('when parsing requirement diagram it...', function () {
beforeEach(function () {
reqDiagram.parser.yy = requirementDb;
reqDiagram.parser.yy.clear();
});
it('will accept full requirement definition', function() {
const expectedName = "test_req";
const expectedId = "test_id";
const expectedText = "the test text."
it('will accept full requirement definition', function () {
const expectedName = 'test_req';
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -30,7 +29,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -45,10 +44,10 @@ describe('when parsing requirement diagram it...', function() {
expect(Object.keys(requirementDb.getRelationships()).length).toBe(0);
});
it('will accept full element definition', function() {
const expectedName = "test_el";
const expectedType = "test_type";
const expectedDocRef = "test_ref"
it('will accept full element definition', function () {
const expectedName = 'test_el';
const expectedType = 'test_type';
const expectedDocRef = 'test_ref';
let lines = [
`requirementDiagram`,
@@ -58,7 +57,7 @@ describe('when parsing requirement diagram it...', function() {
`docref: ${expectedDocRef}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -68,23 +67,18 @@ describe('when parsing requirement diagram it...', function() {
let foundElement = requirementDb.getElements()[expectedName];
expect(foundElement).toBeDefined();
expect(foundElement.type).toBe(expectedType);
expect(foundElement.docRef).toBe(
expectedDocRef);
expect(foundElement.docRef).toBe(expectedDocRef);
expect(Object.keys(requirementDb.getRelationships()).length).toBe(0);
});
it('will accept full relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept full relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.CONTAINS;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -97,11 +91,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundRelationship.dst).toBe(expectedDest);
});
it('will accept "requirement" type of requirement definition', function() {
const expectedName = "test_req";
it('will accept "requirement" type of requirement definition', function () {
const expectedName = 'test_req';
const expectedType = requirementDb.RequirementType.REQUIREMENT;
const expectedId = "test_id";
const expectedText = "the test text."
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -115,7 +109,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -124,11 +118,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.type).toBe(expectedType);
});
it('will accept "functionalRequirement" type of requirement definition', function() {
const expectedName = "test_req";
it('will accept "functionalRequirement" type of requirement definition', function () {
const expectedName = 'test_req';
const expectedType = requirementDb.RequirementType.FUNCTIONAL_REQUIREMENT;
const expectedId = "test_id";
const expectedText = "the test text."
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -142,7 +136,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -151,11 +145,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.type).toBe(expectedType);
});
it('will accept "interfaceRequirement" type of requirement definition', function() {
const expectedName = "test_req";
it('will accept "interfaceRequirement" type of requirement definition', function () {
const expectedName = 'test_req';
const expectedType = requirementDb.RequirementType.INTERFACE_REQUIREMENT;
const expectedId = "test_id";
const expectedText = "the test text."
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -169,7 +163,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -178,11 +172,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.type).toBe(expectedType);
});
it('will accept "performanceRequirement" type of requirement definition', function() {
const expectedName = "test_req";
it('will accept "performanceRequirement" type of requirement definition', function () {
const expectedName = 'test_req';
const expectedType = requirementDb.RequirementType.PERFORMANCE_REQUIREMENT;
const expectedId = "test_id";
const expectedText = "the test text."
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -196,7 +190,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -205,11 +199,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.type).toBe(expectedType);
});
it('will accept "physicalRequirement" type of requirement definition', function() {
const expectedName = "test_req";
it('will accept "physicalRequirement" type of requirement definition', function () {
const expectedName = 'test_req';
const expectedType = requirementDb.RequirementType.PHYSICAL_REQUIREMENT;
const expectedId = "test_id";
const expectedText = "the test text."
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -223,7 +217,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -232,11 +226,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.type).toBe(expectedType);
});
it('will accept "designConstraint" type of requirement definition', function() {
const expectedName = "test_req";
it('will accept "designConstraint" type of requirement definition', function () {
const expectedName = 'test_req';
const expectedType = requirementDb.RequirementType.DESIGN_CONSTRAINT;
const expectedId = "test_id";
const expectedText = "the test text."
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -250,7 +244,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -259,11 +253,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.type).toBe(expectedType);
});
it('will accept "low" type of risk requirement definition', function() {
const expectedName = "test_req";
const expectedType = "designConstraint";
const expectedId = "test_id";
const expectedText = "the test text."
it('will accept "low" type of risk requirement definition', function () {
const expectedName = 'test_req';
const expectedType = 'designConstraint';
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.LOW_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -277,7 +271,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -286,11 +280,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.risk).toBe(expectedRisk);
});
it('will accept "medium" type of risk requirement definition', function() {
const expectedName = "test_req";
const expectedType = "designConstraint";
const expectedId = "test_id";
const expectedText = "the test text."
it('will accept "medium" type of risk requirement definition', function () {
const expectedName = 'test_req';
const expectedType = 'designConstraint';
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.MED_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -304,7 +298,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -313,11 +307,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.risk).toBe(expectedRisk);
});
it('will accept "high" type of risk requirement definition', function() {
const expectedName = "test_req";
const expectedType = "designConstraint";
const expectedId = "test_id";
const expectedText = "the test text."
it('will accept "high" type of risk requirement definition', function () {
const expectedName = 'test_req';
const expectedType = 'designConstraint';
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -331,7 +325,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -340,11 +334,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.risk).toBe(expectedRisk);
});
it('will accept "Analysis" type of verification method requirement definition', function() {
const expectedName = "test_req";
const expectedType = "designConstraint";
const expectedId = "test_id";
const expectedText = "the test text."
it('will accept "Analysis" type of verification method requirement definition', function () {
const expectedName = 'test_req';
const expectedType = 'designConstraint';
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_ANALYSIS;
@@ -358,7 +352,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -367,11 +361,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
it('will accept "Inspection" type of verification method requirement definition', function() {
const expectedName = "test_req";
const expectedType = "designConstraint";
const expectedId = "test_id";
const expectedText = "the test text."
it('will accept "Inspection" type of verification method requirement definition', function () {
const expectedName = 'test_req';
const expectedType = 'designConstraint';
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_INSPECTION;
@@ -385,7 +379,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -394,11 +388,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
it('will accept "Test" type of verification method requirement definition', function() {
const expectedName = "test_req";
const expectedType = "designConstraint";
const expectedId = "test_id";
const expectedText = "the test text."
it('will accept "Test" type of verification method requirement definition', function () {
const expectedName = 'test_req';
const expectedType = 'designConstraint';
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_TEST;
@@ -412,7 +406,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -421,11 +415,11 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
it('will accept "Demonstration" type of verification method requirement definition', function() {
const expectedName = "test_req";
it('will accept "Demonstration" type of verification method requirement definition', function () {
const expectedName = 'test_req';
const expectedType = requirementDb.RequirementType.DESIGN_CONSTRAINT;
const expectedId = "test_id";
const expectedText = "the test text."
const expectedId = 'test_id';
const expectedText = 'the test text.';
const expectedRisk = requirementDb.RiskLevel.HIGH_RISK;
const expectedVerifyMethod = requirementDb.VerifyType.VERIFY_DEMONSTRATION;
@@ -439,7 +433,7 @@ describe('when parsing requirement diagram it...', function() {
`verifymethod: ${expectedVerifyMethod}`,
`}`,
];
let doc = lines.join("\n");
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -448,17 +442,13 @@ describe('when parsing requirement diagram it...', function() {
expect(foundReq.verifyMethod).toBe(expectedVerifyMethod);
});
it('will accept contains relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept contains relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.CONTAINS;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -466,17 +456,13 @@ describe('when parsing requirement diagram it...', function() {
expect(foundRelationship.type).toBe(expectedType);
});
it('will accept copies relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept copies relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.COPIES;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -484,17 +470,13 @@ describe('when parsing requirement diagram it...', function() {
expect(foundRelationship.type).toBe(expectedType);
});
it('will accept derives relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept derives relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.DERIVES;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -502,17 +484,13 @@ describe('when parsing requirement diagram it...', function() {
expect(foundRelationship.type).toBe(expectedType);
});
it('will accept satisfies relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept satisfies relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.SATISFIES;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -520,17 +498,13 @@ describe('when parsing requirement diagram it...', function() {
expect(foundRelationship.type).toBe(expectedType);
});
it('will accept verifies relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept verifies relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.VERIFIES;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -538,17 +512,13 @@ describe('when parsing requirement diagram it...', function() {
expect(foundRelationship.type).toBe(expectedType);
});
it('will accept refines relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept refines relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.REFINES;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
@@ -556,22 +526,17 @@ describe('when parsing requirement diagram it...', function() {
expect(foundRelationship.type).toBe(expectedType);
});
it('will accept traces relationship definition', function() {
const expectedSrc = "a";
const expectedDest = "b";
it('will accept traces relationship definition', function () {
const expectedSrc = 'a';
const expectedDest = 'b';
const expectedType = requirementDb.Relationships.TRACES;
let lines = [
`requirementDiagram`,
``,
`${expectedSrc} - ${expectedType} -> ${expectedDest}`,
];
let doc = lines.join("\n");
let lines = [`requirementDiagram`, ``, `${expectedSrc} - ${expectedType} -> ${expectedDest}`];
let doc = lines.join('\n');
reqDiagram.parser.parse(doc);
let foundRelationship = requirementDb.getRelationships()[0];
expect(foundRelationship.type).toBe(expectedType);
});
});
});
@@ -1,10 +1,14 @@
/* eslint-env jasmine */
import { parser } from './parser/sequenceDiagram';
import sequenceDb from './sequenceDb';
import * as configApi from '../../config';
import renderer from './sequenceRenderer';
import mermaidAPI from '../../mermaidAPI';
/**
* @param conf
* @param key
* @param value
*/
function addConf(conf, key, value) {
if (value !== undefined) {
conf[key] = value;
@@ -12,12 +16,12 @@ function addConf(conf, key, value) {
return conf;
}
describe('when parsing a sequenceDiagram', function() {
beforeEach(function() {
describe('when parsing a sequenceDiagram', function () {
beforeEach(function () {
parser.yy = sequenceDb;
parser.yy.clear();
});
it('it should handle a sequenceDiagram definition', function() {
it('it should handle a sequenceDiagram definition', function () {
const str = `
sequenceDiagram
Alice->Bob:Hello Bob, how are you?
@@ -35,7 +39,7 @@ Bob-->Alice: I am good thanks!`;
expect(messages[0].from).toBe('Alice');
expect(messages[2].from).toBe('Bob');
});
it('it should not show sequence numbers per default', function() {
it('it should not show sequence numbers per default', function () {
const str = `
sequenceDiagram
Alice->Bob:Hello Bob, how are you?
@@ -45,7 +49,7 @@ Bob-->Alice: I am good thanks!`;
mermaidAPI.parse(str);
expect(parser.yy.showSequenceNumbers()).toBe(false);
});
it('it should show sequence numbers when autonumber is enabled', function() {
it('it should show sequence numbers when autonumber is enabled', function () {
const str = `
sequenceDiagram
autonumber
@@ -56,7 +60,7 @@ Bob-->Alice: I am good thanks!`;
mermaidAPI.parse(str);
expect(parser.yy.showSequenceNumbers()).toBe(true);
});
it('it should handle a sequenceDiagram definition with a title', function() {
it('it should handle a sequenceDiagram definition with a title', function () {
const str = `
sequenceDiagram
title: Diagram Title
@@ -77,7 +81,7 @@ Bob-->Alice: I am good thanks!`;
expect(messages[2].from).toBe('Bob');
expect(title).toBe('Diagram Title');
});
it('it should space in actor names', function() {
it('it should space in actor names', function () {
const str = `
sequenceDiagram
Alice->Bob:Hello Bob, how are - you?
@@ -94,7 +98,7 @@ Bob-->Alice: I am good thanks!`;
expect(messages[0].from).toBe('Alice');
expect(messages[1].from).toBe('Bob');
});
it('it should handle dashes in actor names', function() {
it('it should handle dashes in actor names', function () {
const str = `
sequenceDiagram
Alice-in-Wonderland->Bob:Hello Bob, how are - you?
@@ -102,7 +106,7 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
mermaidAPI.parse(str);
const actors = parser.yy.getActors();
expect(actors["Alice-in-Wonderland"].description).toBe('Alice-in-Wonderland');
expect(actors['Alice-in-Wonderland'].description).toBe('Alice-in-Wonderland');
actors.Bob.description = 'Bob';
const messages = parser.yy.getMessages();
@@ -111,7 +115,7 @@ Bob-->Alice-in-Wonderland:I am good thanks!`;
expect(messages[0].from).toBe('Alice-in-Wonderland');
expect(messages[1].from).toBe('Bob');
});
it('it should alias participants', function() {
it('it should alias participants', function () {
const str = `
sequenceDiagram
participant A as Alice
@@ -132,7 +136,7 @@ B-->A: I am good thanks!`;
expect(messages[0].from).toBe('A');
expect(messages[1].from).toBe('B');
});
it('it should alias a mix of actors and participants apa12', function() {
it('it should alias a mix of actors and participants apa12', function () {
const str = `
sequenceDiagram
actor Alice as Alice2
@@ -160,7 +164,7 @@ sequenceDiagram
expect(messages[0].from).toBe('Alice');
expect(messages[4].to).toBe('Joan');
});
it('it should alias actors apa13', function() {
it('it should alias actors apa13', function () {
const str = `
sequenceDiagram
actor A as Alice
@@ -180,7 +184,7 @@ B-->A: I am good thanks!`;
expect(messages[0].from).toBe('A');
expect(messages[1].from).toBe('B');
});
it('it should handle in async messages', function() {
it('it should handle in async messages', function () {
const str = `
sequenceDiagram
Alice-xBob:Hello Bob, how are you?`;
@@ -195,7 +199,7 @@ Alice-xBob:Hello Bob, how are you?`;
expect(messages.length).toBe(1);
expect(messages[0].type).toBe(parser.yy.LINETYPE.SOLID_CROSS);
});
it('it should handle in async dotted messages', function() {
it('it should handle in async dotted messages', function () {
const str = `
sequenceDiagram
Alice--xBob:Hello Bob, how are you?`;
@@ -210,7 +214,7 @@ Alice--xBob:Hello Bob, how are you?`;
expect(messages.length).toBe(1);
expect(messages[0].type).toBe(parser.yy.LINETYPE.DOTTED_CROSS);
});
it('it should handle in sync messages', function() {
it('it should handle in sync messages', function () {
const str = `
sequenceDiagram
Alice-)Bob:Hello Bob, how are you?`;
@@ -225,7 +229,7 @@ Alice-)Bob:Hello Bob, how are you?`;
expect(messages.length).toBe(1);
expect(messages[0].type).toBe(parser.yy.LINETYPE.SOLID_POINT);
});
it('it should handle in sync dotted messages', function() {
it('it should handle in sync dotted messages', function () {
const str = `
sequenceDiagram
Alice--)Bob:Hello Bob, how are you?`;
@@ -240,7 +244,7 @@ Alice--)Bob:Hello Bob, how are you?`;
expect(messages.length).toBe(1);
expect(messages[0].type).toBe(parser.yy.LINETYPE.DOTTED_POINT);
});
it('it should handle in arrow messages', function() {
it('it should handle in arrow messages', function () {
const str = `
sequenceDiagram
Alice->>Bob:Hello Bob, how are you?`;
@@ -255,7 +259,7 @@ Alice->>Bob:Hello Bob, how are you?`;
expect(messages.length).toBe(1);
expect(messages[0].type).toBe(parser.yy.LINETYPE.SOLID);
});
it('it should handle in arrow messages', function() {
it('it should handle in arrow messages', function () {
const str = 'sequenceDiagram\n' + 'Alice-->>Bob:Hello Bob, how are you?';
mermaidAPI.parse(str);
@@ -268,7 +272,7 @@ Alice->>Bob:Hello Bob, how are you?`;
expect(messages.length).toBe(1);
expect(messages[0].type).toBe(parser.yy.LINETYPE.DOTTED);
});
it('it should handle actor activation', function() {
it('it should handle actor activation', function () {
const str = `
sequenceDiagram
Alice-->>Bob:Hello Bob, how are you?
@@ -291,7 +295,7 @@ deactivate Bob`;
expect(messages[3].type).toBe(parser.yy.LINETYPE.ACTIVE_END);
expect(messages[3].from.actor).toBe('Bob');
});
it('it should handle actor one line notation activation', function() {
it('it should handle actor one line notation activation', function () {
const str = `
sequenceDiagram
Alice-->>+Bob:Hello Bob, how are you?
@@ -312,7 +316,7 @@ deactivate Bob`;
expect(messages[3].type).toBe(parser.yy.LINETYPE.ACTIVE_END);
expect(messages[3].from.actor).toBe('Bob');
});
it('it should handle stacked activations', function() {
it('it should handle stacked activations', function () {
const str = `
sequenceDiagram
Alice-->>+Bob:Hello Bob, how are you?
@@ -339,7 +343,7 @@ deactivate Bob`;
expect(messages[7].type).toBe(parser.yy.LINETYPE.ACTIVE_END);
expect(messages[7].from.actor).toBe('Carol');
});
it('it should handle fail parsing when activating an inactive participant', function() {
it('it should handle fail parsing when activating an inactive participant', function () {
const str = `
sequenceDiagram
participant user as End User
@@ -366,7 +370,7 @@ deactivate Bob`;
expect(error).toBe(true);
});
it('it should handle comments in a sequenceDiagram', function() {
it('it should handle comments in a sequenceDiagram', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -385,7 +389,7 @@ deactivate Bob`;
expect(messages[0].from).toBe('Alice');
expect(messages[2].from).toBe('Bob');
});
it('it should handle new lines in a sequenceDiagram', function() {
it('it should handle new lines in a sequenceDiagram', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -406,7 +410,7 @@ deactivate Bob`;
expect(messages[0].from).toBe('Alice');
expect(messages[2].from).toBe('Bob');
});
it('it should handle semicolons', function() {
it('it should handle semicolons', function () {
const str = `
sequenceDiagram;Alice->Bob: Hello Bob, how are you?;Note right of Bob: Bob thinks;Bob-->Alice: I am good thanks!;`;
@@ -421,7 +425,7 @@ sequenceDiagram;Alice->Bob: Hello Bob, how are you?;Note right of Bob: Bob think
expect(messages[0].from).toBe('Alice');
expect(messages[2].from).toBe('Bob');
});
it('it should handle one leading space in lines in a sequenceDiagram', function() {
it('it should handle one leading space in lines in a sequenceDiagram', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -441,7 +445,7 @@ Bob-->Alice: I am good thanks!`;
expect(messages[0].from).toBe('Alice');
expect(messages[2].from).toBe('Bob');
});
it('it should handle several leading spaces in lines in a sequenceDiagram', function() {
it('it should handle several leading spaces in lines in a sequenceDiagram', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -461,7 +465,7 @@ Bob-->Alice: I am good thanks!`;
expect(messages[0].from).toBe('Alice');
expect(messages[2].from).toBe('Bob');
});
it('it should handle several leading spaces in lines in a sequenceDiagram', function() {
it('it should handle several leading spaces in lines in a sequenceDiagram', function () {
const str = `
sequenceDiagram
participant Alice
@@ -486,7 +490,7 @@ Bob-->John: Jolly good!`;
expect(messages[0].from).toBe('Alice');
expect(messages[2].from).toBe('John');
});
it('it should handle different line breaks', function() {
it('it should handle different line breaks', function () {
const str = `
sequenceDiagram
participant 1 as multiline<br>text
@@ -559,7 +563,7 @@ note right of 1:nowrap: multiline<br/>text
expect(messages[5].wrap).toBe(false);
expect(messages[6].wrap).toBe(false);
expect(messages[7].wrap).toBe(false);
})
});
it('it should handle notes and messages with wrap specified', function () {
const str = `
sequenceDiagram
@@ -584,7 +588,7 @@ note right of 3:wrap: multiline<br/>text
expect(messages[1].wrap).toBe(true);
expect(messages[2].wrap).toBe(true);
expect(messages[3].wrap).toBe(true);
})
});
it('it should handle notes and messages with nowrap or line breaks', function () {
const str = `
sequenceDiagram
@@ -601,8 +605,8 @@ note right of 2: single-line text
expect(messages[1].message).toBe('single-line text');
expect(messages[0].wrap).toBe(false);
expect(messages[1].wrap).toBe(false);
})
it('it should handle notes over a single actor', function() {
});
it('it should handle notes over a single actor', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -615,7 +619,7 @@ Note over Bob: Bob thinks
expect(messages[1].from).toBe('Bob');
expect(messages[1].to).toBe('Bob');
});
it('it should handle notes over multiple actors', function() {
it('it should handle notes over multiple actors', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -631,7 +635,7 @@ Note over Bob,Alice: resolution
expect(messages[2].from).toBe('Bob');
expect(messages[2].to).toBe('Alice');
});
it('it should handle loop statements', function() {
it('it should handle loop statements', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -654,7 +658,7 @@ end`;
expect(messages[0].from).toBe('Alice');
expect(messages[1].from).toBe('Bob');
});
it('it should add a rect around sequence', function() {
it('it should add a rect around sequence', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -678,7 +682,7 @@ end`;
expect(messages[4].type).toEqual(parser.yy.LINETYPE.RECT_END);
});
it('it should allow for nested rects', function() {
it('it should allow for nested rects', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -705,7 +709,7 @@ end`;
expect(messages[5].type).toEqual(parser.yy.LINETYPE.DOTTED_OPEN);
expect(messages[6].type).toEqual(parser.yy.LINETYPE.RECT_END);
});
|
||||
it('it should handle opt statements', function() {
|
||||
it('it should handle opt statements', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
Alice->Bob: Hello Bob, how are you?
|
||||
@@ -728,7 +732,7 @@ end`;
|
||||
expect(messages[0].from).toBe('Alice');
|
||||
expect(messages[1].from).toBe('Bob');
|
||||
});
|
||||
it('it should handle alt statements', function() {
|
||||
it('it should handle alt statements', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
Alice->Bob: Hello Bob, how are you?
|
||||
@@ -754,7 +758,7 @@ end`;
|
||||
expect(messages[0].from).toBe('Alice');
|
||||
expect(messages[1].from).toBe('Bob');
|
||||
});
|
||||
it('it should handle alt statements with multiple elses', function() {
|
||||
it('it should handle alt statements with multiple elses', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
Alice->Bob: Hello Bob, how are you?
|
||||
@@ -781,7 +785,7 @@ end`;
|
||||
expect(messages[7].from).toBe('Bob');
|
||||
expect(messages[8].type).toBe(parser.yy.LINETYPE.ALT_END);
|
||||
});
|
||||
it('it should handle par statements a sequenceDiagram', function() {
|
||||
it('it should handle par statements a sequenceDiagram', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
par Parallel one
|
||||
@@ -808,7 +812,7 @@ end`;
|
||||
expect(messages[1].from).toBe('Alice');
|
||||
expect(messages[2].from).toBe('Bob');
|
||||
});
|
||||
it('it should handle special characters in signals', function() {
|
||||
it('it should handle special characters in signals', function () {
|
||||
const str = 'sequenceDiagram\n' + 'Alice->Bob: -:<>,;# comment';
|
||||
|
||||
mermaidAPI.parse(str);
|
||||
@@ -816,7 +820,7 @@ end`;
|
||||
const messages = parser.yy.getMessages();
|
||||
expect(messages[0].message).toBe('-:<>,');
|
||||
});
|
||||
it('it should handle special characters in notes', function() {
|
||||
it('it should handle special characters in notes', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
Alice->Bob: Hello Bob, how are you?
|
||||
@@ -827,7 +831,7 @@ Note right of Bob: -:<>,;# comment`;
|
||||
const messages = parser.yy.getMessages();
|
||||
expect(messages[1].message).toBe('-:<>,');
|
||||
});
|
||||
it('it should handle special characters in loop', function() {
|
||||
it('it should handle special characters in loop', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
Alice->Bob: Hello Bob, how are you?
|
||||
@@ -840,7 +844,7 @@ end`;
|
||||
const messages = parser.yy.getMessages();
|
||||
expect(messages[1].message).toBe('-:<>,');
|
||||
});
|
||||
it('it should handle special characters in opt', function() {
|
||||
it('it should handle special characters in opt', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
Alice->Bob: Hello Bob, how are you?
|
||||
@@ -853,7 +857,7 @@ end`;
|
||||
const messages = parser.yy.getMessages();
|
||||
expect(messages[1].message).toBe('-:<>,');
|
||||
});
|
||||
it('it should handle special characters in alt', function() {
|
||||
it('it should handle special characters in alt', function () {
|
||||
const str = `
|
||||
sequenceDiagram
|
||||
Alice->Bob: Hello Bob, how are you?
|
||||
@@ -869,7 +873,7 @@ end`;
|
||||
expect(messages[1].message).toBe('-:<>,');
|
||||
expect(messages[3].message).toBe(',<>:-');
|
||||
});
|
||||
it('it should handle special characters in par', function() {
it('it should handle special characters in par', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -885,7 +889,7 @@ end`;
expect(messages[1].message).toBe('-:<>,');
expect(messages[3].message).toBe(',<>:-');
});
it('it should handle no-label loop', function() {
it('it should handle no-label loop', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -899,7 +903,7 @@ end`;
expect(messages[1].message).toBe('');
expect(messages[2].message).toBe('I am good thanks!');
});
it('it should handle no-label opt', function() {
it('it should handle no-label opt', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -913,7 +917,7 @@ end`;
expect(messages[1].message).toBe('');
expect(messages[2].message).toBe('I am good thanks!');
});
it('it should handle no-label alt', function() {
it('it should handle no-label alt', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -930,7 +934,7 @@ end`;
expect(messages[3].message).toBe('');
expect(messages[4].message).toBe('I am good thanks!');
});
it('it should handle no-label par', function() {
it('it should handle no-label par', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -965,19 +969,19 @@ link a: Tests @ https://tests.contoso.com/?svc=alice@contoso.com

mermaidAPI.parse(str);
const actors = parser.yy.getActors();
expect(actors.a.links["Repo"]).toBe("https://repo.contoso.com/");
expect(actors.b.links["Repo"]).toBe(undefined);
expect(actors.a.links["Dashboard"]).toBe("https://dashboard.contoso.com/");
expect(actors.b.links["Dashboard"]).toBe("https://dashboard.contoso.com/");
expect(actors.a.links["On-Call"]).toBe("https://oncall.contoso.com/?svc=alice");
expect(actors.c.links["Dashboard"]).toBe(undefined);
expect(actors.a.links["Endpoint"]).toBe("https://alice.contoso.com");
expect(actors.a.links["Swagger"]).toBe("https://swagger.contoso.com");
expect(actors.a.links["Tests"]).toBe("https://tests.contoso.com/?svc=alice@contoso.com");
expect(actors.a.links['Repo']).toBe('https://repo.contoso.com/');
expect(actors.b.links['Repo']).toBe(undefined);
expect(actors.a.links['Dashboard']).toBe('https://dashboard.contoso.com/');
expect(actors.b.links['Dashboard']).toBe('https://dashboard.contoso.com/');
expect(actors.a.links['On-Call']).toBe('https://oncall.contoso.com/?svc=alice');
expect(actors.c.links['Dashboard']).toBe(undefined);
expect(actors.a.links['Endpoint']).toBe('https://alice.contoso.com');
expect(actors.a.links['Swagger']).toBe('https://swagger.contoso.com');
expect(actors.a.links['Tests']).toBe('https://tests.contoso.com/?svc=alice@contoso.com');
});

it('it should handle properties EXPERIMENTAL: USE WITH CAUTION', function () {
//Be aware that the syntax for "properties" is likely to be changed.
//Be aware that the syntax for "properties" is likely to be changed.
const str = `
sequenceDiagram
participant a as Alice
@@ -990,16 +994,15 @@ properties b: {"class": "external-service-actor", "icon": "@computer"}

mermaidAPI.parse(str);
const actors = parser.yy.getActors();
expect(actors.a.properties["class"]).toBe("internal-service-actor");
expect(actors.b.properties["class"]).toBe("external-service-actor");
expect(actors.a.properties["icon"]).toBe("@clock");
expect(actors.b.properties["icon"]).toBe("@computer");
expect(actors.c.properties["class"]).toBe(undefined);
expect(actors.a.properties['class']).toBe('internal-service-actor');
expect(actors.b.properties['class']).toBe('external-service-actor');
expect(actors.a.properties['icon']).toBe('@clock');
expect(actors.b.properties['icon']).toBe('@computer');
expect(actors.c.properties['class']).toBe(undefined);
});

});
describe('when checking the bounds in a sequenceDiagram', function() {
describe('when checking the bounds in a sequenceDiagram', function () {
beforeAll(() => {
let conf = {
diagramMarginX: 50,
@@ -1011,22 +1014,21 @@ describe('when checking the bounds in a sequenceDiagram', function() {
boxMargin: 10,
messageMargin: 40,
boxTextMargin: 15,
noteMargin: 25
noteMargin: 25,
};

mermaidAPI.initialize({ sequence: conf });
});

let conf;
beforeEach(function() {
beforeEach(function () {
mermaidAPI.reset();
parser.yy = sequenceDb;
parser.yy.clear();
renderer.bounds.init();
conf = parser.yy.getConfig();
});
it('it should handle a simple bound call', function() {

it('it should handle a simple bound call', function () {
renderer.bounds.insert(100, 100, 200, 200);

const { bounds } = renderer.bounds.getBounds();
@@ -1035,8 +1037,7 @@ describe('when checking the bounds in a sequenceDiagram', function() {
expect(bounds.stopx).toBe(200);
expect(bounds.stopy).toBe(200);
});
it('it should handle an expanding bound', function() {

it('it should handle an expanding bound', function () {
renderer.bounds.insert(100, 100, 200, 200);
renderer.bounds.insert(25, 50, 300, 400);

@@ -1046,8 +1047,7 @@ describe('when checking the bounds in a sequenceDiagram', function() {
expect(bounds.stopx).toBe(300);
expect(bounds.stopy).toBe(400);
});
it('it should handle inserts within the bound without changing the outer bounds', function() {

it('it should handle inserts within the bound without changing the outer bounds', function () {
renderer.bounds.insert(100, 100, 200, 200);
renderer.bounds.insert(25, 50, 300, 400);
renderer.bounds.insert(125, 150, 150, 200);
@@ -1058,8 +1058,7 @@ describe('when checking the bounds in a sequenceDiagram', function() {
expect(bounds.stopx).toBe(300);
expect(bounds.stopy).toBe(400);
});
it('it should handle a loop without expanding the area', function() {

it('it should handle a loop without expanding the area', function () {
renderer.bounds.insert(25, 50, 300, 400);
renderer.bounds.verticalPos = 150;
renderer.bounds.newLoop();
@@ -1080,8 +1079,7 @@ describe('when checking the bounds in a sequenceDiagram', function() {
expect(bounds.stopx).toBe(300);
expect(bounds.stopy).toBe(400);
});
it('it should handle multiple loops withtout expanding the bounds', function() {

it('it should handle multiple loops withtout expanding the bounds', function () {
renderer.bounds.insert(100, 100, 1000, 1000);
renderer.bounds.verticalPos = 200;
renderer.bounds.newLoop();
@@ -1112,8 +1110,7 @@ describe('when checking the bounds in a sequenceDiagram', function() {
expect(bounds.stopx).toBe(1000);
expect(bounds.stopy).toBe(1000);
});
it('it should handle a loop that expands the area', function() {

it('it should handle a loop that expands the area', function () {
renderer.bounds.insert(100, 100, 200, 200);
renderer.bounds.verticalPos = 200;
renderer.bounds.newLoop();
@@ -1136,7 +1133,7 @@ describe('when checking the bounds in a sequenceDiagram', function() {
});
});
describe('when rendering a sequenceDiagram APA', function() {
describe('when rendering a sequenceDiagram APA', function () {
beforeAll(() => {
let conf = {
diagramMarginX: 50,
@@ -1150,12 +1147,12 @@ describe('when rendering a sequenceDiagram APA', function() {
boxTextMargin: 15,
noteMargin: 25,
wrap: false,
mirrorActors: false
mirrorActors: false,
};
configApi.setSiteConfig({ logLevel: 5, sequence: conf });
});
let conf;
beforeEach(function() {
beforeEach(function () {
mermaidAPI.reset();
conf = {
diagramMarginX: 50,
@@ -1169,21 +1166,21 @@ describe('when rendering a sequenceDiagram APA', function() {
boxTextMargin: 15,
noteMargin: 25,
wrap: false,
mirrorActors: false
mirrorActors: false,
};
configApi.setSiteConfig({ logLevel: 5, sequence: conf });
parser.yy = sequenceDb;
parser.yy.clear();
// conf = parser.yy.getConfig();
});
['tspan', 'fo', 'old', undefined].forEach(function(textPlacement) {
['tspan', 'fo', 'old', undefined].forEach(function (textPlacement) {
it(`
it should handle one actor, when textPlacement is ${textPlacement}`, function() {
it should handle one actor, when textPlacement is ${textPlacement}`, function () {
const str = `
sequenceDiagram
participant Alice`;

mermaidAPI.reinitialize({sequence: { textPlacement: textPlacement}});
mermaidAPI.reinitialize({ sequence: { textPlacement: textPlacement } });
mermaidAPI.parse(str);
// renderer.setConf(mermaidAPI.getConfig().sequence);
renderer.draw(str, 'tst');
@@ -1195,7 +1192,7 @@ participant Alice`;
expect(bounds.stopy).toBe(conf.height);
});
});
it('it should handle same actor with different whitespace properly', function() {
it('it should handle same actor with different whitespace properly', function () {
const str = `
sequenceDiagram
participant Alice
@@ -1208,7 +1205,7 @@ participant Alice
const actors = parser.yy.getActors();
expect(Object.keys(actors)).toEqual(['Alice']);
});
it('it should handle one actor and a centered note', function() {
it('it should handle one actor and a centered note', function () {
const str = `
sequenceDiagram
participant Alice
@@ -1226,7 +1223,7 @@ Note over Alice: Alice thinks
// 10 comes from mock of text height
expect(bounds.stopy).toBe(models.lastNote().stopy);
});
it('it should handle one actor and a note to the left', function() {
it('it should handle one actor and a note to the left', function () {
const str = `
sequenceDiagram
participant Alice
@@ -1242,7 +1239,7 @@ Note left of Alice: Alice thinks`;
// 10 comes from mock of text height
expect(bounds.stopy).toBe(models.lastNote().stopy);
});
it('it should handle one actor and a note to the right', function() {
it('it should handle one actor and a note to the right', function () {
const str = `
sequenceDiagram
participant Alice
@@ -1258,7 +1255,7 @@ Note right of Alice: Alice thinks`;
// 10 comes from mock of text height
expect(bounds.stopy).toBe(models.lastNote().stopy);
});
it('it should handle two actors', function() {
it('it should handle two actors', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?`;
@@ -1272,7 +1269,7 @@ Alice->Bob: Hello Bob, how are you?`;
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should handle two actors with init directive', function() {
it('it should handle two actors with init directive', function () {
const str = `
%%{init: {'logLevel': 0}}%%
sequenceDiagram
@@ -1289,7 +1286,7 @@ Alice->Bob: Hello Bob, how are you?`;
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should handle two actors with init directive with multiline directive', function() {
it('it should handle two actors with init directive with multiline directive', function () {
const str = `
%%{init: { 'logLevel': 0}}%%
sequenceDiagram
@@ -1309,10 +1306,9 @@ Alice->Bob: Hello Bob, how are you?`;
expect(bounds.starty).toBe(0);
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
expect(msgs.every(v => v.wrap)).toBe(true);

expect(msgs.every((v) => v.wrap)).toBe(true);
});
it('it should handle two actors and two centered shared notes', function() {
it('it should handle two actors and two centered shared notes', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -1329,7 +1325,7 @@ Note over Bob,Alice: Looks back
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastNote().stopy);
});
it('it should draw two actors and two messages', function() {
it('it should draw two actors and two messages', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -1344,7 +1340,7 @@ Bob->Alice: Fine!`;
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should draw two actors notes to the right', function() {
it('it should draw two actors notes to the right', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -1363,7 +1359,7 @@ Bob->Alice: Fine!`;
expect(bounds.stopx).toBe(expStopX);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should draw two actors notes to the left', function() {
it('it should draw two actors notes to the left', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -1380,7 +1376,7 @@ Bob->Alice: Fine!`;
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should draw two actors notes to the left with text wrapped (inline)', function() {
it('it should draw two actors notes to the left with text wrapped (inline)', function () {
const str = `
sequenceDiagram
Alice->>Bob:wrap: Hello Bob, how are you? If you are not available right now, I can leave you a message. Please get back to me as soon as you can!
@@ -1399,7 +1395,7 @@ Bob->>Alice: Fine!`;
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should draw two actors notes to the left with text wrapped (directive)', function() {
it('it should draw two actors notes to the left with text wrapped (directive)', function () {
const str = `
%%{init: { 'theme': 'dark' } }%%
sequenceDiagram
@@ -1417,12 +1413,12 @@ Bob->>Alice: Fine!`;
expect(bounds.startx).toBe(-(conf.width / 2) - conf.actorMargin / 2);
expect(bounds.starty).toBe(0);
expect(mermaid.theme).toBe('dark');
expect(msgs.every(v => v.wrap)).toBe(true);
expect(msgs.every((v) => v.wrap)).toBe(true);

expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should draw two actors notes to the left with text wrapped and the init directive sets the theme to dark', function() {
it('it should draw two actors notes to the left with text wrapped and the init directive sets the theme to dark', function () {
const str = `
%%{init:{'theme':'dark'}}%%
sequenceDiagram
@@ -1440,12 +1436,12 @@ Bob->>Alice: Fine!`;
expect(bounds.startx).toBe(-(conf.width / 2) - conf.actorMargin / 2);
expect(bounds.starty).toBe(0);
expect(mermaid.theme).toBe('dark');
expect(msgs.every(v => v.wrap)).toBe(true);
expect(msgs.every((v) => v.wrap)).toBe(true);

expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should draw two actors, notes to the left with text wrapped and the init directive sets the theme to dark and fontFamily to Menlo, fontSize to 18, and fontWeight to 800', function() {
it('it should draw two actors, notes to the left with text wrapped and the init directive sets the theme to dark and fontFamily to Menlo, fontSize to 18, and fontWeight to 800', function () {
const str = `
%%{init: { "theme": "dark", 'config': { "fontFamily": "Menlo", "fontSize": 18, "messageFontWeight": 400, "wrap": true }}}%%
sequenceDiagram
@@ -1464,12 +1460,12 @@ Bob->>Alice: Fine!`;
expect(mermaid.sequence.fontFamily).toBe('Menlo');
expect(mermaid.sequence.fontSize).toBe(18);
expect(mermaid.sequence.messageFontWeight).toBe(400);
expect(msgs.every(v => v.wrap)).toBe(true);
expect(msgs.every((v) => v.wrap)).toBe(true);

expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastMessage().stopy + 10);
});
it('it should draw two loops', function() {
it('it should draw two loops', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, how are you?
@@ -1486,7 +1482,7 @@ end`;
expect(bounds.stopx).toBe(conf.width * 2 + conf.actorMargin);
expect(bounds.stopy).toBe(models.lastLoop().stopy);
});
it('it should draw background rect', function() {
it('it should draw background rect', function () {
const str = `
sequenceDiagram
Alice->Bob: Hello Bob, are you alright?
@@ -1505,8 +1501,7 @@ end`;
});
});
describe('when rendering a sequenceDiagram with actor mirror activated', function() {
describe('when rendering a sequenceDiagram with actor mirror activated', function () {
beforeAll(() => {
let conf = {
diagramMarginX: 50,
@@ -1522,22 +1517,22 @@ describe('when rendering a sequenceDiagram with actor mirror activated', functio
mirrorActors: true,
// Depending on css styling this might need adjustment
// Prolongs the edge of the diagram downwards
bottomMarginAdj: 1
bottomMarginAdj: 1,
};

mermaidAPI.initialize({ sequence: conf });
});

let conf;
beforeEach(function() {
beforeEach(function () {
mermaidAPI.reset();
parser.yy = sequenceDb;
parser.yy.clear();
conf = parser.yy.getConfig();
renderer.bounds.init();
});
['tspan', 'fo', 'old', undefined].forEach(function(textPlacement) {
it('it should handle one actor, when textPlacement is' + textPlacement, function() {
['tspan', 'fo', 'old', undefined].forEach(function (textPlacement) {
it('it should handle one actor, when textPlacement is' + textPlacement, function () {
mermaidAPI.initialize(addConf(conf, 'textPlacement', textPlacement));
renderer.bounds.init();
const str = `
@@ -1556,8 +1551,8 @@ participant Alice`;
});
});

describe('when rendering a sequenceDiagram with directives', function() {
beforeAll(function() {
describe('when rendering a sequenceDiagram with directives', function () {
beforeAll(function () {
let conf = {
diagramMarginX: 50,
diagramMarginY: 10,
@@ -1567,13 +1562,13 @@ describe('when rendering a sequenceDiagram with directives', function() {
boxMargin: 10,
messageMargin: 40,
boxTextMargin: 15,
noteMargin: 25
noteMargin: 25,
};
mermaidAPI.initialize({ sequence: conf });
});

let conf;
beforeEach(function() {
beforeEach(function () {
mermaidAPI.reset();
parser.yy = sequenceDb;
parser.yy.clear();
@@ -1581,7 +1576,7 @@ describe('when rendering a sequenceDiagram with directives', function() {
renderer.bounds.init();
});

it('it should handle one actor, when theme is dark and logLevel is 1 DX1', function() {
it('it should handle one actor, when theme is dark and logLevel is 1 DX1', function () {
const str = `
%%{init: { "theme": "dark", "logLevel": 1 } }%%
sequenceDiagram
@@ -1600,9 +1595,11 @@ participant Alice
expect(bounds.startx).toBe(0);
expect(bounds.startx).toBe(0);
expect(bounds.starty).toBe(0);
expect(bounds.stopy).toBe(models.lastActor().y + models.lastActor().height + mermaid.sequence.boxMargin);
expect(bounds.stopy).toBe(
models.lastActor().y + models.lastActor().height + mermaid.sequence.boxMargin
);
});
it('it should handle one actor, when logLevel is 3', function() {
it('it should handle one actor, when logLevel is 3', function () {
const str = `
%%{initialize: { "logLevel": 3 }}%%
sequenceDiagram
@@ -1618,6 +1615,8 @@ participant Alice
expect(bounds.startx).toBe(0);
expect(bounds.startx).toBe(0);
expect(bounds.starty).toBe(0);
expect(bounds.stopy).toBe(models.lastActor().y + models.lastActor().height + mermaid.sequence.boxMargin);
expect(bounds.stopy).toBe(
models.lastActor().y + models.lastActor().height + mermaid.sequence.boxMargin
);
});
});
@@ -1,10 +1,9 @@
/* eslint-env jasmine */
const svgDraw = require('./svgDraw');
const { MockD3 } = require('d3');

describe('svgDraw', function() {
describe('drawRect', function() {
it('it should append a rectangle', function() {
describe('svgDraw', function () {
describe('drawRect', function () {
it('it should append a rectangle', function () {
const svg = MockD3('svg');
svgDraw.drawRect(svg, {
x: 10,
@@ -15,7 +14,7 @@ describe('svgDraw', function() {
height: '20',
rx: '10',
ry: '10',
class: 'unitTestRectangleClass'
class: 'unitTestRectangleClass',
});
expect(svg.__children.length).toBe(1);
const rect = svg.__children[0];
@@ -40,7 +39,7 @@ describe('svgDraw', function() {
width: '20',
height: '20',
rx: '10',
ry: '10'
ry: '10',
});
expect(svg.__children.length).toBe(1);
const rect = svg.__children[0];
@@ -49,8 +48,8 @@ describe('svgDraw', function() {
expect(rect.attr).not.toHaveBeenCalledWith('class', expect.anything());
});
});
describe('drawText', function() {
it('it should append a single element', function() {
describe('drawText', function () {
it('it should append a single element', function () {
const svg = MockD3('svg');
svgDraw.drawText(svg, {
x: 10,
@@ -74,7 +73,7 @@ describe('svgDraw', function() {
expect(text.style).toHaveBeenCalledWith('font-size', '10px');
expect(text.style).toHaveBeenCalledWith('font-weight', '500');
});
it('it should append a multiple elements', function() {
it('it should append a multiple elements', function () {
const svg = MockD3('svg');
svgDraw.drawText(svg, {
x: 10,
@@ -101,8 +100,8 @@ describe('svgDraw', function() {
expect(text3.text).toHaveBeenCalledWith('fine lines');
});
});
describe('drawBackgroundRect', function() {
it('it should append a rect before the previous element within a given bound', function() {
describe('drawBackgroundRect', function () {
it('it should append a rect before the previous element within a given bound', function () {
const svg = MockD3('svg');
const boundingRect = {
startx: 50,
@@ -110,7 +109,7 @@ describe('svgDraw', function() {
stopx: 150,
stopy: 260,
title: undefined,
fill: '#ccc'
fill: '#ccc',
};
svgDraw.drawBackgroundRect(svg, boundingRect);
expect(svg.__children.length).toBe(1);
@@ -1,14 +1,13 @@
/* eslint-env jasmine */
import { parser } from './parser/stateDiagram';
import stateDb from './stateDb';

describe('state diagram, ', function() {
describe('when parsing an info graph it', function() {
beforeEach(function() {
describe('state diagram, ', function () {
describe('when parsing an info graph it', function () {
beforeEach(function () {
parser.yy = stateDb;
});

it('super simple', function() {
it('super simple', function () {
const str = `
stateDiagram
[*] --> State1
@@ -17,7 +16,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('simple', function() {
it('simple', function () {
const str = `stateDiagram\n
State1 : this is another string
[*] --> State1
@@ -26,7 +25,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('simple with directive', function() {
it('simple with directive', function () {
const str = `%%{init: {'logLevel': 0 }}%%
stateDiagram\n
State1 : this is another string
@@ -36,7 +35,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle relation definitions', function() {
it('should handle relation definitions', function () {
const str = `stateDiagram\n
[*] --> State1
State1 --> [*]
@@ -49,7 +48,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('hide empty description', function() {
it('hide empty description', function () {
const str = `stateDiagram\n
hide empty description
[*] --> State1
@@ -64,7 +63,7 @@ describe('state diagram, ', function() {
parser.parse(str);
});

it('handle "as" in state names', function() {
it('handle "as" in state names', function () {
const str = `stateDiagram
assemble
state assemble
@@ -72,7 +71,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('handle "as" in state names 1', function() {
it('handle "as" in state names 1', function () {
const str = `stateDiagram
assemble
state assemble
@@ -80,7 +79,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('handle "as" in state names 2', function() {
it('handle "as" in state names 2', function () {
const str = `stateDiagram
assembleas
state assembleas
@@ -88,7 +87,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('handle "as" in state names 3', function() {
it('handle "as" in state names 3', function () {
const str = `stateDiagram
state "as" as as
`;
@@ -96,7 +95,7 @@ describe('state diagram, ', function() {
parser.parse(str);
});
it('scale', function() {
it('scale', function () {
const str = `stateDiagram\n
scale 350 width
[*] --> State1
@@ -111,7 +110,7 @@ describe('state diagram, ', function() {
parser.parse(str);
});

it('description after second state', function() {
it('description after second state', function () {
const str = `stateDiagram\n
scale 350 width
[*] --> State1 : This is the description with - in it
@@ -120,7 +119,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('shall handle descriptions inkluding minus signs', function() {
it('shall handle descriptions inkluding minus signs', function () {
const str = `stateDiagram\n
scale 350 width
[*] --> State1 : This is the description +-!
@@ -129,7 +128,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle state statements', function() {
it('should handle state statements', function () {
const str = `stateDiagram\n
state Configuring {
[*] --> NewValueSelection
@@ -141,7 +140,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle recursive state definitions', function() {
it('should handle recursive state definitions', function () {
const str = `stateDiagram\n
state Configuring {
[*] --> NewValueSelection
@@ -157,7 +156,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle multiple recursive state definitions', function() {
it('should handle multiple recursive state definitions', function () {
const str = `stateDiagram\n
scale 350 width
[*] --> NotShooting
@@ -182,14 +181,14 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle state deifintions with separation of id', function() {
it('should handle state deifintions with separation of id', function () {
const str = `stateDiagram\n
state "Long state description" as state1
`;

parser.parse(str);
});
it('should handle state deifintions with separation of id', function() {
it('should handle state deifintions with separation of id', function () {
const str = `stateDiagram
state "Not Shooting State" as NotShooting {
state "Idle mode" as Idle
@@ -203,7 +202,7 @@ describe('state diagram, ', function() {
parser.parse(str);
});
it('should State definition with quotes', function() {
it('should State definition with quotes', function () {
const str = `stateDiagram\n
scale 600 width

@@ -226,7 +225,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle fork statements', function() {
it('should handle fork statements', function () {
const str = `stateDiagram\n
state fork_state <<fork>>
[*] --> fork_state
@@ -242,7 +241,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle concurrent state', function() {
it('should handle concurrent state', function () {
const str = `stateDiagram\n
[*] --> Active

@@ -263,7 +262,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle concurrent state', function() {
it('should handle concurrent state', function () {
const str = `stateDiagram\n
[*] --> Active

@@ -288,7 +287,7 @@ describe('state diagram, ', function() {

// parser.parse(str);
// });
it('should handle note statements', function() {
it('should handle note statements', function () {
const str = `stateDiagram\n
[*] --> Active
Active --> Inactive
@@ -304,7 +303,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle multiline notes with different line breaks', function() {
it('should handle multiline notes with different line breaks', function () {
const str = `stateDiagram
State1
note right of State1
@@ -314,7 +313,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle floating notes', function() {
it('should handle floating notes', function () {
const str = `stateDiagram
foo: bar
note "This is a floating note" as N1
@@ -322,7 +321,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle floating notes', function() {
it('should handle floating notes', function () {
const str = `stateDiagram\n
state foo
note "This is a floating note" as N1
@@ -330,7 +329,7 @@ describe('state diagram, ', function() {

parser.parse(str);
});
it('should handle notes for composit states', function() {
it('should handle notes for composit states', function () {
const str = `stateDiagram\n
[*] --> NotShooting

@@ -348,9 +347,8 @@ describe('state diagram, ', function() {
parser.parse(str);
});
});
describe('when parsing an ignored info graph it', function() {

xit('should handle if statements', function() {
describe('when parsing an ignored info graph it', function () {
xit('should handle if statements', function () {
const str = `stateDiagram\n
[*] --> "Order Submitted"
if "Payment Accepted" then
@@ -85,6 +85,11 @@ const setupNode = (g, parent, node, altFlag) => {
);
}

//
if (nodeDb[node.id].description.length === 1 && nodeDb[node.id].shape === 'rectWithTitle') {
nodeDb[node.id].shape = 'rect';
}

// Save data for description and group so that for instance a statement without description overwrites
// one with description
@@ -1,13 +1,12 @@
/* eslint-env jasmine */
import journeyDb from './journeyDb';

describe('when using the journeyDb', function() {
beforeEach(function() {
describe('when using the journeyDb', function () {
beforeEach(function () {
journeyDb.clear();
});

describe('when calling the clear function', function() {
beforeEach(function() {
describe('when calling the clear function', function () {
beforeEach(function () {
journeyDb.addSection('weekends skip test');
journeyDb.addTask('test1', '4: id1, id3');
journeyDb.addTask('test2', '2: id2');
@@ -25,8 +24,8 @@ describe('when using the journeyDb', function() {
});
});

describe('when calling the clear function', function() {
beforeEach(function() {
describe('when calling the clear function', function () {
beforeEach(function () {
journeyDb.addSection('weekends skip test');
journeyDb.addTask('test1', '3: id1, id3');
journeyDb.addTask('test2', '1: id2');
@@ -43,7 +42,7 @@ describe('when using the journeyDb', function() {
});
});

describe('tasks and actors should be added', function() {
describe('tasks and actors should be added', function () {
journeyDb.setTitle('Shopping');
journeyDb.addSection('Journey to the shops');
journeyDb.addTask('Get car keys', ':5:Dad');
@@ -59,29 +58,29 @@ describe('when using the journeyDb', function() {
people: ['Dad'],
section: 'Journey to the shops',
task: 'Get car keys',
type: 'Journey to the shops'
type: 'Journey to the shops',
},
{
score: 3,
people: ['Dad', 'Mum', 'Child#1', 'Child#2'],
section: 'Journey to the shops',
task: 'Go to car',
type: 'Journey to the shops'
type: 'Journey to the shops',
},
{
score: 4,
people: ['Dad'],
section: 'Journey to the shops',
task: 'Drive to supermarket',
type: 'Journey to the shops'
type: 'Journey to the shops',
},
{
score: 5,
people: ['Mum'],
section: 'Do shopping',
task: 'Go shopping',
type: 'Do shopping'
}
type: 'Do shopping',
},
]);
expect(journeyDb.getActors()).toEqual(['Child#1', 'Child#2', 'Dad', 'Mum']);
@@ -1,27 +1,25 @@
/* eslint-env jasmine */
/* eslint-disable no-eval */
import { parser } from './journey';
import journeyDb from '../journeyDb';

const parserFnConstructor = str => {
const parserFnConstructor = (str) => {
return () => {
parser.parse(str);
};
};

describe('when parsing a journey diagram it', function() {
beforeEach(function() {
describe('when parsing a journey diagram it', function () {
beforeEach(function () {
parser.yy = journeyDb;
parser.yy.clear();
});

it('should handle a title definition', function() {
it('should handle a title definition', function () {
const str = 'journey\ntitle Adding journey diagram functionality to mermaid';

expect(parserFnConstructor(str)).not.toThrow();
});

it('should handle a section definition', function() {
it('should handle a section definition', function () {
const str =
'journey\n' +
'title Adding journey diagram functionality to mermaid\n' +
@@ -29,7 +27,7 @@ describe('when parsing a journey diagram it', function() {

expect(parserFnConstructor(str)).not.toThrow();
});
it('should handle multiline section titles with different line breaks', function() {
it('should handle multiline section titles with different line breaks', function () {
const str =
'journey\n' +
'title Adding gantt diagram functionality to mermaid\n' +
@@ -38,7 +36,7 @@ describe('when parsing a journey diagram it', function() {
expect(parserFnConstructor(str)).not.toThrow();
});

it('should handle a task definition', function() {
it('should handle a task definition', function () {
const str =
'journey\n' +
'title Adding journey diagram functionality to mermaid\n' +
@@ -62,56 +60,56 @@ describe('when parsing a journey diagram it', function() {
people: ['Alice', 'Bob', 'Charlie'],
section: 'Documentation',
task: 'A task',
type: 'Documentation'
type: 'Documentation',
});
expect(tasks[1]).toEqual({
score: 3,
people: ['Bob', 'Charlie'],
section: 'Documentation',
type: 'Documentation',
task: 'B task'
task: 'B task',
});
expect(tasks[2]).toEqual({
score: 5,
people: [],
section: 'Documentation',
type: 'Documentation',
task: 'C task'
task: 'C task',
});
expect(tasks[3]).toEqual({
score: 5,
people: ['Charlie', 'Alice'],
section: 'Documentation',
task: 'D task',
type: 'Documentation'
type: 'Documentation',
});
expect(tasks[4]).toEqual({
score: 5,
people: [''],
section: 'Documentation',
type: 'Documentation',
task: 'E task'
task: 'E task',
});
expect(tasks[5]).toEqual({
score: 5,
people: [''],
section: 'Another section',
type: 'Another section',
task: 'P task'
task: 'P task',
});
expect(tasks[6]).toEqual({
score: 5,
people: [''],
section: 'Another section',
type: 'Another section',
task: 'Q task'
task: 'Q task',
});
expect(tasks[7]).toEqual({
score: 5,
people: [''],
section: 'Another section',
type: 'Another section',
task: 'R task'
task: 'R task',
});
});
});
@@ -1,4 +1,3 @@
/* eslint-env jasmine */
import mermaid from './mermaid';
import flowDb from './diagrams/flowchart/flowDb';
import flowParser from './diagrams/flowchart/parser/flow';
@@ -6,9 +5,9 @@ import flowRenderer from './diagrams/flowchart/flowRenderer';

const spyOn = jest.spyOn;

describe('when using mermaid and ', function() {
describe('when detecting chart type ', function() {
it('should not start rendering with mermaid.startOnLoad set to false', function() {
describe('when using mermaid and ', function () {
describe('when detecting chart type ', function () {
it('should not start rendering with mermaid.startOnLoad set to false', function () {
mermaid.startOnLoad = false;
document.body.innerHTML = '<div class="mermaid">graph TD;\na;</div>';
spyOn(mermaid, 'init');
@@ -16,7 +15,7 @@ describe('when using mermaid and ', function() {
expect(mermaid.init).not.toHaveBeenCalled();
});

it('should start rendering with both startOnLoad set', function() {
it('should start rendering with both startOnLoad set', function () {
mermaid.startOnLoad = true;
document.body.innerHTML = '<div class="mermaid">graph TD;\na;</div>';
spyOn(mermaid, 'init');
@@ -24,7 +23,7 @@ describe('when using mermaid and ', function() {
expect(mermaid.init).toHaveBeenCalled();
});

it('should start rendering with mermaid.startOnLoad', function() {
it('should start rendering with mermaid.startOnLoad', function () {
mermaid.startOnLoad = true;
document.body.innerHTML = '<div class="mermaid">graph TD;\na;</div>';
spyOn(mermaid, 'init');
@@ -32,7 +31,7 @@ describe('when using mermaid and ', function() {
expect(mermaid.init).toHaveBeenCalled();
});

it('should start rendering as a default with no changes performed', function() {
it('should start rendering as a default with no changes performed', function () {
document.body.innerHTML = '<div class="mermaid">graph TD;\na;</div>';
spyOn(mermaid, 'init');
mermaid.contentLoaded();
@@ -40,94 +39,94 @@ describe('when using mermaid and ', function() {
});
});

describe('when calling addEdges ', function() {
beforeEach(function() {
describe('when calling addEdges ', function () {
beforeEach(function () {
flowParser.parser.yy = flowDb;
flowDb.clear();
flowDb.setGen('gen-2');
});
it('it should handle edges with text', function() {
it('it should handle edges with text', function () {
flowParser.parser.parse('graph TD;A-->|text ex|B;');
flowParser.parser.yy.getVertices();
const edges = flowParser.parser.yy.getEdges();

const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B-');
expect(options.arrowhead).toBe('normal');
expect(options.label.match('text ex')).toBeTruthy();
}
},
};

flowRenderer.addEdges(edges, mockG);
});

it('should handle edges without text', function() {
it('should handle edges without text', function () {
flowParser.parser.parse('graph TD;A-->B;');
flowParser.parser.yy.getVertices();
const edges = flowParser.parser.yy.getEdges();

const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B-');
expect(options.arrowhead).toBe('normal');
}
},
};

flowRenderer.addEdges(edges, mockG);
});

it('should handle open-ended edges', function() {
it('should handle open-ended edges', function () {
flowParser.parser.parse('graph TD;A---B;');
flowParser.parser.yy.getVertices();
const edges = flowParser.parser.yy.getEdges();

const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B-');
expect(options.arrowhead).toBe('none');
}
},
};

flowRenderer.addEdges(edges, mockG);
});
it('should handle edges with styles defined', function() {
it('should handle edges with styles defined', function () {
flowParser.parser.parse('graph TD;A---B; linkStyle 0 stroke:val1,stroke-width:val2;');
flowParser.parser.yy.getVertices();
const edges = flowParser.parser.yy.getEdges();

const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B-');
expect(options.arrowhead).toBe('none');
expect(options.style).toBe('stroke:val1;stroke-width:val2;fill:none;');
}
},
};

flowRenderer.addEdges(edges, mockG);
});
it('should handle edges with interpolation defined', function() {
it('should handle edges with interpolation defined', function () {
flowParser.parser.parse('graph TD;A---B; linkStyle 0 interpolate basis');
flowParser.parser.yy.getVertices();
const edges = flowParser.parser.yy.getEdges();

const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B-');
expect(options.arrowhead).toBe('none');
expect(options.curve).toBe('basis'); // mocked as string
}
},
};

flowRenderer.addEdges(edges, mockG);
});
it('should handle edges with text and styles defined', function() {
it('should handle edges with text and styles defined', function () {
flowParser.parser.parse(
'graph TD;A---|the text|B; linkStyle 0 stroke:val1,stroke-width:val2;'
);
@@ -135,72 +134,72 @@ describe('when using mermaid and ', function() {
const edges = flowParser.parser.yy.getEdges();

const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B-');
expect(options.arrowhead).toBe('none');
expect(options.label.match('the text')).toBeTruthy();
expect(options.style).toBe('stroke:val1;stroke-width:val2;fill:none;');
}
},
};

flowRenderer.addEdges(edges, mockG);
});

it('should set fill to "none" by default when handling edges', function() {
it('should set fill to "none" by default when handling edges', function () {
flowParser.parser.parse('graph TD;A---B; linkStyle 0 stroke:val1,stroke-width:val2;');
flowParser.parser.yy.getVertices();
const edges = flowParser.parser.yy.getEdges();

const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B');
expect(options.arrowhead).toBe('none');
expect(options.style).toBe('stroke:val1;stroke-width:val2;fill:none;');
}
},
};

flowRenderer.addEdges(edges, mockG);
});

it('should not set fill to none if fill is set in linkStyle', function() {
it('should not set fill to none if fill is set in linkStyle', function () {
flowParser.parser.parse(
'graph TD;A---B; linkStyle 0 stroke:val1,stroke-width:val2,fill:blue;'
);
flowParser.parser.yy.getVertices();
const edges = flowParser.parser.yy.getEdges();
const mockG = {
setEdge: function(start, end, options) {
setEdge: function (start, end, options) {
expect(start).toContain('flowchart-A-');
expect(end).toContain('flowchart-B-');
expect(options.arrowhead).toBe('none');
expect(options.style).toBe('stroke:val1;stroke-width:val2;fill:blue;');
}
},
};

flowRenderer.addEdges(edges, mockG);
});
});
describe('checking validity of input ', function() {
beforeEach(function() {
describe('checking validity of input ', function () {
beforeEach(function () {
flowParser.parser.yy = flowDb;
flowDb.clear();
flowDb.setGen('gen-2');
});
it('it should throw for an invalid definiton', function() {
it('it should throw for an invalid definiton', function () {
expect(() => mermaid.parse('this is not a mermaid diagram definition')).toThrow();
});

it('it should not throw for a valid flow definition', function() {
it('it should not throw for a valid flow definition', function () {
expect(() => mermaid.parse('graph TD;A--x|text including URL space|B;')).not.toThrow();
});
it('it should throw for an invalid flow definition', function() {
it('it should throw for an invalid flow definition', function () {
expect(() => mermaid.parse('graph TQ;A--x|text including URL space|B;')).toThrow();
});

it('it should not throw for a valid sequenceDiagram definition', function() {
it('it should not throw for a valid sequenceDiagram definition', function () {
const text =
'sequenceDiagram\n' +
'Alice->Bob: Hello Bob, how are you?\n\n' +
@@ -214,7 +213,7 @@ describe('when using mermaid and ', function() {
expect(() => mermaid.parse(text)).not.toThrow();
});

it('it should throw for an invalid sequenceDiagram definition', function() {
it('it should throw for an invalid sequenceDiagram definition', function () {
const text =
'sequenceDiagram\n' +
'Alice:->Bob: Hello Bob, how are you?\n\n' +
@@ -1,15 +1,14 @@
/* eslint-env jasmine */
import mermaidAPI from './mermaidAPI';
import { assignWithDepth } from './utils';

describe('when using mermaidAPI and ', function() {
describe('doing initialize ', function() {
beforeEach(function() {
describe('when using mermaidAPI and ', function () {
describe('doing initialize ', function () {
beforeEach(function () {
document.body.innerHTML = '';
mermaidAPI.globalReset();
});

it('should copy a literal into the configuration', function() {
it('should copy a literal into the configuration', function () {
const orgConfig = mermaidAPI.getConfig();
expect(orgConfig.testLiteral).toBe(undefined);

@@ -18,13 +17,13 @@ describe('when using mermaidAPI and ', function() {

expect(config.testLiteral).toBe(true);
});
it('should copy a an object into the configuration', function() {
it('should copy a an object into the configuration', function () {
const orgConfig = mermaidAPI.getConfig();
expect(orgConfig.testObject).toBe(undefined);

const object = {
test1: 1,
test2: false
test2: false,
};

mermaidAPI.initialize({ testObject: object });
@@ -38,13 +37,13 @@ describe('when using mermaidAPI and ', function() {
expect(config.testObject.test2).toBe(false);
expect(config.testObject.test3).toBe(true);
});
it('should reset mermaid config to global defaults', function() {
it('should reset mermaid config to global defaults', function () {
let config = {
logLevel: 0,
securityLevel: 'loose'
securityLevel: 'loose',
};
mermaidAPI.initialize(config);
mermaidAPI.setConfig({securityLevel:'strict', logLevel: 1});
mermaidAPI.setConfig({ securityLevel: 'strict', logLevel: 1 });
expect(mermaidAPI.getConfig().logLevel).toBe(1);
expect(mermaidAPI.getConfig().securityLevel).toBe('strict');
mermaidAPI.globalReset();
@@ -52,68 +51,73 @@ describe('when using mermaidAPI and ', function() {
expect(mermaidAPI.getConfig().securityLevel).toBe('loose');
});
it('should prevent changes to site defaults (sneaky)', function() {
|
||||
it('should prevent changes to site defaults (sneaky)', function () {
|
||||
let config = {
|
||||
logLevel: 0
|
||||
logLevel: 0,
|
||||
};
|
||||
mermaidAPI.initialize(config);
|
||||
const siteConfig = mermaidAPI.getSiteConfig();
|
||||
expect(mermaidAPI.getConfig().logLevel).toBe(0);
|
||||
config.secure = {
|
||||
toString: function() {
|
||||
toString: function () {
|
||||
mermaidAPI.initialize({ securityLevel: 'loose' });
|
||||
}
|
||||
},
|
||||
};
|
||||
mermaidAPI.reinitialize(config);
|
||||
expect(mermaidAPI.getConfig().secure).toEqual(mermaidAPI.getSiteConfig().secure);
|
||||
expect(mermaidAPI.getConfig().securityLevel).toBe('strict');
|
||||
mermaidAPI.reset();
|
||||
expect(mermaidAPI.getSiteConfig()).toEqual(siteConfig)
|
||||
expect(mermaidAPI.getSiteConfig()).toEqual(siteConfig);
|
||||
expect(mermaidAPI.getConfig()).toEqual(siteConfig);
|
||||
});
|
||||
it('should prevent clobbering global defaults (direct)', function() {
|
||||
it('should prevent clobbering global defaults (direct)', function () {
|
||||
let config = assignWithDepth({}, mermaidAPI.defaultConfig);
|
||||
assignWithDepth(config, { logLevel: 0 });
|
||||
|
||||
let error = { message: '' };
|
||||
try {
|
||||
mermaidAPI['defaultConfig'] = config;
|
||||
} catch(e) {
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
expect(error.message).toBe('Cannot assign to read only property \'defaultConfig\' of object \'#<Object>\'');
|
||||
expect(error.message).toBe(
|
||||
"Cannot assign to read only property 'defaultConfig' of object '#<Object>'"
|
||||
);
|
||||
expect(mermaidAPI.defaultConfig['logLevel']).toBe(5);
|
||||
});
it('should prevent changes to global defaults (direct)', function() {
it('should prevent changes to global defaults (direct)', function () {
let error = { message: '' };
try {
mermaidAPI.defaultConfig['logLevel'] = 0;
} catch(e) {
} catch (e) {
error = e;
}
expect(error.message).toBe('Cannot assign to read only property \'logLevel\' of object \'#<Object>\'');
expect(error.message).toBe(
"Cannot assign to read only property 'logLevel' of object '#<Object>'"
);
expect(mermaidAPI.defaultConfig['logLevel']).toBe(5);
});
it('should prevent sneaky changes to global defaults (assignWithDepth)', function() {
it('should prevent sneaky changes to global defaults (assignWithDepth)', function () {
let config = {
logLevel: 0
logLevel: 0,
};
let error = { message: '' };
try {
assignWithDepth(mermaidAPI.defaultConfig, config);
} catch(e) {
} catch (e) {
error = e;
}
expect(error.message).toBe('Cannot assign to read only property \'logLevel\' of object \'#<Object>\'');
expect(error.message).toBe(
"Cannot assign to read only property 'logLevel' of object '#<Object>'"
);
expect(mermaidAPI.defaultConfig['logLevel']).toBe(5);
});

});
describe('checking validity of input ', function() {
it('it should throw for an invalid definiton', function() {
describe('checking validity of input ', function () {
it('it should throw for an invalid definiton', function () {
expect(() => mermaidAPI.parse('this is not a mermaid diagram definition')).toThrow();
});
it('it should not throw for a valid definiton', function() {
it('it should not throw for a valid definiton', function () {
expect(() => mermaidAPI.parse('graph TD;A--x|text including URL space|B;')).not.toThrow();
});
});
@@ -1,100 +1,146 @@
/* eslint-env jasmine */
import utils from './utils';

describe('when assignWithDepth: should merge objects within objects', function() {
it('should handle simple, depth:1 types (identity)', function() {
describe('when assignWithDepth: should merge objects within objects', function () {
it('should handle simple, depth:1 types (identity)', function () {
let config_0 = { foo: 'bar', bar: 0 };
let config_1 = { foo: 'bar', bar: 0 };
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual(config_1);
});
it('should handle simple, depth:1 types (dst: undefined)', function() {
it('should handle simple, depth:1 types (dst: undefined)', function () {
let config_0 = undefined;
let config_1 = { foo: 'bar', bar: 0 };
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual(config_1);
});
it('should handle simple, depth:1 types (src: undefined)', function() {
it('should handle simple, depth:1 types (src: undefined)', function () {
let config_0 = { foo: 'bar', bar: 0 };
let config_1 = undefined;
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual(config_0);
});
it('should handle simple, depth:1 types (merge)', function() {
it('should handle simple, depth:1 types (merge)', function () {
let config_0 = { foo: 'bar', bar: 0 };
let config_1 = { foo: 'foo' };
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual({ foo: 'foo', bar: 0});
expect(result).toEqual({ foo: 'foo', bar: 0 });
});
it('should handle depth:2 types (dst: orphan)', function() {
it('should handle depth:2 types (dst: orphan)', function () {
let config_0 = { foo: 'bar', bar: { foo: 'bar' } };
let config_1 = { foo: 'bar' };
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual(config_0);
});
it('should handle depth:2 types (dst: object, src: simple type)', function() {
it('should handle depth:2 types (dst: object, src: simple type)', function () {
let config_0 = { foo: 'bar', bar: { foo: 'bar' } };
let config_1 = { foo: 'foo', bar: 'should NOT clobber'};
let config_1 = { foo: 'foo', bar: 'should NOT clobber' };
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual({ foo: 'foo', bar: { foo: 'bar' } } );
expect(result).toEqual({ foo: 'foo', bar: { foo: 'bar' } });
});
it('should handle depth:2 types (src: orphan)', function() {
it('should handle depth:2 types (src: orphan)', function () {
let config_0 = { foo: 'bar' };
let config_1 = { foo: 'bar', bar: { foo: 'bar' } };
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual(config_1);
});
it('should handle depth:2 types (merge)', function() {
it('should handle depth:2 types (merge)', function () {
let config_0 = { foo: 'bar', bar: { foo: 'bar' }, boofar: 1 };
let config_1 = { foo: 'foo', bar: { bar: 0 }, foobar: 'foobar' };
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual({ foo: "foo", bar: { foo: "bar", bar: 0 }, foobar: "foobar", boofar: 1 });
expect(result).toEqual({
foo: 'foo',
bar: { foo: 'bar', bar: 0 },
foobar: 'foobar',
boofar: 1,
});
});
it('should handle depth:3 types (merge with clobber because assignWithDepth::depth == 2)', function() {
let config_0 = { foo: 'bar', bar: { foo: 'bar', bar: { foo: { message: 'this', willbe: 'clobbered' } } }, boofar: 1 };
let config_1 = { foo: 'foo', bar: { foo: 'foo', bar: { foo: { message: 'clobbered other foo' } } }, foobar: 'foobar' };
it('should handle depth:3 types (merge with clobber because assignWithDepth::depth == 2)', function () {
let config_0 = {
foo: 'bar',
bar: { foo: 'bar', bar: { foo: { message: 'this', willbe: 'clobbered' } } },
boofar: 1,
};
let config_1 = {
foo: 'foo',
bar: { foo: 'foo', bar: { foo: { message: 'clobbered other foo' } } },
foobar: 'foobar',
};
let result = utils.assignWithDepth(config_0, config_1);
expect(result).toEqual({ foo: "foo", bar: { foo: 'foo', bar: { foo: { message: 'clobbered other foo' } } }, foobar: "foobar", boofar: 1 });
expect(result).toEqual({
foo: 'foo',
bar: { foo: 'foo', bar: { foo: { message: 'clobbered other foo' } } },
foobar: 'foobar',
boofar: 1,
});
});
it('should handle depth:3 types (merge with clobber because assignWithDepth::depth == 1)', function() {
let config_0 = { foo: 'bar', bar: { foo: 'bar', bar: { foo: { message: '', willNotbe: 'present' }, bar: 'shouldNotBePresent' } }, boofar: 1 };
let config_1 = { foo: 'foo', bar: { foo: 'foo', bar: { foo: { message: 'this' } } }, foobar: 'foobar' };
it('should handle depth:3 types (merge with clobber because assignWithDepth::depth == 1)', function () {
let config_0 = {
foo: 'bar',
bar: {
foo: 'bar',
bar: { foo: { message: '', willNotbe: 'present' }, bar: 'shouldNotBePresent' },
},
boofar: 1,
};
let config_1 = {
foo: 'foo',
bar: { foo: 'foo', bar: { foo: { message: 'this' } } },
foobar: 'foobar',
};
let result = utils.assignWithDepth(config_0, config_1, { depth: 1 });
expect(result).toEqual({ foo: "foo", bar: { foo: 'foo', bar: { foo: { message: 'this' } } }, foobar: "foobar", boofar: 1 });
expect(result).toEqual({
foo: 'foo',
bar: { foo: 'foo', bar: { foo: { message: 'this' } } },
foobar: 'foobar',
boofar: 1,
});
});
it('should handle depth:3 types (merge with no clobber because assignWithDepth::depth == 3)', function() {
let config_0 = { foo: 'bar', bar: { foo: 'bar', bar: { foo: { message: '', willbe: 'present' } } }, boofar: 1 };
let config_1 = { foo: 'foo', bar: { foo: 'foo', bar: { foo: { message: 'this' } } }, foobar: 'foobar' };
it('should handle depth:3 types (merge with no clobber because assignWithDepth::depth == 3)', function () {
let config_0 = {
foo: 'bar',
bar: { foo: 'bar', bar: { foo: { message: '', willbe: 'present' } } },
boofar: 1,
};
let config_1 = {
foo: 'foo',
bar: { foo: 'foo', bar: { foo: { message: 'this' } } },
foobar: 'foobar',
};
let result = utils.assignWithDepth(config_0, config_1, { depth: 3 });
expect(result).toEqual({ foo: "foo", bar: { foo: 'foo', bar: { foo: { message: 'this', willbe: 'present' } } }, foobar: "foobar", boofar: 1 });
expect(result).toEqual({
foo: 'foo',
bar: { foo: 'foo', bar: { foo: { message: 'this', willbe: 'present' } } },
foobar: 'foobar',
boofar: 1,
});
});
});
describe('when memoizing', function() {
it('should return the same value', function() {
const fib = utils.memoize(function(n, canary) {
describe('when memoizing', function () {
it('should return the same value', function () {
const fib = utils.memoize(function (n, canary) {
canary.flag = true;
if (n < 2){
if (n < 2) {
return 1;
}else{
} else {
//We'll console.log a loader every time we have to recurse
return fib(n-2, canary) + fib(n-1, canary);
return fib(n - 2, canary) + fib(n - 1, canary);
}
});
let canary = {flag: false};
let canary = { flag: false };
fib(10, canary);
expect(canary.flag).toBe(true);
canary = {flag: false};
canary = { flag: false };
fib(10, canary);
expect(canary.flag).toBe(false);
});
})
describe('when detecting chart type ', function() {
it('should handle a graph definition', function() {
});
describe('when detecting chart type ', function () {
it('should handle a graph definition', function () {
const str = 'graph TB\nbfs1:queue';
const type = utils.detectType(str);
expect(type).toBe('flowchart');
});
it('should handle an initialize definition', function() {
it('should handle an initialize definition', function () {
const str = `
%%{initialize: { 'logLevel': 0, 'theme': 'dark' }}%%
sequenceDiagram
@@ -102,9 +148,9 @@ Alice->Bob: hi`;
const type = utils.detectType(str);
const init = utils.detectInit(str);
expect(type).toBe('sequence');
expect(init).toEqual({logLevel:0,theme:"dark"});
expect(init).toEqual({ logLevel: 0, theme: 'dark' });
});
it('should handle an init definition', function() {
it('should handle an init definition', function () {
const str = `
%%{init: { 'logLevel': 0, 'theme': 'dark' }}%%
sequenceDiagram
@@ -112,9 +158,9 @@ Alice->Bob: hi`;
const type = utils.detectType(str);
const init = utils.detectInit(str);
expect(type).toBe('sequence');
expect(init).toEqual({logLevel:0,theme:"dark"});
expect(init).toEqual({ logLevel: 0, theme: 'dark' });
});
it('should handle an init definition with config converted to the proper diagram configuration', function() {
it('should handle an init definition with config converted to the proper diagram configuration', function () {
const str = `
%%{init: { 'logLevel': 0, 'theme': 'dark', 'config': {'wrap': true} } }%%
sequenceDiagram
@@ -122,9 +168,9 @@ Alice->Bob: hi`;
const type = utils.detectType(str);
const init = utils.detectInit(str);
expect(type).toBe('sequence');
expect(init).toEqual({logLevel:0, theme:"dark", sequence: { wrap: true }});
expect(init).toEqual({ logLevel: 0, theme: 'dark', sequence: { wrap: true } });
});
it('should handle a multiline init definition', function() {
it('should handle a multiline init definition', function () {
const str = `
%%{
init: {
@@ -137,9 +183,9 @@ Alice->Bob: hi`;
const type = utils.detectType(str);
const init = utils.detectInit(str);
expect(type).toBe('sequence');
expect(init).toEqual({logLevel:0,theme:"dark"});
expect(init).toEqual({ logLevel: 0, theme: 'dark' });
});
it('should handle multiple init directives', function() {
it('should handle multiple init directives', function () {
const str = `
%%{ init: { 'logLevel': 0, 'theme': 'forest' } }%%
%%{
@@ -152,39 +198,39 @@ Alice->Bob: hi`;
const type = utils.detectType(str);
const init = utils.detectInit(str);
expect(type).toBe('sequence');
expect(init).toEqual({logLevel:0,theme:"dark"});
expect(init).toEqual({ logLevel: 0, theme: 'dark' });
});
it('should handle a graph definition with leading spaces', function() {
it('should handle a graph definition with leading spaces', function () {
const str = ' graph TB\nbfs1:queue';
const type = utils.detectType(str);
expect(type).toBe('flowchart');
});

it('should handle a graph definition with leading spaces and newline', function() {
it('should handle a graph definition with leading spaces and newline', function () {
const str = ' \n graph TB\nbfs1:queue';
const type = utils.detectType(str);
expect(type).toBe('flowchart');
});
it('should handle a graph definition for gitGraph', function() {
it('should handle a graph definition for gitGraph', function () {
const str = ' \n gitGraph TB:\nbfs1:queue';
const type = utils.detectType(str);
expect(type).toBe('git');
});
});
describe('when finding substring in array ', function() {
it('should return the array index that contains the substring', function() {
describe('when finding substring in array ', function () {
it('should return the array index that contains the substring', function () {
const arr = ['stroke:val1', 'fill:val2'];
const result = utils.isSubstringInArray('fill', arr);
expect(result).toEqual(1);
});
it('should return -1 if the substring is not found in the array', function() {
it('should return -1 if the substring is not found in the array', function () {
const arr = ['stroke:val1', 'stroke-width:val2'];
const result = utils.isSubstringInArray('fill', arr);
expect(result).toEqual(-1);
});
});
describe('when formatting urls', function() {
it('should handle links', function() {
describe('when formatting urls', function () {
it('should handle links', function () {
const url = 'https://mermaid-js.github.io/mermaid/#/';

let config = { securityLevel: 'loose' };
@@ -195,7 +241,7 @@ describe('when formatting urls', function() {
result = utils.formatUrl(url, config);
expect(result).toEqual(url);
});
it('should handle anchors', function() {
it('should handle anchors', function () {
const url = '#interaction';

let config = { securityLevel: 'loose' };
@@ -206,7 +252,7 @@ describe('when formatting urls', function() {
result = utils.formatUrl(url, config);
expect(result).toEqual('about:blank');
});
it('should handle mailto', function() {
it('should handle mailto', function () {
const url = 'mailto:user@user.user';

let config = { securityLevel: 'loose' };
@@ -217,7 +263,7 @@ describe('when formatting urls', function() {
result = utils.formatUrl(url, config);
expect(result).toEqual(url);
});
it('should handle other protocols', function() {
it('should handle other protocols', function () {
const url = 'notes://do-your-thing/id';

let config = { securityLevel: 'loose' };
@@ -228,7 +274,7 @@ describe('when formatting urls', function() {
result = utils.formatUrl(url, config);
expect(result).toEqual(url);
});
it('should handle scripts', function() {
it('should handle scripts', function () {
const url = 'javascript:alert("test")';

let config = { securityLevel: 'loose' };
@@ -240,7 +286,7 @@ describe('when formatting urls', function() {
expect(result).toEqual('about:blank');
});
});
describe('when calculating SVG size', function() {
describe('when calculating SVG size', function () {
it('should return width 100% when useMaxWidth is true', function () {
const attrs = utils.calculateSvgSizeAttrs(100, 200, true);
expect(attrs.get('height')).toEqual(100);
@@ -256,31 +302,30 @@ describe('when calculating SVG size', function() {

describe('when initializing the id generator', function () {
it('should return a random number generator based on Date', function (done) {
const idGenerator = new utils.initIdGeneratior(false)
expect(typeof idGenerator.next).toEqual('function')
const lastId = idGenerator.next()
const idGenerator = new utils.initIdGeneratior(false);
expect(typeof idGenerator.next).toEqual('function');
const lastId = idGenerator.next();
setTimeout(() => {
expect(idGenerator.next() > lastId).toBe(true)
done()
}, 5)
expect(idGenerator.next() > lastId).toBe(true);
done();
}, 5);
});

it('should return a non random number generator', function () {
const idGenerator = new utils.initIdGeneratior(true)
expect(typeof idGenerator.next).toEqual('function')
const start = 0
const lastId = idGenerator.next()
expect(start).toEqual(lastId)
expect(idGenerator.next()).toEqual(lastId +1)
const idGenerator = new utils.initIdGeneratior(true);
expect(typeof idGenerator.next).toEqual('function');
const start = 0;
const lastId = idGenerator.next();
expect(start).toEqual(lastId);
expect(idGenerator.next()).toEqual(lastId + 1);
});

it('should return a non random number generator based on seed', function () {
const idGenerator = new utils.initIdGeneratior(true, 'thisIsASeed')
expect(typeof idGenerator.next).toEqual('function')
const start = 11
const lastId = idGenerator.next()
expect(start).toEqual(lastId)
expect(idGenerator.next()).toEqual(lastId +1)
const idGenerator = new utils.initIdGeneratior(true, 'thisIsASeed');
expect(typeof idGenerator.next).toEqual('function');
const start = 11;
const lastId = idGenerator.next();
expect(start).toEqual(lastId);
expect(idGenerator.next()).toEqual(lastId + 1);
});

})
});