Compare commits
330 Commits
| SHA1 |
| --- |
| 95967151ae |
| f2137e4cdd |
| bba6bb6255 |
| 7d7bc9782b |
| 5667694652 |
| 4cb0b9174b |
| 7493031d11 |
| e64d2dcada |
| f057bd1e89 |
| 1e7e719dbf |
| 5b6e9747b7 |
| 803c12877c |
| db5be3c28a |
| 434a033411 |
| fd2ee3e109 |
| 0816591cf9 |
| 836d02d0b9 |
| b951d506d9 |
| 333a13269c |
| e0a70bd352 |
| fe913ec10f |
| 50899b845d |
| 3e6396cc4b |
| a8d9b269ff |
| df7dc21893 |
| 750c74eea7 |
| 2f7da8b577 |
| e2dba9e9b8 |
| c79fcf7ad5 |
| c6abc43ca5 |
| 2c1913af23 |
| ed3d15501a |
| fef85cf259 |
| 00be016979 |
| 9290aa2b61 |
| 977491ef8d |
| 86fcbf9c5e |
| e081e4c9c6 |
| 357e738983 |
| 7242a98adb |
| 6974892a11 |
| ee0cb8793f |
| 0cfd3c7e02 |
| 23693bd57e |
| d742211797 |
| 9191663a1a |
| b3dd2a4282 |
| f51596eb61 |
| c1d252afe0 |
| c3f03e4497 |
| 9e67de8a9f |
| 6f09b14f44 |
| d78b33ca75 |
| fa9cf2b345 |
| b2c833b879 |
| 96516d6dd5 |
| 7071cd7381 |
| 0a7b8840a4 |
| f272cff7f6 |
| 6ede0f6166 |
| c44ff8a70f |
| d2eb507e7d |
| fa04e3de75 |
| 63184d53c1 |
| 1dfff7ac0e |
| 6c84544521 |
| 91651ca6ce |
| 6fd790570a |
| b322a964ca |
| 20b35cbe13 |
| d169641c99 |
| 856591d253 |
| 4587f5a73d |
| 10057217cb |
| 0c66e1b5b9 |
| 4800460e07 |
| 67ee649d99 |
| 57b780a0d7 |
| 19d9dd6954 |
| e298351caa |
| 752a12bda4 |
| afeb3b53c9 |
| 8ce658416b |
| 6e846ac3e5 |
| ec298185a3 |
| 58df72984f |
| 5bfddcc444 |
| a99b31a61d |
| 8a8b7bd48c |
| 1d04c7e1fe |
| 9f1c37ecb3 |
| 8e8651a0e1 |
| a8e9f21035 |
| 997cdfffb4 |
| 5a9c57ec0f |
| 418ac501aa |
| 12b58a17e1 |
| 7ca9df9357 |
| f912f8582d |
| a211b6d55d |
| c84c154603 |
| f903090e0f |
| 901df242b7 |
| 9dbb3767e2 |
| 5565d36ef2 |
| 48f8c3f85a |
| a3eef7298e |
| 7d3578b31a |
| 2914d4975b |
| 2a0fcd99e4 |
| d7e0e4afc5 |
| de78bb5ffe |
| 9ecee6b163 |
| e25e8af135 |
| 4ab2fed8d2 |
| 3e8fea6533 |
| a57b90809b |
| b097b8bb34 |
| 349aeb5a26 |
| e8aebb00ee |
| 6ece2838bd |
| bcd3aa7d61 |
| aca80726d7 |
| 2fc5745c58 |
| ba2f2cb284 |
| 06aa4a5c07 |
| 80c7c9a86e |
| 42d66fd3d9 |
| ae8dde981b |
| 228b832bed |
| b80bad0a93 |
| ad1763a8f2 |
| 97e26686f4 |
| f476002691 |
| a4992963b3 |
| 44ff608868 |
| f2ea725edb |
| 40a87fa736 |
| a5627f97ff |
| 8b1698b17d |
| f8da3c2e48 |
| 03f9264394 |
| 33da5bcee1 |
| 01541a3dc1 |
| 13ec3f6c0d |
| a949c2aaec |
| f4324d1de6 |
| f1d2808594 |
| 52d0605066 |
| 015b976c28 |
| b777c8de15 |
| 93201b2ded |
| 234d39cf96 |
| 9913bf654b |
| 2d560861df |
| 24786c643b |
| 3b4e48e32f |
| 45b81a9db9 |
| 500dea182f |
| 274e9f3e42 |
| cf589f713d |
| 07d881cd5e |
| 0d0f793f81 |
| 23dfe1e8a9 |
| a8e49a0d2d |
| 594847ba04 |
| f3fcfb635b |
| 011220180c |
| 3e9215b27d |
| 6ce2dfeaec |
| 9176237361 |
| 9b65abe76a |
| d6085fcdb9 |
| 259d2a9b5c |
| f62736c749 |
| 7ef1b28c83 |
| 7df9dea72a |
| 2a061fba07 |
| 93aa575697 |
| 8a20a71906 |
| 37076d3d1f |
| acf11bdc6c |
| d222196fd1 |
| 6f43082e38 |
| 964bc4dc12 |
| ecf7870b3e |
| 30fcab1c63 |
| edde501d54 |
| 35c0e2cea2 |
| 5cf061cb32 |
| 5b6dfb01a6 |
| a0b589979d |
| 0d0cdd0542 |
| 297fa7a069 |
| 2ed515091f |
| b093f9fd41 |
| fde6396f48 |
| 11250290a2 |
| be33c41a40 |
| e20528ab0d |
| a40d89ba0d |
| c0c7c96f28 |
| 003e5575b9 |
| d904574ea8 |
| cbb884edae |
| 0a7829fcfe |
| 7c970d6ff0 |
| 048b39ae16 |
| bb9d5e38fa |
| bb365ca1b5 |
| 53b97b3ffa |
| 9c101eb8af |
| c251270633 |
| dfb1095908 |
| cf2b415cd3 |
| e9f3ad63c6 |
| 6a9bd22904 |
| ebbf3bfa77 |
| 661aaead35 |
| 197671960d |
| e434ac9a1f |
| 388ec2cd97 |
| 1d7589abb9 |
| da47ba11f8 |
| eaa572aafe |
| d4ac9f8114 |
| 4bc2f4d26d |
| 7dd174642c |
| 89f3de169c |
| bc04bb28fe |
| 9df8d319c3 |
| 3f55200737 |
| 204b029973 |
| 3fbe3a3078 |
| 8848203d90 |
| 8edd89c665 |
| c78d623e93 |
| b093fc8a55 |
| 7e01b2b3fb |
| 26eaaa8c79 |
| 8cd0e3bb05 |
| e0f74690b0 |
| 8db7d73dc0 |
| 48ef5d74e8 |
| 18117af067 |
| b418642359 |
| 6a2f62a084 |
| 1ec262becf |
| 6cd5ffe637 |
| 20b43d8ad1 |
| f16143de7c |
| daba43dcba |
| 7c74107f36 |
| 2e0bfaeb74 |
| d31e03d04e |
| 8da5f26452 |
| d6e22d5f5e |
| c52f4d6307 |
| 77f1a8ecac |
| a4348f1963 |
| fe0434cb16 |
| a6f992ce5b |
| 04fc5e51fc |
| d430019b44 |
| 72493b7a8b |
| 0a828ee67f |
| 385454f3f1 |
| f0ff4d087f |
| 89081ea169 |
| e3d753da76 |
| 1e89aa7d92 |
| f979d941b6 |
| b1dffc9eec |
| 5424092189 |
| bfa8f75d3b |
| 75d9e7bc59 |
| c5a7eeaa0f |
| 4947c75ff7 |
| d07a7f505b |
| 7ab5dac5c7 |
| 4fc082b526 |
| 2a0bc3b2e8 |
| a7d79baf00 |
| c9442653e9 |
| 969faf91fe |
| 8fe218676a |
| f6b518ed49 |
| c53c679c6b |
| 2d91daf858 |
| a5b7145527 |
| ee912c2b29 |
| bdf9b33b10 |
| 167368d508 |
| bb534225b3 |
| ab69dacc6d |
| 8c2690e01f |
| 57db10a4c0 |
| fc5c07027c |
| 5da8c26cb6 |
| 626fdfe345 |
| c071503f19 |
| 873f7a591b |
| e0f5f10215 |
| 11a949b78e |
| 81007f841a |
| 84f592c417 |
| 1e3b61a188 |
| baeeedcc16 |
| 70763fd13b |
| 21c91a8408 |
| 07af7123c9 |
| 3fbb5f08b8 |
| 973f5b2591 |
| 64b5880444 |
| 81132bf0ed |
| a9c23f205b |
| 801d1e9eda |
| 15977695cd |
| e0d578920a |
| b6c75c8d62 |
| e4cb1dcbfd |
| ceb8430f9b |
| 2565b47637 |
| 73eb98c4c3 |
| 8c7e8b7a6c |
| 894616bef9 |
| 1cfd6b67b3 |
| 5e4d8a1034 |
| 00785daac9 |
| 12bfc9570a |
2
.ackrc
@@ -1,2 +1,4 @@
--ignore-dir=dist
--ignore-file=match:/^yarn\.lock$/
--ignore-file=match:/^yarn-error\.log$/
--ignore-dir=coverage
@@ -1,14 +0,0 @@
engines:
  duplication:
    enabled: true
    config:
      languages:
        - javascript
  eslint:
    enabled: true
ratings:
  paths:
    - "**.js"
exclude_paths:
  - node_modules/
  - dist/
1
.gitattributes
vendored
@@ -1 +0,0 @@
*.js text eol=lf
17
.gitignore
vendored
@@ -1,15 +1,10 @@
node_modules
bower_components

*.sublime-project
*.sublime-workspace

.DS_Store
.idea
coverage

test/tmp_*
test/fixtures/samples/*.actual*
node_modules/
coverage/

dist/*.js
dist/*.css
dist/*.map

yarn-error.log
.npmrc
17
.travis.yml
@@ -1,19 +1,8 @@
sudo: required
dist: trusty
addons:
  chrome: stable
code_climate:
  repo_token: e87e6bf1c253e0555437ebd23235fdfe2749b889358e7c6d100e4ea5b4f2e091
language: node_js
node_js:
  - "8"
before_install:
  - export DISPLAY=:99.0
  - sh -e /etc/init.d/xvfb start &
  - sleep 3
before_script:
  - yarn build
script:
  - yarn test
after_script:
  - cat coverage/lcov.info | codeclimate
  - yarn test --coverage
after_success:
  - cat ./coverage/lcov.info | ./node_modules/.bin/coveralls
7
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,7 @@
{
  "typescript.format.enable": false,
  "typescript.reportStyleChecksAsWarnings": false,
  "typescript.validate.enable": false,
  "javascript.validate.enable": false,
  "editor.formatOnSave": false
}
@@ -22,6 +22,8 @@
**Merged pull requests:**

- Adding weekend ignore to Gantt [\#314](https://github.com/knsv/mermaid/issues/314)
- Adding init argument to the global API [\#137](https://github.com/knsv/mermaid/pull/137) ([bollwyvl](https://github.com/bollwyvl))
- Add description of manual calling of init [\#136](https://github.com/knsv/mermaid/pull/136) ([bollwyvl](https://github.com/bollwyvl))
@@ -1,88 +0,0 @@
# How to contribute

Great that you want to be involved in this project! Contributing is fun and contributions are GREAT! :)

This page is currently a starting point and is not so rigorous to start with.

Some important guidelines:

* The work will be organized using the issues list
* In the list there will be the bugs/enhancements etc we are working with in the project
* There will be milestones outlining the roadmap ahead
* There will be issues marked with help wanted

The issue list and the items marked with **help wanted** are a good starting point if you want to do some work.

## Guidelines for avoiding duplicate work

Contributing is great. It is not so fun when you are done with your issue and, just before you're about to push your change, you can't because someone else just pushed the same fix, so you have wasted your time. The guidelines below are in place to prevent this:

* Comment in the issue that you are working on it. You will then be added as an assignee (eventually).
* When you pick an issue to work on:
  * Check that the issue is not assigned
  * Also check the comments so that no one has started working on it before being officially assigned.

## Submitting changes

Please send a GitHub Pull Request with a clear list of what you've done (read more about pull requests). When you send a pull request, we will love you forever if you include jasmine tests. We can always use more test coverage.

Always write a clear log message for your commits. One-line messages are fine for small changes, but bigger changes should look like this:

    $ git commit -m "A brief summary of the commit
    >
    > A paragraph describing what changed and its impact."

## Coding conventions

Start reading our code and you'll get the hang of it. We optimize for readability:

This is open source software. Consider the people who will read your code, and make it look nice for them. It's sort of like driving a car: Perhaps you love doing donuts when you're alone, but with passengers the goal is to make the ride as smooth as possible.

So that we can consistently serve images from the CDN, always use image_path or image_tag when referring to images. Never prepend "/images/" when using image_path or image_tag.
Also for the CDN, always use cwd-relative paths rather than root-relative paths in image URLs in any CSS. So instead of url('/images/blah.gif'), use url('../images/blah.gif').

# Build instructions

Fork, then:

```
yarn install
```

This installs the dependencies. You use gulp and yarn calls as build tools.

The following targets are probably interesting:

* jison - compiles the jison grammars to parser files

for instance:

```
gulp jison
```

To build:

```
yarn build
```

To run the tests:

```
yarn test
```

Make sure you have the Chrome browser installed. We use Chrome headless for testing.

Manual test:

```
open dist/demo/index.html
```
2
LICENSE
@@ -1,6 +1,6 @@
The MIT License (MIT)

Copyright (c) 2014 Knut Sveidqvist
Copyright (c) 2014 - 2018 Knut Sveidqvist

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
208
README.md
@@ -1,10 +1,10 @@
# mermaid

[Build Status](https://travis-ci.org/knsv/mermaid)
[Code Climate](https://codeclimate.com/github/knsv/mermaid)
[Coverage Status](https://coveralls.io/github/knsv/mermaid?branch=master)
[Join the chat at Gitter](https://gitter.im/knsv/mermaid?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

Generation of diagrams and flowcharts from text in a similar manner as markdown.

@@ -12,31 +12,39 @@ Ever wanted to simplify documentation and avoid heavy tools like Visio when expl

This is why mermaid was born, a simple markdown-like script language for generating charts from text via javascript.

The code below would render the following image
<table>
<tr><th>Code</th><th>Rendered diagram</th></tr>
<tr>
<td>
<pre>
<code>
**Mermaid was nominated and won the JS Open Source Awards (2019) in the category "The most exciting use of technology"!!! Thanks to all involved, people committing pull requests, people answering questions and special thanks to Tyler Long who is helping me maintain the project.**

### Are you someone who wants to take an active role in improving mermaid?

Look at the list of areas we need help with:

* Development - help solving issues
* Development - work with the build environment, with JS we keep updating the tools we use
* Development - new diagram types
* Development - Handling Pull Requests
* Test - testing in connection with releases, regression testing
* Test - verification of fixed issues
* Test - test of pull requests and verification testing
* Release management - more of a PL role, make a roadmap for the project, coordinating the work
* Release management - classification and monitoring of incoming issues

If you think lending a hand to one or more of these areas would be fun, please send an email to knsv@sveido.com!
### Flowchart
|
||||
|
||||
```
|
||||
graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;
|
||||
</code>
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<p align="center">
|
||||
<img src='./img/flow.png' alt='Flowchart'>
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<pre>
|
||||
<code>
|
||||
```
|
||||

|
||||
|
||||
|
||||
### Sequence diagram
|
||||
|
||||
```
|
||||
sequenceDiagram
|
||||
participant Alice
|
||||
participant Bob
|
||||
@@ -44,92 +52,75 @@ sequenceDiagram
|
||||
loop Healthcheck
|
||||
John->>John: Fight against hypochondria
|
||||
end
|
||||
Note right of John: Rational thoughts <br/>prevail...
|
||||
Note right of John: Rational thoughts <br/>prevail!
|
||||
John-->>Alice: Great!
|
||||
John->>Bob: How about you?
|
||||
Bob-->>John: Jolly good!
|
||||
</code>
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<img src='./img/sequence.png' alt='Sequence Diagram'>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<pre>
|
||||
<code>
|
||||
```
|
||||

|
||||
|
||||
|
||||
### Gantt diagram
|
||||
|
||||
```
|
||||
gantt
|
||||
dateFormat YYYY-MM-DD
|
||||
title Adding GANTT diagram to mermaid
|
||||
excludes weekdays 2014-01-10
|
||||
|
||||
section A section
|
||||
Completed task :done, des1, 2014-01-06,2014-01-08
|
||||
Active task :active, des2, 2014-01-09, 3d
|
||||
Future task : des3, after des2, 5d
|
||||
Future task2 : des4, after des3, 5d
|
||||
</code>
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<img src='./img/gantt.png' alt='Gantt Diagram'>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<pre>
|
||||
<code>
|
||||
```
|
||||

|
||||
|
||||
|
||||
### Class diagram - :exclamation: experimental
|
||||
|
||||
```
|
||||
classDiagram
|
||||
Class01 <|-- AveryLongClass : Cool
|
||||
Class03 *-- Class04
|
||||
Class05 o-- Class06
|
||||
Class07 .. Class08
|
||||
Class09 --> C2 : Where am i?
|
||||
Class09 --* C3
|
||||
Class09 --|> Class07
|
||||
Class07 : equals()
|
||||
Class07 : Object[] elementData
|
||||
Class01 : size()
|
||||
Class01 : int chimp
|
||||
Class01 : int gorilla
|
||||
Class08 <--> C2: Cool label
|
||||
</code>
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<img src='./img/class.png' alt='Class Diagram'>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<pre>
|
||||
<code>
|
||||
gitGraph :
|
||||
Class01 <|-- AveryLongClass : Cool
|
||||
Class03 *-- Class04
|
||||
Class05 o-- Class06
|
||||
Class07 .. Class08
|
||||
Class09 --> C2 : Where am i?
|
||||
Class09 --* C3
|
||||
Class09 --|> Class07
|
||||
Class07 : equals()
|
||||
Class07 : Object[] elementData
|
||||
Class01 : size()
|
||||
Class01 : int chimp
|
||||
Class01 : int gorilla
|
||||
Class08 <--> C2: Cool label
|
||||
```
|
||||

|
||||
|
||||
|
||||
### Git graph - :exclamation: experimental
|
||||
|
||||
```
|
||||
gitGraph:
|
||||
options
|
||||
{
|
||||
"key": "value",
|
||||
"nodeWidth": 150,
|
||||
"nodeSpacing" : 150
|
||||
"nodeSpacing": 150,
|
||||
"nodeRadius": 10
|
||||
}
|
||||
end
|
||||
commit
|
||||
branch newbranch
|
||||
checkout newbranch
|
||||
commit
|
||||
commit
|
||||
checkout master
|
||||
commit
|
||||
commit
|
||||
merge newbranch
|
||||
</code>
|
||||
</pre>
|
||||
</td>
|
||||
<td>
|
||||
<img src='./img/git.png' alt='Git Graph'>
|
||||
</td>
|
||||
</tr>
|
||||
commit
|
||||
branch newbranch
|
||||
checkout newbranch
|
||||
commit
|
||||
commit
|
||||
checkout master
|
||||
commit
|
||||
commit
|
||||
merge newbranch
|
||||
|
||||
</table>
|
||||
```
|
||||
|
||||

|
||||
|
||||
|
||||
## Installation

@@ -142,7 +133,7 @@ https://unpkg.com/mermaid@<version>/dist/

Replace `<version>` with the expected version number.

Example: https://unpkg.com/mermaid@7.0.4/dist/
Example: https://unpkg.com/mermaid@7.1.0/dist/

### Node.js

@@ -151,13 +142,17 @@ yarn add mermaid
```

## Further reading
## Documentation

* [Usage](https://mermaidjs.github.io/usage.html)
* [Flowchart syntax](https://mermaidjs.github.io/flowchart.html)
* [Sequence diagram syntax](https://mermaidjs.github.io/sequenceDiagram.html)
* [Mermaid CLI](https://mermaidjs.github.io/mermaidCLI.html)
* [Demos](https://mermaidjs.github.io/demos.html)

https://mermaidjs.github.io

## Sibling projects

- [mermaid CLI](https://github.com/mermaidjs/mermaid.cli)
- [mermaid live editor](https://github.com/mermaidjs/mermaid-live-editor)
- [mermaid webpack demo](https://github.com/mermaidjs/mermaid-webpack-demo)
- [mermaid Parcel demo](https://github.com/mermaidjs/mermaid-parcel-demo)

# Request for assistance

@@ -170,8 +165,7 @@ As part of this team you would get write access to the repository and would
represent the project when answering questions and issues.

Together we could continue the work with things like:
* port the code to es6
* adding more typers of diagrams like mindmaps, ert digrams etc
* adding more types of diagrams like mindmaps, ert diagrams etc
* improving existing diagrams

Don't hesitate to contact me if you want to get involved.

@@ -181,18 +175,12 @@ Don't hesitate to contact me if you want to get involved.

## Setup

Make sure you have the Chrome browser installed; this project uses Chrome headless for running tests.

    yarn install

## Build

    yarn build

If you want real time incremental build:

    yarn watch
    yarn build:watch

## Lint

@@ -209,15 +197,13 @@ We recommend you installing [editor plugins](https://github.com/feross/standard#

Manual test in browser:

    open dist/demo/index.html

Manual test in Node.js:

    node dist/demo/index.js
    open dist/index.html

## Release

For those who have the permission to do so:

Update version number in `package.json`.

    npm publish
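For quick orientation, the browser usage that the Installation section above and the demo pages added later in this diff rely on is just loading `mermaid.js` and initializing it once. A minimal sketch (the unpkg URL pattern is the one given under Installation; everything else follows the demo pages below):

```
// Minimal usage sketch. Assumes mermaid.js is already loaded on the page,
// e.g. from https://unpkg.com/mermaid@<version>/dist/ as described above,
// and that each diagram definition sits inside a <div class="mermaid"> element.
// startOnLoad tells mermaid to find and render all such elements on page load.
mermaid.initialize({ startOnLoad: true })
```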
3
__mocks__/MERMAID.js
Normal file
@@ -0,0 +1,3 @@
export const curveBasis = 'basis'
export const curveLinear = 'linear'
export const curveCardinal = 'cardinal'
38
__mocks__/d3.js
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
let NewD3 = function () {
|
||||
return {
|
||||
append: function () {
|
||||
return NewD3()
|
||||
},
|
||||
attr: function () {
|
||||
return this
|
||||
},
|
||||
style: function () {
|
||||
return this
|
||||
},
|
||||
text: function () {
|
||||
return this
|
||||
},
|
||||
0: {
|
||||
0: {
|
||||
getBBox: function () {
|
||||
return {
|
||||
height: 10,
|
||||
width: 20
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const select = function () {
|
||||
return new NewD3()
|
||||
}
|
||||
|
||||
export const selectAll = function () {
|
||||
return new NewD3()
|
||||
}
|
||||
|
||||
export const curveBasis = 'basis'
|
||||
export const curveLinear = 'linear'
|
||||
export const curveCardinal = 'cardinal'
|
||||
12
babel.config.js
Normal file
@@ -0,0 +1,12 @@
module.exports = {
  presets: [
    [
      '@babel/preset-env',
      {
        targets: {
          node: 'current'
        }
      }
    ]
  ]
}
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
var chalk = require('chalk')
|
||||
var cli = require('../lib/cli.js')
|
||||
var lib = require('../lib')
|
||||
|
||||
cli.parse(process.argv.slice(2), function (err, message, options) {
|
||||
if (err) {
|
||||
console.error(
|
||||
chalk.bold.red('\nYou had errors in your syntax. Use --help for further information.')
|
||||
)
|
||||
err.forEach(function (e) {
|
||||
console.error(e.message)
|
||||
})
|
||||
|
||||
return
|
||||
} else if (message) {
|
||||
console.log(message)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
lib.process(options.files, options, process.exit)
|
||||
})
|
||||
38
bower.json
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"name": "mermaid",
|
||||
"authors": [
|
||||
"knsv <knut@sveido.com>",
|
||||
"Tyler Long (https://github.com/tylerlong)"
|
||||
],
|
||||
"description": "Markdownish syntax for generating flowcharts, sequence diagrams, class diagrams, gantt charts and git graphs.",
|
||||
"main": "dist/mermaid.slim.js",
|
||||
"keywords": [
|
||||
"diagram",
|
||||
"markdown",
|
||||
"flowchart",
|
||||
"sequence diagram",
|
||||
"gantt",
|
||||
"class diagram",
|
||||
"git graph"
|
||||
],
|
||||
"license": "MIT",
|
||||
"ignore": [
|
||||
"*",
|
||||
"!dist/",
|
||||
"!dist/*",
|
||||
"!LICENSE"
|
||||
],
|
||||
"dependencies": {
|
||||
"chalk": "^2.1.0",
|
||||
"d3": "3.5.17",
|
||||
"dagre": "^0.7.4",
|
||||
"dagre-d3-renderer": "^0.1.6",
|
||||
"he": "^1.1.1",
|
||||
"lodash": "^4.17.4",
|
||||
"minimist": "^1.2.0",
|
||||
"mkdirp": "^0.5.1",
|
||||
"moment": "^2.18.1",
|
||||
"semver": "^5.4.1",
|
||||
"which": "^1.3.0"
|
||||
}
|
||||
}
|
||||
106
dist/demo/index.html
vendored
@@ -1,106 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<title>Mermaid Quick Test Page</title>
|
||||
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgo=">
|
||||
<link rel="stylesheet" href="../mermaid.css">
|
||||
</head>
|
||||
<body>
|
||||
<div class="mermaid">
|
||||
graph TD
|
||||
A[Christmas] -->|Get money| B(Go shopping)
|
||||
B --> C{Let me think}
|
||||
C -->|One| D[Laptop]
|
||||
C -->|Two| E[iPhone]
|
||||
C -->|Three| F[Car]
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
sequenceDiagram
|
||||
loop every day
|
||||
Alice->>John: Hello John, how are you?
|
||||
John-->>Alice: Great!
|
||||
end
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
gantt
|
||||
dateFormat YYYY-MM-DD
|
||||
title Adding GANTT diagram to mermaid
|
||||
|
||||
section A section
|
||||
Completed task :done, des1, 2014-01-06,2014-01-08
|
||||
Active task :active, des2, 2014-01-09, 3d
|
||||
Future task : des3, after des2, 5d
|
||||
Future task2 : des4, after des3, 5d
|
||||
|
||||
section Critical tasks
|
||||
Completed task in the critical line :crit, done, 2014-01-06,24h
|
||||
Implement parser and jison :crit, done, after des1, 2d
|
||||
Create tests for parser :crit, active, 3d
|
||||
Future task in critical line :crit, 5d
|
||||
Create tests for renderer :2d
|
||||
Add to mermaid :1d
|
||||
|
||||
section Documentation
|
||||
Describe gantt syntax :active, a1, after des1, 3d
|
||||
Add gantt diagram to demo page :after a1 , 20h
|
||||
Add another diagram to demo page :doc1, after a1 , 48h
|
||||
|
||||
section Last section
|
||||
Describe gantt syntax :after doc1, 3d
|
||||
Add gantt diagram to demo page : 20h
|
||||
Add another diagram to demo page : 48h
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
gitGraph:
|
||||
options
|
||||
{
|
||||
"nodeSpacing": 150,
|
||||
"nodeRadius": 10
|
||||
}
|
||||
end
|
||||
commit
|
||||
branch newbranch
|
||||
checkout newbranch
|
||||
commit
|
||||
commit
|
||||
checkout master
|
||||
commit
|
||||
commit
|
||||
merge newbranch
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
classDiagram
|
||||
Class01 <|-- AveryLongClass : Cool
|
||||
Class03 *-- Class04
|
||||
Class05 o-- Class06
|
||||
Class07 .. Class08
|
||||
Class09 --> C2 : Where am i?
|
||||
Class09 --* C3
|
||||
Class09 --|> Class07
|
||||
Class07 : equals()
|
||||
Class07 : Object[] elementData
|
||||
Class01 : size()
|
||||
Class01 : int chimp
|
||||
Class01 : int gorilla
|
||||
Class08 <--> C2: Cool label
|
||||
</div>
|
||||
<script src="../mermaid.js"></script>
|
||||
<script>
|
||||
mermaid.initialize({startOnLoad: true});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
9
dist/demo/index.js
vendored
@@ -1,9 +0,0 @@
|
||||
const api = require('../mermaidAPI.js')
|
||||
|
||||
const r = api.parse(`sequenceDiagram
|
||||
loop every day
|
||||
Alice->>John: Hello John, how are you?
|
||||
John-->>Alice: Great!
|
||||
end`)
|
||||
|
||||
console.log(r)
|
||||
401
dist/index.html
vendored
Normal file
@@ -0,0 +1,401 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<title>Mermaid Quick Test Page</title>
|
||||
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgo=">
|
||||
</head>
|
||||
<body>
|
||||
<div class="mermaid">
|
||||
graph LR
|
||||
sid-B3655226-6C29-4D00-B685-3D5C734DC7E1["
|
||||
|
||||
提交申请
|
||||
熊大
|
||||
"];
|
||||
class sid-B3655226-6C29-4D00-B685-3D5C734DC7E1 node-executed;
|
||||
sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A["
|
||||
负责人审批
|
||||
强子
|
||||
"];
|
||||
class sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A node-executed;
|
||||
sid-E27C0367-E6D6-497F-9736-3CDC21FDE221["
|
||||
DBA审批
|
||||
强子
|
||||
"];
|
||||
class sid-E27C0367-E6D6-497F-9736-3CDC21FDE221 node-executed;
|
||||
sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD["
|
||||
SA审批
|
||||
阿美
|
||||
"];
|
||||
class sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD node-executed;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7["
|
||||
主管审批
|
||||
光头强
|
||||
"];
|
||||
class sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7 node-executed;
|
||||
sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89["
|
||||
DBA确认
|
||||
强子
|
||||
"];
|
||||
class sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89 node-executed;
|
||||
sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937["
|
||||
SA确认
|
||||
阿美
|
||||
"];
|
||||
class sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937 node-executed;
|
||||
sid-4FC27B48-A6F9-460A-A675-021F5854FE22["
|
||||
结束
|
||||
"];
|
||||
class sid-4FC27B48-A6F9-460A-A675-021F5854FE22 node-executed;
|
||||
sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E["
|
||||
SA执行1
|
||||
强子
|
||||
"];
|
||||
class sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E node-executed;
|
||||
sid-6C2120F3-D940-4958-A067-0903DCE879C4["
|
||||
SA执行2
|
||||
强子
|
||||
"];
|
||||
class sid-6C2120F3-D940-4958-A067-0903DCE879C4 node-executed;
|
||||
sid-9180E2A0-5C4B-435F-B42F-0D152470A338["
|
||||
DBA执行1
|
||||
强子
|
||||
"];
|
||||
class sid-9180E2A0-5C4B-435F-B42F-0D152470A338 node-executed;
|
||||
sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD["
|
||||
DBA执行3
|
||||
强子
|
||||
"];
|
||||
class sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD node-executed;
|
||||
sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756["
|
||||
DBA执行2
|
||||
强子
|
||||
"];
|
||||
class sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756 node-executed;
|
||||
sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93["
|
||||
DBA执行4
|
||||
强子
|
||||
"];
|
||||
class sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93 node-executed;
|
||||
sid-1897B30A-9C5C-4D5B-B80B-76A038785070["
|
||||
负责人确认
|
||||
梁静茹
|
||||
"];
|
||||
class sid-1897B30A-9C5C-4D5B-B80B-76A038785070 node-executed;
|
||||
sid-B3655226-6C29-4D00-B685-3D5C734DC7E1-->sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7;
|
||||
sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A-->sid-1897B30A-9C5C-4D5B-B80B-76A038785070;
|
||||
sid-E27C0367-E6D6-497F-9736-3CDC21FDE221-->sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89;
|
||||
sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD-->sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937;
|
||||
sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E-->sid-6C2120F3-D940-4958-A067-0903DCE879C4;
|
||||
sid-9180E2A0-5C4B-435F-B42F-0D152470A338-->sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756;
|
||||
sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD-->sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93;
|
||||
sid-6C2120F3-D940-4958-A067-0903DCE879C4-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-1897B30A-9C5C-4D5B-B80B-76A038785070-->sid-4FC27B48-A6F9-460A-A675-021F5854FE22;
|
||||
sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937-->sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E;
|
||||
sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89-->sid-9180E2A0-5C4B-435F-B42F-0D152470A338;
|
||||
sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89-->sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD;
|
||||
sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-E27C0367-E6D6-497F-9736-3CDC21FDE221;
|
||||
sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937-->sid-6C2120F3-D940-4958-A067-0903DCE879C4;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-4FC27B48-A6F9-460A-A675-021F5854FE22;
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
graph TD
|
||||
A[Christmas] -->|Get money| B(Go shopping)
|
||||
B --> C{Let me thinksssss<br/>ssssssssssssssssssssss<br/>sssssssssssssssssssssssssss}
|
||||
C -->|One| D[Laptop]
|
||||
C -->|Two| E[iPhone]
|
||||
C -->|Three| F[Car]
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
graph LR
|
||||
47(SAM.CommonFA.FMESummary)-->48(SAM.CommonFA.CommonFAFinanceBudget)
|
||||
37(SAM.CommonFA.BudgetSubserviceLineVolume)-->48(SAM.CommonFA.CommonFAFinanceBudget)
|
||||
35(SAM.CommonFA.PopulationFME)-->47(SAM.CommonFA.FMESummary)
|
||||
41(SAM.CommonFA.MetricCost)-->47(SAM.CommonFA.FMESummary)
|
||||
44(SAM.CommonFA.MetricOutliers)-->47(SAM.CommonFA.FMESummary)
|
||||
46(SAM.CommonFA.MetricOpportunity)-->47(SAM.CommonFA.FMESummary)
|
||||
40(SAM.CommonFA.OPVisits)-->47(SAM.CommonFA.FMESummary)
|
||||
38(SAM.CommonFA.CommonFAFinanceRefund)-->47(SAM.CommonFA.FMESummary)
|
||||
43(SAM.CommonFA.CommonFAFinancePicuDays)-->47(SAM.CommonFA.FMESummary)
|
||||
42(SAM.CommonFA.CommonFAFinanceNurseryDays)-->47(SAM.CommonFA.FMESummary)
|
||||
45(SAM.CommonFA.MetricPreOpportunity)-->46(SAM.CommonFA.MetricOpportunity)
|
||||
35(SAM.CommonFA.PopulationFME)-->45(SAM.CommonFA.MetricPreOpportunity)
|
||||
41(SAM.CommonFA.MetricCost)-->45(SAM.CommonFA.MetricPreOpportunity)
|
||||
41(SAM.CommonFA.MetricCost)-->44(SAM.CommonFA.MetricOutliers)
|
||||
39(SAM.CommonFA.ChargeDetails)-->43(SAM.CommonFA.CommonFAFinancePicuDays)
|
||||
39(SAM.CommonFA.ChargeDetails)-->42(SAM.CommonFA.CommonFAFinanceNurseryDays)
|
||||
39(SAM.CommonFA.ChargeDetails)-->41(SAM.CommonFA.MetricCost)
|
||||
39(SAM.CommonFA.ChargeDetails)-->40(SAM.CommonFA.OPVisits)
|
||||
35(SAM.CommonFA.PopulationFME)-->39(SAM.CommonFA.ChargeDetails)
|
||||
36(SAM.CommonFA.PremetricCost)-->39(SAM.CommonFA.ChargeDetails)
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
graph TD
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002("fa:fa-creative-commons My System | Test Environment")
|
||||
82072290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Business Logic Server:Service 1")
|
||||
db052290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Business Logic Server:Service 2")
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Report Server:Service 1")
|
||||
30122290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Report Server:Service 2")
|
||||
5e112290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Dedicated Test Business Logic Server:Service 1")
|
||||
c1112290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Dedicated Test Business Logic Server:Service 2")
|
||||
b7042290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\SharedDbInstance].[SupportDb]")
|
||||
8f102290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\SharedDbInstance].[DevelopmentDb]")
|
||||
0e102290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\SharedDbInstance].[TestDb]")
|
||||
07132290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\SharedDbInstance].[SharedReportingDb]")
|
||||
c7072290_1ec3_e711_8c5a_005056ad0002("fa:fa-server Shared Business Logic Server")
|
||||
ca122290_1ec3_e711_8c5a_005056ad0002("fa:fa-server Shared Report Server")
|
||||
68102290_1ec3_e711_8c5a_005056ad0002("fa:fa-server Dedicated Test Business Logic Server")
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002("fa:fa-database [DBServer\SharedDbInstance]")
|
||||
d6072290_1ec3_e711_8c5a_005056ad0002("fa:fa-server DBServer")
|
||||
71082290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs DBServer\:MSSQLSERVER")
|
||||
c0102290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs DBServer\:SQLAgent")
|
||||
9a072290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs DBServer\:SQLBrowser")
|
||||
1d0a2290_1ec3_e711_8c5a_005056ad0002("fa:fa-server VmHost1")
|
||||
200a2290_1ec3_e711_8c5a_005056ad0002("fa:fa-server VmHost2")
|
||||
1c0a2290_1ec3_e711_8c5a_005056ad0002("fa:fa-server VmHost3")
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->82072290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->db052290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->4e112290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->30122290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->5e112290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->c1112290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->b7042290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->8f102290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->0e102290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->c7072290_1ec3_e711_8c5a_005056ad0002
|
||||
db052290_1ec3_e711_8c5a_005056ad0002-->c7072290_1ec3_e711_8c5a_005056ad0002
|
||||
db052290_1ec3_e711_8c5a_005056ad0002-->82072290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->b7042290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->8f102290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->0e102290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->07132290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->ca122290_1ec3_e711_8c5a_005056ad0002
|
||||
30122290_1ec3_e711_8c5a_005056ad0002-->ca122290_1ec3_e711_8c5a_005056ad0002
|
||||
30122290_1ec3_e711_8c5a_005056ad0002-->4e112290_1ec3_e711_8c5a_005056ad0002
|
||||
5e112290_1ec3_e711_8c5a_005056ad0002-->8f102290_1ec3_e711_8c5a_005056ad0002
|
||||
5e112290_1ec3_e711_8c5a_005056ad0002-->68102290_1ec3_e711_8c5a_005056ad0002
|
||||
c1112290_1ec3_e711_8c5a_005056ad0002-->68102290_1ec3_e711_8c5a_005056ad0002
|
||||
c1112290_1ec3_e711_8c5a_005056ad0002-->5e112290_1ec3_e711_8c5a_005056ad0002
|
||||
b7042290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
8f102290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
0e102290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
07132290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
c7072290_1ec3_e711_8c5a_005056ad0002-->1d0a2290_1ec3_e711_8c5a_005056ad0002
|
||||
ca122290_1ec3_e711_8c5a_005056ad0002-->200a2290_1ec3_e711_8c5a_005056ad0002
|
||||
68102290_1ec3_e711_8c5a_005056ad0002-->1c0a2290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->71082290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->c0102290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->9a072290_1ec3_e711_8c5a_005056ad0002
|
||||
d6072290_1ec3_e711_8c5a_005056ad0002-->1c0a2290_1ec3_e711_8c5a_005056ad0002
|
||||
71082290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
c0102290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
c0102290_1ec3_e711_8c5a_005056ad0002-->71082290_1ec3_e711_8c5a_005056ad0002
|
||||
9a072290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
9a072290_1ec3_e711_8c5a_005056ad0002-->71082290_1ec3_e711_8c5a_005056ad0002
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
graph TB
|
||||
subgraph One
|
||||
a1-->a2
|
||||
end
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
graph TB
|
||||
A
|
||||
B
|
||||
subgraph foo[Foo SubGraph]
|
||||
C
|
||||
D
|
||||
end
|
||||
subgraph bar[Bar SubGraph]
|
||||
E
|
||||
F
|
||||
end
|
||||
G
|
||||
|
||||
A-->B
|
||||
B-->C
|
||||
C-->D
|
||||
B-->D
|
||||
D-->E
|
||||
E-->A
|
||||
E-->F
|
||||
F-->D
|
||||
F-->G
|
||||
B-->G
|
||||
G-->D
|
||||
|
||||
style foo fill:#F99,stroke-width:2px,stroke:#F0F
|
||||
style bar fill:#999,stroke-width:10px,stroke:#0F0
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
graph LR
|
||||
456ac9b0d15a8b7f1e71073221059886[1051 AAA fa:fa-check]
|
||||
f7f580e11d00a75814d2ded41fe8e8fe[1141 BBB fa:fa-check]
|
||||
81dc9bdb52d04dc20036dbd8313ed055[1234 CCC fa:fa-check]
|
||||
456ac9b0d15a8b7f1e71073221059886 -->|Node| f7f580e11d00a75814d2ded41fe8e8fe
|
||||
f7f580e11d00a75814d2ded41fe8e8fe -->|Node| 81dc9bdb52d04dc20036dbd8313ed055
|
||||
click 456ac9b0d15a8b7f1e71073221059886 "/admin/user/view?id=1051" "AAA
|
||||
6000"
|
||||
click f7f580e11d00a75814d2ded41fe8e8fe "/admin/user/view?id=1141" "BBB
|
||||
600"
|
||||
click 81dc9bdb52d04dc20036dbd8313ed055 "/admin/user/view?id=1234" "CCC
|
||||
3000"
|
||||
style 456ac9b0d15a8b7f1e71073221059886 fill:#f9f,stroke:#333,stroke-width:4px
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
graph TD
|
||||
A[Christmas] -->|Get money| B(Go shopping)
|
||||
B --> C{Let me think}
|
||||
C -->|One| D[Laptop]
|
||||
C -->|Two| E[iPhone]
|
||||
C -->|Three| F[Car]
|
||||
click A "index.html#link-clicked" "link test"
|
||||
click B testClick "click test"
|
||||
classDef someclass fill:#f96;
|
||||
class A someclass;
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
sequenceDiagram
|
||||
participant Alice
|
||||
participant Bob
|
||||
participant John as John<br/>Second Line
|
||||
Alice ->> Bob: Hello Bob, how are you?
|
||||
Bob-->>John: How about you John?
|
||||
Bob--x Alice: I am good thanks!
|
||||
Bob-x John: I am good thanks!
|
||||
Note right of John: Bob thinks a long<br/>long time, so long<br/>that the text does<br/>not fit on a row.
|
||||
Bob-->Alice: Checking with John...
|
||||
alt either this
|
||||
Alice->>John: Yes
|
||||
else or this
|
||||
Alice->>John: No
|
||||
else or this will happen
|
||||
Alice->John: Maybe
|
||||
end
|
||||
par this happens in parallel
|
||||
Alice -->> Bob: Parallel message 1
|
||||
and
|
||||
Alice -->> John: Parallel message 2
|
||||
end
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
gantt
|
||||
dateFormat YYYY-MM-DD
|
||||
axisFormat %d/%m
|
||||
title Adding GANTT diagram to mermaid
|
||||
excludes weekdays 2014-01-10
|
||||
|
||||
section A section
|
||||
Completed task :done, des1, 2014-01-06,2014-01-08
|
||||
Active task :active, des2, 2014-01-09, 3d
|
||||
Future task : des3, after des2, 5d
|
||||
Future task2 : des4, after des3, 5d
|
||||
|
||||
section Critical tasks
|
||||
Completed task in the critical line :crit, done, 2014-01-06,24h
|
||||
Implement parser and jison :crit, done, after des1, 2d
|
||||
Create tests for parser :crit, active, 3d
|
||||
Future task in critical line :crit, 5d
|
||||
Create tests for renderer :2d
|
||||
Add to mermaid :1d
|
||||
|
||||
section Documentation
|
||||
Describe gantt syntax :active, a1, after des1, 3d
|
||||
Add gantt diagram to demo page :after a1 , 20h
|
||||
Add another diagram to demo page :doc1, after a1 , 48h
|
||||
|
||||
section Clickable
|
||||
Visit mermaidjs :active, cl1, 2014-01-07,2014-01-10
|
||||
Calling a Callback (look at the console log) :cl2, after cl1, 3d
|
||||
|
||||
click cl1 href "https://mermaidjs.github.io/"
|
||||
click cl2 call ganttTestClick("test", test, test)
|
||||
|
||||
section Last section
|
||||
Describe gantt syntax :after doc1, 3d
|
||||
Add gantt diagram to demo page : 20h
|
||||
Add another diagram to demo page : 48h
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
gitGraph:
|
||||
options
|
||||
{
|
||||
"nodeSpacing": 150,
|
||||
"nodeRadius": 10
|
||||
}
|
||||
end
|
||||
commit
|
||||
branch newbranch
|
||||
checkout newbranch
|
||||
commit
|
||||
commit
|
||||
checkout master
|
||||
commit
|
||||
commit
|
||||
merge newbranch
|
||||
</div>
|
||||
|
||||
<hr/>
|
||||
|
||||
<div class="mermaid">
|
||||
classDiagram
|
||||
Class01 <|-- AveryLongClass : Cool
|
||||
Class03 "0" *-- "0..n" Class04
|
||||
Class05 "1" o-- "many" Class06
|
||||
Class07 .. Class08
|
||||
Class09 "many" --> "1" C2 : Where am i?
|
||||
Class09 "0" --* "1..n" C3
|
||||
Class09 --|> Class07
|
||||
Class07 : equals()
|
||||
Class07 : Object[] elementData
|
||||
Class01 : size()
|
||||
Class01 : int chimp
|
||||
Class01 : int gorilla
|
||||
Class08 <--> C2: Cool label
|
||||
</div>
|
||||
<script src="./mermaid.js"></script>
|
||||
<script>
|
||||
mermaid.initialize({
|
||||
theme: 'forest',
|
||||
// themeCSS: '.node rect { fill: red; }',
|
||||
logLevel: 3,
|
||||
flowchart: { curve: 'linear' },
|
||||
gantt: { axisFormat: '%m/%d/%Y' },
|
||||
sequence: { actorMargin: 50 },
|
||||
// sequenceDiagram: { actorMargin: 300 } // deprecated
|
||||
});
|
||||
</script>
|
||||
<script>
|
||||
function ganttTestClick(a, b, c){
|
||||
console.log("a:", a)
|
||||
console.log("b:", b)
|
||||
console.log("c:", c)
|
||||
}
|
||||
function testClick(nodeId) {
|
||||
console.log("clicked", nodeId)
|
||||
var originalBgColor = document.querySelector('body').style.backgroundColor
|
||||
document.querySelector('body').style.backgroundColor = 'yellow'
|
||||
setTimeout(function() {
|
||||
document.querySelector('body').style.backgroundColor = originalBgColor
|
||||
}, 100)
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
41
dist/info.html
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<title>Mermaid Quick Test Page</title>
|
||||
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgo=">
|
||||
</head>
|
||||
<body>
|
||||
<div class="mermaid">info
|
||||
showInfo
|
||||
</div>
|
||||
<script src="./mermaid.js"></script>
|
||||
<script>
|
||||
mermaid.initialize({
|
||||
theme: 'forest',
|
||||
// themeCSS: '.node rect { fill: red; }',
|
||||
logLevel: 1,
|
||||
flowchart: { curve: 'linear' },
|
||||
gantt: { axisFormat: '%m/%d/%Y' },
|
||||
sequence: { actorMargin: 50 },
|
||||
// sequenceDiagram: { actorMargin: 300 } // deprecated
|
||||
});
|
||||
</script>
|
||||
<script>
|
||||
function ganttTestClick(a, b, c){
|
||||
console.log("a:", a)
|
||||
console.log("b:", b)
|
||||
console.log("c:", c)
|
||||
}
|
||||
function testClick(nodeId) {
|
||||
console.log("clicked", nodeId)
|
||||
var originalBgColor = document.querySelector('body').style.backgroundColor
|
||||
document.querySelector('body').style.backgroundColor = 'yellow'
|
||||
setTimeout(function() {
|
||||
document.querySelector('body').style.backgroundColor = originalBgColor
|
||||
}, 100)
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
9
e2e/README.md
Normal file
@@ -0,0 +1,9 @@
# End to end tests

These tests are end to end tests in the sense that they actually render a full diagram in the browser. The purpose of these tests is to simplify handling of merge requests and releases by highlighting possible unexpected side-effects.

Apart from being rendered in a browser, the tests perform image snapshots of the diagrams. They are handled in the same way as regular jest snapshot tests, with the difference that an image comparison is performed instead of a comparison of the generated code.

## To run the tests

1. Start the dev server by running **yarn dev**
2. Run **yarn e2e** to run the tests (a sketch of such a spec follows below)
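A spec in this setup boils down to handing a diagram definition to the `imgSnapshotTest` helper defined in `e2e/helpers/util.js` just below. A minimal sketch in the style of the spec files added in this diff (the diagram text is an illustrative placeholder):

```
/* eslint-env jest */
// Minimal sketch of an e2e image-snapshot spec, following the helper and
// spec files added in this diff. `page` is provided by the jest-puppeteer preset.
import { imgSnapshotTest } from '../helpers/util.js'
const { toMatchImageSnapshot } = require('jest-image-snapshot')

expect.extend({ toMatchImageSnapshot })

describe('Example diagram', () => {
  it('renders and matches the stored image snapshot', async () => {
    await imgSnapshotTest(page, `
      graph TD
      A[Start] --> B[Finish]
    `, {})
  })
})
```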
26
e2e/helpers/util.js
Normal file
@@ -0,0 +1,26 @@
/* eslint-env jest */
import { Base64 } from 'js-base64'

export const mermaidUrl = (graphStr, options) => {
  const obj = {
    code: graphStr,
    mermaid: options
  }
  const objStr = JSON.stringify(obj)
  // console.log(Base64)
  return 'http://localhost:9000/e2e.html?graph=' + Base64.encodeURI(objStr)
}

export const imgSnapshotTest = async (page, graphStr, options) => {
  return new Promise(async resolve => {
    const url = mermaidUrl(graphStr, options)

    await page.goto(url)

    const image = await page.screenshot()

    expect(image).toMatchImageSnapshot()
    resolve()
  })
  // page.close()
}
11
e2e/jest.config.js
Normal file
@@ -0,0 +1,11 @@
// jest.config.js
module.exports = {
  // verbose: true,
  transform: {
    '^.+\\.jsx?$': '../transformer.js'
  },
  preset: 'jest-puppeteer',
  'globalSetup': 'jest-environment-puppeteer/setup',
  'globalTeardown': 'jest-environment-puppeteer/teardown',
  'testEnvironment': 'jest-environment-puppeteer'
}
10
e2e/platform/bundle-test.js
Normal file
@@ -0,0 +1,10 @@
|
||||
import mermaid from '../../dist/mermaid.core'
|
||||
|
||||
mermaid.initialize({
|
||||
theme: 'forest',
|
||||
gantt: { axisFormatter: [
|
||||
['%Y-%m-%d', (d) => {
|
||||
return d.getDay() === 1
|
||||
}]
|
||||
] }
|
||||
})
|
||||
19
e2e/platform/e2e.html
Normal file
@@ -0,0 +1,19 @@
|
||||
<html>
|
||||
<head>
|
||||
<script src="/e2e.js"></script>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css?family=Montserrat&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<style></style>
|
||||
</head>
|
||||
<body>
|
||||
<script src="./mermaid.js"></script>
|
||||
<script>
|
||||
mermaid.initialize({
|
||||
startOnLoad: false,
|
||||
useMaxWidth: true,
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
37
e2e/platform/viewer.js
Normal file
@@ -0,0 +1,37 @@
|
||||
import { Base64 } from 'js-base64'
|
||||
|
||||
/**
|
||||
* ##contentLoaded
|
||||
* Callback function that is called when page is loaded. This functions fetches configuration for mermaid rendering and
|
||||
* calls init for rendering the mermaid diagrams on the page.
|
||||
*/
|
||||
const contentLoaded = function () {
|
||||
let pos = document.location.href.indexOf('?graph=')
|
||||
if (pos > 0) {
|
||||
pos = pos + 7
|
||||
const graphBase64 = document.location.href.substr(pos)
|
||||
const graphObj = JSON.parse(Base64.decode(graphBase64))
|
||||
// const graph = 'hello'
|
||||
console.log(graphObj)
|
||||
const div = document.createElement('div')
|
||||
div.id = 'block'
|
||||
div.className = 'mermaid'
|
||||
div.innerHTML = graphObj.code
|
||||
document.getElementsByTagName('body')[0].appendChild(div)
|
||||
global.mermaid.initialize(graphObj.mermaid)
|
||||
global.mermaid.init()
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof document !== 'undefined') {
|
||||
/*!
|
||||
* Wait for document loaded before starting the execution
|
||||
*/
|
||||
window.addEventListener(
|
||||
'load',
|
||||
function () {
|
||||
contentLoaded()
|
||||
},
|
||||
false
|
||||
)
|
||||
}
|
||||
23
e2e/platform/webpackUsage.html
Normal file
@@ -0,0 +1,23 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
|
||||
<body>
|
||||
<div class="mermaid">
|
||||
graph LR
|
||||
A-->B
|
||||
</div>
|
||||
<div class="mermaid">
|
||||
gantt
|
||||
title A Gantt Diagram
|
||||
dateFormat YYYY-MM-DD
|
||||
section Section
|
||||
A task :a1, 2014-01-01, 30d
|
||||
Another task :after a1 , 20d
|
||||
section Another
|
||||
Task in sec :2014-01-12 , 12d
|
||||
another task : 24d
|
||||
</div>
|
||||
<script src="./bundle-test.js" charset="utf-8"></script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
16 image files added (binary; sizes from 3.2 KiB to 41 KiB).
27
e2e/spec/classDiagram.spec.js
Normal file
@@ -0,0 +1,27 @@
|
||||
/* eslint-env jest */
|
||||
import { imgSnapshotTest } from '../helpers/util.js'
|
||||
const { toMatchImageSnapshot } = require('jest-image-snapshot')
|
||||
|
||||
expect.extend({ toMatchImageSnapshot })
|
||||
|
||||
describe('Sequencediagram', () => {
|
||||
it('should render a simple class diagrams', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
classDiagram
|
||||
Class01 <|-- AveryLongClass : Cool
|
||||
Class03 *-- Class04
|
||||
Class05 o-- Class06
|
||||
Class07 .. Class08
|
||||
Class09 --> C2 : Where am i?
|
||||
Class09 --* C3
|
||||
Class09 --|> Class07
|
||||
Class07 : equals()
|
||||
Class07 : Object[] elementData
|
||||
Class01 : size()
|
||||
Class01 : int chimp
|
||||
Class01 : int gorilla
|
||||
Class08 <--> C2: Cool label
|
||||
`,
|
||||
{})
|
||||
})
|
||||
})
|
||||
267
e2e/spec/flowchart.spec.js
Normal file
@@ -0,0 +1,267 @@
|
||||
/* eslint-env jest */
|
||||
import { imgSnapshotTest } from '../helpers/util.js'
|
||||
const { toMatchImageSnapshot } = require('jest-image-snapshot')
|
||||
|
||||
expect.extend({ toMatchImageSnapshot })
|
||||
|
||||
describe('Flowcart', () => {
|
||||
it('should render a simple flowchart', async () => {
|
||||
await imgSnapshotTest(page, `graph TD
|
||||
A[Christmas] -->|Get money| B(Go shopping)
|
||||
B --> C{Let me think}
|
||||
C -->|One| D[Laptop]
|
||||
C -->|Two| E[iPhone]
|
||||
C -->|Three| F[fa:fa-car Car]
|
||||
`,
|
||||
{})
|
||||
})
|
||||
it('should render a simple flowchart with line breaks', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
graph TD
|
||||
A[Christmas] -->|Get money| B(Go shopping)
|
||||
B --> C{Let me thinksssss<br/>ssssssssssssssssssssss<br/>sssssssssssssssssssssssssss}
|
||||
C -->|One| D[Laptop]
|
||||
C -->|Two| E[iPhone]
|
||||
C -->|Three| F[Car]
|
||||
`,
|
||||
{})
|
||||
})
|
||||
|
||||
it('should render a flowchart full of circles', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
graph LR
|
||||
47(SAM.CommonFA.FMESummary)-->48(SAM.CommonFA.CommonFAFinanceBudget)
|
||||
37(SAM.CommonFA.BudgetSubserviceLineVolume)-->48(SAM.CommonFA.CommonFAFinanceBudget)
|
||||
35(SAM.CommonFA.PopulationFME)-->47(SAM.CommonFA.FMESummary)
|
||||
41(SAM.CommonFA.MetricCost)-->47(SAM.CommonFA.FMESummary)
|
||||
44(SAM.CommonFA.MetricOutliers)-->47(SAM.CommonFA.FMESummary)
|
||||
46(SAM.CommonFA.MetricOpportunity)-->47(SAM.CommonFA.FMESummary)
|
||||
40(SAM.CommonFA.OPVisits)-->47(SAM.CommonFA.FMESummary)
|
||||
38(SAM.CommonFA.CommonFAFinanceRefund)-->47(SAM.CommonFA.FMESummary)
|
||||
43(SAM.CommonFA.CommonFAFinancePicuDays)-->47(SAM.CommonFA.FMESummary)
|
||||
42(SAM.CommonFA.CommonFAFinanceNurseryDays)-->47(SAM.CommonFA.FMESummary)
|
||||
45(SAM.CommonFA.MetricPreOpportunity)-->46(SAM.CommonFA.MetricOpportunity)
|
||||
35(SAM.CommonFA.PopulationFME)-->45(SAM.CommonFA.MetricPreOpportunity)
|
||||
41(SAM.CommonFA.MetricCost)-->45(SAM.CommonFA.MetricPreOpportunity)
|
||||
41(SAM.CommonFA.MetricCost)-->44(SAM.CommonFA.MetricOutliers)
|
||||
39(SAM.CommonFA.ChargeDetails)-->43(SAM.CommonFA.CommonFAFinancePicuDays)
|
||||
39(SAM.CommonFA.ChargeDetails)-->42(SAM.CommonFA.CommonFAFinanceNurseryDays)
|
||||
39(SAM.CommonFA.ChargeDetails)-->41(SAM.CommonFA.MetricCost)
|
||||
39(SAM.CommonFA.ChargeDetails)-->40(SAM.CommonFA.OPVisits)
|
||||
35(SAM.CommonFA.PopulationFME)-->39(SAM.CommonFA.ChargeDetails)
|
||||
36(SAM.CommonFA.PremetricCost)-->39(SAM.CommonFA.ChargeDetails)
|
||||
`,
|
||||
{})
|
||||
})
|
||||
it('should render a flowchart full of icons', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
graph TD
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002("fa:fa-creative-commons My System | Test Environment")
|
||||
82072290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Business Logic Server:Service 1")
|
||||
db052290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Business Logic Server:Service 2")
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Report Server:Service 1")
|
||||
30122290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Shared Report Server:Service 2")
|
||||
5e112290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Dedicated Test Business Logic Server:Service 1")
|
||||
c1112290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs Dedicated Test Business Logic Server:Service 2")
|
||||
b7042290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\\SharedDbInstance].[SupportDb]")
|
||||
8f102290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\\SharedDbInstance].[DevelopmentDb]")
|
||||
0e102290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\\SharedDbInstance].[TestDb]")
|
||||
07132290_1ec3_e711_8c5a_005056ad0002("fa:fa-circle [DBServer\\SharedDbInstance].[SharedReportingDb]")
|
||||
c7072290_1ec3_e711_8c5a_005056ad0002("fa:fa-server Shared Business Logic Server")
|
||||
ca122290_1ec3_e711_8c5a_005056ad0002("fa:fa-server Shared Report Server")
|
||||
68102290_1ec3_e711_8c5a_005056ad0002("fa:fa-server Dedicated Test Business Logic Server")
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002("fa:fa-database [DBServer\\SharedDbInstance]")
|
||||
d6072290_1ec3_e711_8c5a_005056ad0002("fa:fa-server DBServer")
|
||||
71082290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs DBServer\\:MSSQLSERVER")
|
||||
c0102290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs DBServer\\:SQLAgent")
|
||||
9a072290_1ec3_e711_8c5a_005056ad0002("fa:fa-cogs DBServer\\:SQLBrowser")
|
||||
1d0a2290_1ec3_e711_8c5a_005056ad0002("fa:fa-server VmHost1")
|
||||
200a2290_1ec3_e711_8c5a_005056ad0002("fa:fa-server VmHost2")
|
||||
1c0a2290_1ec3_e711_8c5a_005056ad0002("fa:fa-server VmHost3")
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->82072290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->db052290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->4e112290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->30122290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->5e112290_1ec3_e711_8c5a_005056ad0002
|
||||
9e122290_1ec3_e711_8c5a_005056ad0002-->c1112290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->b7042290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->8f102290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->0e102290_1ec3_e711_8c5a_005056ad0002
|
||||
82072290_1ec3_e711_8c5a_005056ad0002-->c7072290_1ec3_e711_8c5a_005056ad0002
|
||||
db052290_1ec3_e711_8c5a_005056ad0002-->c7072290_1ec3_e711_8c5a_005056ad0002
|
||||
db052290_1ec3_e711_8c5a_005056ad0002-->82072290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->b7042290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->8f102290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->0e102290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->07132290_1ec3_e711_8c5a_005056ad0002
|
||||
4e112290_1ec3_e711_8c5a_005056ad0002-->ca122290_1ec3_e711_8c5a_005056ad0002
|
||||
30122290_1ec3_e711_8c5a_005056ad0002-->ca122290_1ec3_e711_8c5a_005056ad0002
|
||||
30122290_1ec3_e711_8c5a_005056ad0002-->4e112290_1ec3_e711_8c5a_005056ad0002
|
||||
5e112290_1ec3_e711_8c5a_005056ad0002-->8f102290_1ec3_e711_8c5a_005056ad0002
|
||||
5e112290_1ec3_e711_8c5a_005056ad0002-->68102290_1ec3_e711_8c5a_005056ad0002
|
||||
c1112290_1ec3_e711_8c5a_005056ad0002-->68102290_1ec3_e711_8c5a_005056ad0002
|
||||
c1112290_1ec3_e711_8c5a_005056ad0002-->5e112290_1ec3_e711_8c5a_005056ad0002
|
||||
b7042290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
8f102290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
0e102290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
07132290_1ec3_e711_8c5a_005056ad0002-->f4112290_1ec3_e711_8c5a_005056ad0002
|
||||
c7072290_1ec3_e711_8c5a_005056ad0002-->1d0a2290_1ec3_e711_8c5a_005056ad0002
|
||||
ca122290_1ec3_e711_8c5a_005056ad0002-->200a2290_1ec3_e711_8c5a_005056ad0002
|
||||
68102290_1ec3_e711_8c5a_005056ad0002-->1c0a2290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->71082290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->c0102290_1ec3_e711_8c5a_005056ad0002
|
||||
f4112290_1ec3_e711_8c5a_005056ad0002-->9a072290_1ec3_e711_8c5a_005056ad0002
|
||||
d6072290_1ec3_e711_8c5a_005056ad0002-->1c0a2290_1ec3_e711_8c5a_005056ad0002
|
||||
71082290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
c0102290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
c0102290_1ec3_e711_8c5a_005056ad0002-->71082290_1ec3_e711_8c5a_005056ad0002
|
||||
9a072290_1ec3_e711_8c5a_005056ad0002-->d6072290_1ec3_e711_8c5a_005056ad0002
|
||||
9a072290_1ec3_e711_8c5a_005056ad0002-->71082290_1ec3_e711_8c5a_005056ad0002
|
||||
`,
|
||||
{})
|
||||
})
|
||||
|
||||
it('should render subgraphs', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
graph TB
|
||||
subgraph One
|
||||
a1-->a2
|
||||
end
|
||||
`,
|
||||
{})
|
||||
})
|
||||
|
||||
it('should render styled subgraphs', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
graph TB
|
||||
A
|
||||
B
|
||||
subgraph foo[Foo SubGraph]
|
||||
C
|
||||
D
|
||||
end
|
||||
subgraph bar[Bar SubGraph]
|
||||
E
|
||||
F
|
||||
end
|
||||
G
|
||||
|
||||
A-->B
|
||||
B-->C
|
||||
C-->D
|
||||
B-->D
|
||||
D-->E
|
||||
E-->A
|
||||
E-->F
|
||||
F-->D
|
||||
F-->G
|
||||
B-->G
|
||||
G-->D
|
||||
|
||||
style foo fill:#F99,stroke-width:2px,stroke:#F0F
|
||||
style bar fill:#999,stroke-width:10px,stroke:#0F0
|
||||
`,
|
||||
{})
|
||||
})
|
||||
|
||||
it('should render a flowchart with long names and class definitions', async () => {
|
||||
await imgSnapshotTest(page, `graph LR
|
||||
sid-B3655226-6C29-4D00-B685-3D5C734DC7E1["
|
||||
|
||||
提交申请
|
||||
熊大
|
||||
"];
|
||||
class sid-B3655226-6C29-4D00-B685-3D5C734DC7E1 node-executed;
|
||||
sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A["
|
||||
负责人审批
|
||||
强子
|
||||
"];
|
||||
class sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A node-executed;
|
||||
sid-E27C0367-E6D6-497F-9736-3CDC21FDE221["
|
||||
DBA审批
|
||||
强子
|
||||
"];
|
||||
class sid-E27C0367-E6D6-497F-9736-3CDC21FDE221 node-executed;
|
||||
sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD["
|
||||
SA审批
|
||||
阿美
|
||||
"];
|
||||
class sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD node-executed;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7["
|
||||
主管审批
|
||||
光头强
|
||||
"];
|
||||
class sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7 node-executed;
|
||||
sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89["
|
||||
DBA确认
|
||||
强子
|
||||
"];
|
||||
class sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89 node-executed;
|
||||
sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937["
|
||||
SA确认
|
||||
阿美
|
||||
"];
|
||||
class sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937 node-executed;
|
||||
sid-4FC27B48-A6F9-460A-A675-021F5854FE22["
|
||||
结束
|
||||
"];
|
||||
class sid-4FC27B48-A6F9-460A-A675-021F5854FE22 node-executed;
|
||||
sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E["
|
||||
SA执行1
|
||||
强子
|
||||
"];
|
||||
class sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E node-executed;
|
||||
sid-6C2120F3-D940-4958-A067-0903DCE879C4["
|
||||
SA执行2
|
||||
强子
|
||||
"];
|
||||
class sid-6C2120F3-D940-4958-A067-0903DCE879C4 node-executed;
|
||||
sid-9180E2A0-5C4B-435F-B42F-0D152470A338["
|
||||
DBA执行1
|
||||
强子
|
||||
"];
|
||||
class sid-9180E2A0-5C4B-435F-B42F-0D152470A338 node-executed;
|
||||
sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD["
|
||||
DBA执行3
|
||||
强子
|
||||
"];
|
||||
class sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD node-executed;
|
||||
sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756["
|
||||
DBA执行2
|
||||
强子
|
||||
"];
|
||||
class sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756 node-executed;
|
||||
sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93["
|
||||
DBA执行4
|
||||
强子
|
||||
"];
|
||||
class sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93 node-executed;
|
||||
sid-1897B30A-9C5C-4D5B-B80B-76A038785070["
|
||||
负责人确认
|
||||
梁静茹
|
||||
"];
|
||||
class sid-1897B30A-9C5C-4D5B-B80B-76A038785070 node-executed;
|
||||
sid-B3655226-6C29-4D00-B685-3D5C734DC7E1-->sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7;
|
||||
sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A-->sid-1897B30A-9C5C-4D5B-B80B-76A038785070;
|
||||
sid-E27C0367-E6D6-497F-9736-3CDC21FDE221-->sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89;
|
||||
sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD-->sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937;
|
||||
sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E-->sid-6C2120F3-D940-4958-A067-0903DCE879C4;
|
||||
sid-9180E2A0-5C4B-435F-B42F-0D152470A338-->sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756;
|
||||
sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD-->sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93;
|
||||
sid-6C2120F3-D940-4958-A067-0903DCE879C4-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-1897B30A-9C5C-4D5B-B80B-76A038785070-->sid-4FC27B48-A6F9-460A-A675-021F5854FE22;
|
||||
sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937-->sid-19DD9E9F-98C1-44EE-B604-842AFEE76F1E;
|
||||
sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89-->sid-9180E2A0-5C4B-435F-B42F-0D152470A338;
|
||||
sid-A1B3CD96-7697-4D7C-BEAA-73D187B1BE89-->sid-03A2C3AC-5337-48A5-B154-BB3FD0EC8DAD;
|
||||
sid-D5E1F2F4-306C-47A2-BF74-F66E3D769756-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-8C3F2F1D-F014-4F99-B966-095DC1A2BD93-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-BED98281-9585-4D1B-934E-BD1AC6AC0EFD;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-E27C0367-E6D6-497F-9736-3CDC21FDE221;
|
||||
sid-3E35A7FF-A2F4-4E07-9247-DBF884C81937-->sid-6C2120F3-D940-4958-A067-0903DCE879C4;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-4DA958A0-26D9-4D47-93A7-70F39FD7D51A;
|
||||
sid-7CE72B24-E0C1-46D3-8132-8BA66BE05AA7-->sid-4FC27B48-A6F9-460A-A675-021F5854FE22;
|
||||
`,
|
||||
{})
|
||||
})
|
||||
})
|
||||
42
e2e/spec/gantt.spec.js
Normal file
@@ -0,0 +1,42 @@
|
||||
/* eslint-env jest */
|
||||
import { imgSnapshotTest } from '../helpers/util.js'
|
||||
const { toMatchImageSnapshot } = require('jest-image-snapshot')
|
||||
|
||||
expect.extend({ toMatchImageSnapshot })
|
||||
|
||||
describe('Gantt diagram', () => {
|
||||
it('should render a gantt chart', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
gantt
|
||||
dateFormat YYYY-MM-DD
|
||||
axisFormat %d/%m
|
||||
title Adding GANTT diagram to mermaid
|
||||
excludes weekdays 2014-01-10
|
||||
|
||||
section A section
|
||||
Completed task :done, des1, 2014-01-06,2014-01-08
|
||||
Active task :active, des2, 2014-01-09, 3d
|
||||
Future task : des3, after des2, 5d
|
||||
Future task2 : des4, after des3, 5d
|
||||
|
||||
section Critical tasks
|
||||
Completed task in the critical line :crit, done, 2014-01-06,24h
|
||||
Implement parser and jison :crit, done, after des1, 2d
|
||||
Create tests for parser :crit, active, 3d
|
||||
Future task in critical line :crit, 5d
|
||||
Create tests for renderer :2d
|
||||
Add to mermaid :1d
|
||||
|
||||
section Documentation
|
||||
Describe gantt syntax :active, a1, after des1, 3d
|
||||
Add gantt diagram to demo page :after a1 , 20h
|
||||
Add another diagram to demo page :doc1, after a1 , 48h
|
||||
|
||||
section Last section
|
||||
Describe gantt syntax :after doc1, 3d
|
||||
Add gantt diagram to demo page : 20h
|
||||
Add another diagram to demo page : 48h
|
||||
`,
|
||||
{})
|
||||
})
|
||||
})
|
||||
29
e2e/spec/gitGraph.spec.js
Normal file
@@ -0,0 +1,29 @@
|
||||
/* eslint-env jest */
|
||||
import { imgSnapshotTest } from '../helpers/util.js'
|
||||
const { toMatchImageSnapshot } = require('jest-image-snapshot')
|
||||
|
||||
expect.extend({ toMatchImageSnapshot })
|
||||
|
||||
describe('Git graph', () => {
|
||||
it('should render a simple git graph', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
gitGraph:
|
||||
options
|
||||
{
|
||||
"nodeSpacing": 150,
|
||||
"nodeRadius": 10
|
||||
}
|
||||
end
|
||||
commit
|
||||
branch newbranch
|
||||
checkout newbranch
|
||||
commit
|
||||
commit
|
||||
checkout master
|
||||
commit
|
||||
commit
|
||||
merge newbranch
|
||||
`,
|
||||
{})
|
||||
})
|
||||
})
|
||||
15
e2e/spec/info.spec.js
Normal file
@@ -0,0 +1,15 @@
|
||||
/* eslint-env jest */
|
||||
import { imgSnapshotTest } from '../helpers/util.js'
|
||||
const { toMatchImageSnapshot } = require('jest-image-snapshot')
|
||||
|
||||
expect.extend({ toMatchImageSnapshot })
|
||||
|
||||
describe('Info diagram', () => {
|
||||
it('should render a simple info diagram', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
info
|
||||
showInfo
|
||||
`,
|
||||
{})
|
||||
})
|
||||
})
|
||||
35
e2e/spec/sequencediagram.spec.js
Normal file
@@ -0,0 +1,35 @@
|
||||
/* eslint-env jest */
|
||||
import { imgSnapshotTest } from '../helpers/util.js'
|
||||
const { toMatchImageSnapshot } = require('jest-image-snapshot')
|
||||
|
||||
expect.extend({ toMatchImageSnapshot })
|
||||
|
||||
describe('Sequencediagram', () => {
|
||||
it('should render a simple sequence diagram', async () => {
|
||||
await imgSnapshotTest(page, `
|
||||
sequenceDiagram
|
||||
participant Alice
|
||||
participant Bob
|
||||
participant John as John<br/>Second Line
|
||||
Alice ->> Bob: Hello Bob, how are you?
|
||||
Bob-->>John: How about you John?
|
||||
Bob--x Alice: I am good thanks!
|
||||
Bob-x John: I am good thanks!
|
||||
Note right of John: Bob thinks a long<br/>long time, so long<br/>that the text does<br/>not fit on a row.
|
||||
Bob-->Alice: Checking with John...
|
||||
alt either this
|
||||
Alice->>John: Yes
|
||||
else or this
|
||||
Alice->>John: No
|
||||
else or this will happen
|
||||
Alice->John: Maybe
|
||||
end
|
||||
par this happens in parallel
|
||||
Alice -->> Bob: Parallel message 1
|
||||
and
|
||||
Alice -->> John: Parallel message 2
|
||||
end
|
||||
`,
|
||||
{})
|
||||
})
|
||||
})
|
||||
16
e2e/spec/webpackUsage.spec.js
Normal file
@@ -0,0 +1,16 @@
|
||||
/* eslint-env jest */
|
||||
const { toMatchImageSnapshot } = require('jest-image-snapshot')
|
||||
|
||||
expect.extend({ toMatchImageSnapshot })
|
||||
|
||||
describe('Webpack usage', () => {
|
||||
it('should render a simple sequence diagram', async () => {
|
||||
const url = 'http://localhost:9000/webpackUsage.html'
|
||||
|
||||
await page.goto(url)
|
||||
|
||||
const image = await page.screenshot()
|
||||
|
||||
expect(image).toMatchImageSnapshot()
|
||||
})
|
||||
})
|
||||
@@ -1,26 +0,0 @@
|
||||
/**
|
||||
* Created by knut on 2015-12-26.
|
||||
*/
|
||||
var gulp = require('gulp')
|
||||
var shell = require('gulp-shell')
|
||||
var liveServer = require('live-server')
|
||||
|
||||
var params = {
|
||||
port: 8080, // Set the server port. Defaults to 8080.
|
||||
host: '0.0.0.0', // Set the address to bind to. Defaults to 0.0.0.0.
|
||||
root: './test/examples', // Set root directory that's being served. Defaults to cwd.
|
||||
open: true, // When false, it won't load your browser by default.
|
||||
ignore: 'scss,my/templates', // comma-separated string for paths to ignore
|
||||
// file: "index.html", // When set, serve this file for every 404 (useful for single-page applications)
|
||||
wait: 1000, // Waits for all changes, before reloading. Defaults to 0 sec.
|
||||
mount: [['/dist', './dist']] // Mount a directory to a route.
|
||||
}
|
||||
gulp.task('live-server', function () {
|
||||
liveServer.start(params)
|
||||
})
|
||||
|
||||
gulp.task('watch2', ['live-server'], function () {
|
||||
return shell.task([
|
||||
'yarn build -- --watch'
|
||||
])
|
||||
})
|
||||
@@ -1,23 +0,0 @@
|
||||
var gulp = require('gulp')
|
||||
var shell = require('gulp-shell')
|
||||
var jison = require('gulp-jison')
|
||||
var filelog = require('gulp-filelog')
|
||||
|
||||
gulp.task('jison', function () {
|
||||
return gulp.src('./src/**/*.jison')
|
||||
.pipe(filelog('Jison file:'))
|
||||
.pipe(jison({ moduleType: 'commonjs' }))
|
||||
.pipe(gulp.dest('./src/'))
|
||||
})
|
||||
|
||||
gulp.task('jison_legacy', function () {
|
||||
shell.task([
|
||||
'node node_modules/jison/lib/cli.js src/diagrams/classDiagram/parser/classDiagram.jison -o src/diagrams/classDiagram/parser/classDiagram.js',
|
||||
'node node_modules/jison/lib/cli.js src/diagrams/sequenceDiagram/parser/sequenceDiagram.jison -o src/diagrams/sequenceDiagram/parser/sequenceDiagram.js',
|
||||
'node node_modules/jison/lib/cli.js src/diagrams/example/parser/example.jison -o src/diagrams/example/parser/example.js',
|
||||
'node node_modules/jison/lib/cli.js src/diagrams/flowchart/parser/flow.jison -o src/diagrams/flowchart/parser/flow.js',
|
||||
'node node_modules/jison/lib/cli.js src/diagrams/flowchart/parser/dot.jison -o src/diagrams/flowchart/parser/dot.js',
|
||||
'node node_modules/jison/lib/cli.js src/diagrams/gitGraph/parser/gitGraph.jison -o src/diagrams/gitGraph/parser/gitGraph.js',
|
||||
'node node_modules/jison/lib/cli.js src/diagrams/gantt/parser/gantt.jison -o src/diagrams/gantt/parser/gantt.js'
|
||||
])
|
||||
})
|
||||
11
gulpfile.js
@@ -1,3 +1,10 @@
|
||||
var requireDir = require('require-dir')
|
||||
import gulp from 'gulp'
|
||||
import jison from 'gulp-jison'
|
||||
import filelog from 'gulp-filelog'
|
||||
|
||||
requireDir('./gulp/tasks')
|
||||
gulp.task('jison', function () {
|
||||
return gulp.src('./src/**/*.jison')
|
||||
.pipe(filelog('Jison file:'))
|
||||
.pipe(jison({ 'token-stack': true }))
|
||||
.pipe(gulp.dest('./src/'))
|
||||
})
|
||||
|
||||
9
jest.config.js
Normal file
@@ -0,0 +1,9 @@
|
||||
module.exports = {
|
||||
transform: {
|
||||
'^.+\\.jsx?$': './transformer.js'
|
||||
},
|
||||
transformIgnorePatterns: ['/node_modules/(?!dagre-d3-renderer/lib).*\\.js'],
|
||||
moduleNameMapper: {
|
||||
'\\.(css|scss)$': 'identity-obj-proxy'
|
||||
}
|
||||
}
|
||||
@@ -1,99 +0,0 @@
|
||||
// Karma configuration
|
||||
// Generated on Mon Nov 03 2014 07:53:38 GMT+0100 (CET)
|
||||
|
||||
module.exports = function (config) {
|
||||
config.set({
|
||||
|
||||
// base path that will be used to resolve all patterns (eg. files, exclude)
|
||||
basePath: '.',
|
||||
|
||||
// frameworks to use
|
||||
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
|
||||
frameworks: ['jasmine'],
|
||||
|
||||
// list of files / patterns to load in the browser
|
||||
files: [
|
||||
'./src/*.spec.js',
|
||||
'./src/diagrams/flowchart/**/*.spec.js',
|
||||
'./src/diagrams/example/**/*.spec.js',
|
||||
'./src/diagrams/sequenceDiagram/**/*.spec.js',
|
||||
'./src/diagrams/classDiagram/**/*.spec.js',
|
||||
'./src/diagrams/gantt/**/*.spec.js',
|
||||
'./src/diagrams/gitGraph/**/*.spec.js'
|
||||
],
|
||||
|
||||
preprocessors: {
|
||||
'src/**/*.spec.js': ['webpack']
|
||||
},
|
||||
|
||||
webpack: {
|
||||
externals: ['fs'],
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.js$/,
|
||||
use: {
|
||||
loader: 'babel-loader',
|
||||
options: {
|
||||
presets: [
|
||||
['env', {
|
||||
'targets': {
|
||||
'browsers': ['last 3 versions']
|
||||
}
|
||||
}]
|
||||
],
|
||||
plugins: [
|
||||
'transform-remove-strict-mode'
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// test results reporter to use
|
||||
// possible values: 'dots', 'progress'
|
||||
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
|
||||
reporters: ['dots'],
|
||||
|
||||
// web server port
|
||||
port: 9876,
|
||||
|
||||
// enable / disable colors in the output (reporters and logs)
|
||||
colors: true,
|
||||
|
||||
// level of logging
|
||||
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
|
||||
logLevel: config.LOG_INFO,
|
||||
|
||||
// enable / disable watching file and executing tests whenever any file changes
|
||||
autoWatch: true,
|
||||
|
||||
customLaunchers: {
|
||||
ChromeHeadless: {
|
||||
base: 'Chrome',
|
||||
flags: [
|
||||
'--incognito',
|
||||
// '--headless',
|
||||
'--disable-gpu',
|
||||
'--no-sandbox',
|
||||
// Without a remote debugging port, Google Chrome exits immediately.
|
||||
'--remote-debugging-port=9222'
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// start these browsers
|
||||
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
|
||||
browsers: ['ChromeHeadless'],
|
||||
plugins: [
|
||||
'karma-jasmine',
|
||||
'karma-chrome-launcher',
|
||||
'karma-webpack'
|
||||
],
|
||||
// Continuous Integration mode
|
||||
// if true, Karma captures browsers, runs the tests and exits
|
||||
singleRun: false
|
||||
})
|
||||
}
|
||||
196
lib/cli.js
@@ -1,196 +0,0 @@
|
||||
var fs = require('fs')
|
||||
var exec = require('child_process').exec
|
||||
var chalk = require('chalk')
|
||||
var which = require('which')
|
||||
var parseArgs = require('minimist')
|
||||
var semver = require('semver')
|
||||
var path = require('path')
|
||||
|
||||
var PHANTOM_VERSION = '^2.1.0'
|
||||
|
||||
var info = chalk.blue.bold
|
||||
|
||||
module.exports = (function () {
|
||||
return new Cli()
|
||||
}())
|
||||
|
||||
function Cli (options) {
|
||||
this.options = {
|
||||
alias: {
|
||||
help: 'h',
|
||||
png: 'p',
|
||||
outputDir: 'o',
|
||||
outputSuffix: 'O',
|
||||
svg: 's',
|
||||
verbose: 'v',
|
||||
phantomPath: 'e',
|
||||
sequenceConfig: 'c',
|
||||
ganttConfig: 'g',
|
||||
css: 't',
|
||||
width: 'w'
|
||||
},
|
||||
'boolean': ['help', 'png', 'svg', 'verbose'],
|
||||
'string': ['outputDir', 'outputSuffix']
|
||||
}
|
||||
|
||||
this.errors = []
|
||||
this.message = null
|
||||
|
||||
this.helpMessage = [
|
||||
info('Usage: mermaid [options] <file>...'),
|
||||
'',
|
||||
'file The mermaid description file to be rendered',
|
||||
'',
|
||||
'Options:',
|
||||
' -s --svg Output SVG instead of PNG (experimental)',
|
||||
' -p --png If SVG was selected, and you also want PNG, set this flag',
|
||||
' -o --outputDir Directory to save files, will be created automatically, defaults to `cwd`',
|
||||
" -O --outputSuffix Suffix to output filenames in front of '.svg' or '.png', defaults to ''",
|
||||
' -e --phantomPath Specify the path to the phantomjs executable',
|
||||
' -t --css Specify the path to a CSS file to be included when processing output',
|
||||
' -c --sequenceConfig Specify the path to the file with the configuration to be applied in the sequence diagram',
|
||||
' -g --ganttConfig Specify the path to the file with the configuration to be applied in the gantt diagram',
|
||||
' -h --help Show this message',
|
||||
' -v --verbose Show logging',
|
||||
' -w --width width of the generated png (number)',
|
||||
' --version Print version and quit'
|
||||
]
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
Cli.prototype.parse = function (argv, next) {
|
||||
this.errors = [] // clear errors
|
||||
var options = parseArgs(argv, this.options)
|
||||
|
||||
if (options.version) {
|
||||
var pkg = require('../package.json')
|
||||
this.message = '' + pkg.version
|
||||
next(null, this.message)
|
||||
} else if (options.help) {
|
||||
this.message = this.helpMessage.join('\n')
|
||||
next(null, this.message)
|
||||
} else {
|
||||
options.files = options._
|
||||
|
||||
if (!options.files.length) {
|
||||
this.errors.push(new Error('You must specify at least one source file.'))
|
||||
}
|
||||
|
||||
// ensure that parameter-expecting options have parameters
|
||||
;['outputDir', 'outputSuffix', 'phantomPath', 'sequenceConfig', 'ganttConfig', 'css'].forEach(function (i) {
|
||||
if (typeof options[i] !== 'undefined') {
|
||||
if (typeof options[i] !== 'string' || options[i].length < 1) {
|
||||
this.errors.push(new Error(i + ' expects a value.'))
|
||||
}
|
||||
}
|
||||
}.bind(this))
|
||||
|
||||
// set svg/png flags appropriately
|
||||
if (options.svg && !options.png) {
|
||||
options.png = false
|
||||
} else {
|
||||
options.png = true
|
||||
}
|
||||
|
||||
if (options.sequenceConfig) {
|
||||
try {
|
||||
fs.accessSync(options.sequenceConfig, fs.R_OK)
|
||||
} catch (err) {
|
||||
this.errors.push(err)
|
||||
}
|
||||
} else {
|
||||
options.sequenceConfig = null
|
||||
}
|
||||
|
||||
if (options.ganttConfig) {
|
||||
try {
|
||||
fs.accessSync(options.ganttConfig, fs.R_OK)
|
||||
} catch (err) {
|
||||
this.errors.push(err)
|
||||
}
|
||||
} else {
|
||||
options.ganttConfig = null
|
||||
}
|
||||
|
||||
if (options.css) {
|
||||
try {
|
||||
fs.accessSync(options.css, fs.R_OK)
|
||||
} catch (err) {
|
||||
this.errors.push(err)
|
||||
}
|
||||
} else {
|
||||
options.css = path.join(__dirname, '..', 'dist', 'mermaid.css')
|
||||
}
|
||||
|
||||
// set the default width if none was provided
|
||||
if (!options.width) {
|
||||
options.width = 1200
|
||||
}
|
||||
|
||||
this.checkPhantom = createCheckPhantom(options.phantomPath)
|
||||
|
||||
this.checkPhantom(function (err, path) {
|
||||
if (err) {
|
||||
this.errors.push(err)
|
||||
}
|
||||
options.phantomPath = path
|
||||
next(
|
||||
this.errors.length > 0 ? this.errors : null
|
||||
, this.message
|
||||
, options
|
||||
)
|
||||
}.bind(this))
|
||||
}
|
||||
}
|
||||
|
||||
function createCheckPhantom (_phantomPath) {
|
||||
var phantomPath = _phantomPath
|
||||
|
||||
return function checkPhantom (_next) {
|
||||
var next = _next || function () { }
|
||||
var err
|
||||
|
||||
if (typeof phantomPath === 'undefined') {
|
||||
try {
|
||||
var phantom = require('phantomjs')
|
||||
phantomPath = phantom.path
|
||||
} catch (e) {
|
||||
try {
|
||||
phantomPath = which.sync('phantomjs')
|
||||
} catch (e) {
|
||||
if (!phantomPath) {
|
||||
phantomPath = null
|
||||
err = new Error(
|
||||
[
|
||||
'Cannot find phantomjs in your PATH. If phantomjs is installed',
|
||||
"you may need to specify its path manually with the '-e' option.",
|
||||
"Run this executable with '--help' or view the README for more",
|
||||
'details.'
|
||||
].join('\n')
|
||||
)
|
||||
|
||||
next(err)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we have phantomPath, see if its version satisfies our requirements
|
||||
exec('"' + phantomPath + '" --version', function (err, stdout, stderr) {
|
||||
if (err) {
|
||||
next(new Error('Could not find phantomjs at the specified path.'))
|
||||
} else if (!semver.satisfies(stdout, PHANTOM_VERSION)) {
|
||||
next(new Error(
|
||||
'mermaid requires phantomjs ' +
|
||||
PHANTOM_VERSION +
|
||||
' to be installed, found version ' +
|
||||
stdout
|
||||
))
|
||||
} else {
|
||||
next(null, phantomPath)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
43
lib/index.js
@@ -1,43 +0,0 @@
|
||||
var path = require('path')
|
||||
var spawn = require('child_process').spawn
|
||||
|
||||
var mkdirp = require('mkdirp')
|
||||
|
||||
var phantomscript = path.join(__dirname, 'phantomscript.js')
|
||||
|
||||
module.exports = { process: processMermaid }
|
||||
|
||||
function processMermaid (files, _options, _next) {
|
||||
var options = _options || {}
|
||||
var outputDir = options.outputDir || process.cwd()
|
||||
var outputSuffix = options.outputSuffix || ''
|
||||
var next = _next || function () { }
|
||||
var phantomArgs = [
|
||||
phantomscript,
|
||||
outputDir,
|
||||
options.png,
|
||||
options.svg,
|
||||
options.css,
|
||||
options.sequenceConfig,
|
||||
options.ganttConfig,
|
||||
options.verbose,
|
||||
options.width,
|
||||
outputSuffix
|
||||
]
|
||||
|
||||
files.forEach(function (file) {
|
||||
phantomArgs.push(file)
|
||||
})
|
||||
|
||||
mkdirp(outputDir, function (err) {
|
||||
if (err) {
|
||||
throw err
|
||||
}
|
||||
var phantom = spawn(options.phantomPath, phantomArgs)
|
||||
|
||||
phantom.on('exit', next)
|
||||
|
||||
phantom.stderr.pipe(process.stderr)
|
||||
phantom.stdout.pipe(process.stdout)
|
||||
})
|
||||
}
|
||||
@@ -1,231 +0,0 @@
|
||||
/**
|
||||
* Credits:
|
||||
* - SVG Processing from the NYTimes svg-crowbar, under an MIT license
|
||||
* https://github.com/NYTimes/svg-crowbar
|
||||
* - Thanks to the grunticon project for some guidance
|
||||
* https://github.com/filamentgroup/grunticon
|
||||
*/
|
||||
|
||||
window.phantom.onError = function (msg, trace) {
|
||||
var msgStack = ['PHANTOM ERROR: ' + msg]
|
||||
if (trace && trace.length) {
|
||||
msgStack.push('TRACE:')
|
||||
trace.forEach(function (t) {
|
||||
msgStack.push(
|
||||
' -> ' +
|
||||
(t.file || t.sourceURL) +
|
||||
': ' +
|
||||
t.line +
|
||||
(t.function ? ' (in function ' + t.function + ')' : '')
|
||||
)
|
||||
})
|
||||
}
|
||||
system.stderr.write(msgStack.join('\n'))
|
||||
window.phantom.exit(1)
|
||||
}
|
||||
|
||||
var system = require('system')
|
||||
var fs = require('fs')
|
||||
var webpage = require('webpage')
|
||||
|
||||
var page = webpage.create()
|
||||
var files = system.args.slice(10, system.args.length)
|
||||
var width = system.args[8]
|
||||
|
||||
if (typeof width === 'undefined' || width === 'undefined') {
|
||||
width = 1200
|
||||
}
|
||||
var options = {
|
||||
outputDir: system.args[1],
|
||||
png: system.args[2] === 'true',
|
||||
svg: system.args[3] === 'true',
|
||||
css: fs.read(system.args[4]),
|
||||
sequenceConfig: system.args[5] !== 'null' ? JSON.parse(fs.read(system.args[5])) : {},
|
||||
ganttConfig: system.args[6] !== 'null' ? JSON.parse(fs.read(system.args[6])) : {},
|
||||
verbose: system.args[7] === 'true',
|
||||
width: width,
|
||||
outputSuffix: system.args[9]
|
||||
}
|
||||
var log = logger(options.verbose)
|
||||
options.sequenceConfig.useMaxWidth = false
|
||||
|
||||
page.content = [
|
||||
'<html>',
|
||||
'<head>',
|
||||
'<style type="text/css">body {background:white;font-family: Arial;}',
|
||||
options.css,
|
||||
'</style>',
|
||||
'</head>',
|
||||
'<body>',
|
||||
'</body>',
|
||||
'</html>'
|
||||
].join('\n')
|
||||
|
||||
page.injectJs('../dist/mermaid.js')
|
||||
page.onConsoleMessage = function (msg, lineNum, sourceId) {
|
||||
log('CONSOLE: ' + msg + ' (from line #' + lineNum + ' in "' + sourceId + '")')
|
||||
}
|
||||
|
||||
log('Num files to execute : ' + files.length)
|
||||
|
||||
files.forEach(function (file) {
|
||||
var contents = fs.read(file)
|
||||
var filename = file.split(fs.separator).slice(-1)
|
||||
var oParser = new window.DOMParser()
|
||||
var oDOM
|
||||
var svgContent
|
||||
|
||||
log('ready to execute: ' + file)
|
||||
|
||||
// the JS executed in this statement is sandboxed, even though it doesn't
|
||||
// look like it. we need to serialize then unserialize the svgContent that's
|
||||
// taken from the DOM
|
||||
svgContent = page.evaluate(executeInPage, {
|
||||
contents: contents,
|
||||
ganttConfig: options.ganttConfig,
|
||||
sequenceConfig: options.sequenceConfig,
|
||||
confWidth: options.width
|
||||
})
|
||||
|
||||
oDOM = oParser.parseFromString(svgContent, 'text/xml')
|
||||
|
||||
resolveSVGElement(oDOM.firstChild)
|
||||
setSVGStyle(oDOM.firstChild, options.css)
|
||||
|
||||
var outputPath = options.outputDir + fs.separator + filename + options.outputSuffix
|
||||
if (options.png) {
|
||||
page.viewportSize = {
|
||||
width: ~~oDOM.documentElement.attributes.getNamedItem('width').value,
|
||||
height: ~~oDOM.documentElement.attributes.getNamedItem('height').value
|
||||
}
|
||||
|
||||
page.render(outputPath + '.png')
|
||||
log('saved png: ' + outputPath + '.png')
|
||||
}
|
||||
|
||||
if (options.svg) {
|
||||
var serialize = new window.XMLSerializer()
|
||||
fs.write(outputPath + '.svg'
|
||||
, serialize.serializeToString(oDOM) + '\n'
|
||||
, 'w'
|
||||
)
|
||||
log('saved svg: ' + outputPath + '.svg')
|
||||
}
|
||||
})
|
||||
|
||||
window.phantom.exit()
|
||||
|
||||
function logger (_verbose) {
|
||||
var verbose = _verbose
|
||||
|
||||
return function (_message, _level) {
|
||||
var level = _level
|
||||
var message = _message
|
||||
var log
|
||||
|
||||
log = level === 'error' ? system.stderr : system.stdout
|
||||
|
||||
if (verbose) {
|
||||
log.write(message + '\n')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function resolveSVGElement (element) {
|
||||
var prefix = {
|
||||
xmlns: 'http://www.w3.org/2000/xmlns/',
|
||||
xlink: 'http://www.w3.org/1999/xlink',
|
||||
svg: 'http://www.w3.org/2000/svg'
|
||||
}
|
||||
|
||||
element.setAttribute('version', '1.1')
|
||||
// removing attributes so they aren't doubled up
|
||||
element.removeAttribute('xmlns')
|
||||
element.removeAttribute('xlink')
|
||||
// These are needed for the svg
|
||||
if (!element.hasAttributeNS(prefix.xmlns, 'xmlns')) {
|
||||
element.setAttributeNS(prefix.xmlns, 'xmlns', prefix.svg)
|
||||
}
|
||||
if (!element.hasAttributeNS(prefix.xmlns, 'xmlns:xlink')) {
|
||||
element.setAttributeNS(prefix.xmlns, 'xmlns:xlink', prefix.xlink)
|
||||
}
|
||||
}
|
||||
|
||||
function setSVGStyle (svg, css) {
|
||||
if (!css || !svg) { return }
|
||||
var styles = svg.getElementsByTagName('style')
|
||||
if (!styles || styles.length === 0) { return }
|
||||
styles[0].textContent = css
|
||||
}
|
||||
|
||||
// The sandboxed function that's executed in-page by phantom
|
||||
function executeInPage (data) {
|
||||
var xmlSerializer = new window.XMLSerializer()
|
||||
var contents = data.contents
|
||||
var sequenceConfig = JSON.stringify(data.sequenceConfig)
|
||||
var ganttConfig = JSON.stringify(data.ganttConfig).replace(/"(function.*})"/, '$1')
|
||||
var svg
|
||||
var svgValue
|
||||
var boundingBox
|
||||
var width
|
||||
var height
|
||||
var confWidth = data.confWidth
|
||||
|
||||
var toRemove = document.getElementsByClassName('mermaid')
|
||||
if (toRemove && toRemove.length) {
|
||||
for (var i = 0, len = toRemove.length; i < len; i++) {
|
||||
toRemove[i].parentNode.removeChild(toRemove[i])
|
||||
}
|
||||
}
|
||||
|
||||
var el = document.createElement('div')
|
||||
el.className = 'mermaid'
|
||||
el.appendChild(document.createTextNode(contents))
|
||||
document.body.appendChild(el)
|
||||
|
||||
var config = {
|
||||
sequenceDiagram: JSON.parse(sequenceConfig),
|
||||
flowchart: { useMaxWidth: false },
|
||||
logLevel: 1
|
||||
}
|
||||
|
||||
window.mermaid.initialize(config)
|
||||
|
||||
var sc = document.createElement('script')
|
||||
sc.appendChild(document.createTextNode('mermaid.ganttConfig = ' + ganttConfig + ';'))
|
||||
document.body.appendChild(sc)
|
||||
|
||||
window.mermaid.init()
|
||||
|
||||
svg = document.querySelector('svg')
|
||||
|
||||
boundingBox = svg.getBoundingClientRect() // the initial bounding box of the svg
|
||||
width = boundingBox.width * 1.5 // adding the scale factor for consistency with output in chrome browser
|
||||
height = boundingBox.height * 1.5 // adding the scale factor for consistency with output in chrome browser
|
||||
|
||||
var scalefactor = confWidth / (width - 8)
|
||||
|
||||
// resizing the body to fit the svg
|
||||
document.body.setAttribute(
|
||||
'style'
|
||||
, 'width: ' + (confWidth - 8) + '; height: ' + (height * scalefactor) + ';'
|
||||
)
|
||||
// resizing the svg via css for consistent display
|
||||
svg.setAttribute(
|
||||
'style'
|
||||
, 'width: ' + (confWidth - 8) + '; height: ' + (height * scalefactor) + ';'
|
||||
)
|
||||
|
||||
// set width and height attributes used to set the viewport when rendering the png image
|
||||
svg.setAttribute(
|
||||
'width'
|
||||
, confWidth
|
||||
)
|
||||
svg.setAttribute(
|
||||
'height'
|
||||
, height * scalefactor
|
||||
)
|
||||
|
||||
svgValue = xmlSerializer.serializeToString(svg) + '\n'
|
||||
return svgValue
|
||||
}
|
||||
157
package.json
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"name": "mermaid",
|
||||
"version": "7.0.5",
|
||||
"version": "8.1.0",
|
||||
"description": "Markdownish syntax for generating flowcharts, sequence diagrams, class diagrams, gantt charts and git graphs.",
|
||||
"main": "src/mermaid.js",
|
||||
"main": "dist/mermaid.core.js",
|
||||
"keywords": [
|
||||
"diagram",
|
||||
"markdown",
|
||||
@@ -12,22 +12,19 @@
|
||||
"class diagram",
|
||||
"git graph"
|
||||
],
|
||||
"bin": {
|
||||
"mermaid": "./bin/mermaid.js"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "node -r babel-register ./node_modules/.bin/webpack --progress --colors",
|
||||
"watch": "yarn build -- --watch",
|
||||
"release": "yarn build -- -p --config webpack.config.prod.js",
|
||||
"upgrade": "yarn-upgrade-all && yarn remove d3 && yarn add d3@3.5.17",
|
||||
"lint": "node_modules/.bin/standard",
|
||||
"karma": "node node_modules/karma/bin/karma start karma.conf.js --single-run",
|
||||
"tape": "node node_modules/tape/bin/tape test/cli_test-*.js",
|
||||
"test": "yarn lint && yarn tape && yarn karma",
|
||||
"live": "live-server ./test/examples",
|
||||
"jison": "gulp jison_legacy",
|
||||
"live_server": "gulp live-server",
|
||||
"prepublishOnly": "yarn build && yarn release && yarn test"
|
||||
"build": "webpack --progress --colors",
|
||||
"build:watch": "yarn build --watch",
|
||||
"minify": "minify ./dist/mermaid.js > ./dist/mermaid.min.js",
|
||||
"release": "yarn build -p --config webpack.config.prod.babel.js",
|
||||
"lint": "standard",
|
||||
"e2e": "yarn lint && jest e2e --config e2e/jest.config.js",
|
||||
"dev": "yarn lint && webpack-dev-server --config webpack.config.e2e.js",
|
||||
"test": "yarn lint && jest src",
|
||||
"test:watch": "jest --watch src",
|
||||
"jison": "node -r @babel/register node_modules/.bin/gulp jison",
|
||||
"prepublishOnly": "yarn build && yarn release && yarn test",
|
||||
"prepush": "yarn test"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -39,91 +36,59 @@
|
||||
"ignore": [
|
||||
"**/parser/*.js",
|
||||
"dist/**/*.js"
|
||||
],
|
||||
"globals": [
|
||||
"page"
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"chalk": "^2.1.0",
|
||||
"d3": "3.5.17",
|
||||
"dagre": "^0.7.4",
|
||||
"dagre-d3-renderer": "^0.1.6",
|
||||
"he": "^1.1.1",
|
||||
"lodash": "^4.17.4",
|
||||
"minimist": "^1.2.0",
|
||||
"mkdirp": "^0.5.1",
|
||||
"moment": "^2.18.1",
|
||||
"semver": "^5.4.1",
|
||||
"which": "^1.3.0"
|
||||
"d3": "^5.7.0",
|
||||
"dagre-d3-renderer": "^0.5.8",
|
||||
"dagre-layout": "^0.8.8",
|
||||
"graphlibrary": "^2.2.0",
|
||||
"he": "^1.2.0",
|
||||
"moment-mini": "^2.22.1",
|
||||
"lodash": "^4.17.11",
|
||||
"minify": "^4.1.1",
|
||||
"scope-css": "^1.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"async": "^2.5.0",
|
||||
"babel-core": "^6.26.0",
|
||||
"babel-loader": "^7.1.2",
|
||||
"babel-plugin-transform-remove-strict-mode": "^0.0.2",
|
||||
"babel-preset-env": "^1.6.0",
|
||||
"babel-preset-es2015": "^6.24.1",
|
||||
"clone": "^2.1.1",
|
||||
"codeclimate-test-reporter": "^0.5.0",
|
||||
"css-loader": "^0.28.7",
|
||||
"dox": "^0.9.0",
|
||||
"event-stream": "^3.3.4",
|
||||
"extract-text-webpack-plugin": "^3.0.0",
|
||||
"front-matter": "^2.2.0",
|
||||
"gulp": "^3.9.1",
|
||||
"gulp-bower": "^0.0.13",
|
||||
"gulp-bump": "^2.7.0",
|
||||
"gulp-concat": "^2.6.1",
|
||||
"gulp-data": "^1.2.1",
|
||||
"gulp-dox": "^0.1.6",
|
||||
"gulp-ext-replace": "^0.3.0",
|
||||
"@babel/core": "^7.2.2",
|
||||
"@babel/preset-env": "^7.2.0",
|
||||
"@babel/register": "^7.0.0",
|
||||
"babel-core": "7.0.0-bridge.0",
|
||||
"babel-jest": "^23.6.0",
|
||||
"babel-loader": "^8.0.4",
|
||||
"coveralls": "^3.0.2",
|
||||
"css-loader": "^2.0.1",
|
||||
"css-to-string-loader": "^0.1.3",
|
||||
"gulp": "^4.0.0",
|
||||
"gulp-filelog": "^0.4.1",
|
||||
"gulp-front-matter": "^1.3.0",
|
||||
"gulp-hogan": "^2.0.0",
|
||||
"gulp-if": "^2.0.2",
|
||||
"gulp-insert": "^0.5.0",
|
||||
"gulp-istanbul": "^1.1.2",
|
||||
"gulp-jasmine": "^2.4.2",
|
||||
"gulp-jasmine-browser": "^1.9.0",
|
||||
"gulp-jison": "^1.2.0",
|
||||
"gulp-less": "^3.3.2",
|
||||
"gulp-livereload": "^3.8.1",
|
||||
"gulp-marked": "^1.0.0",
|
||||
"gulp-mdvars": "^2.0.0",
|
||||
"gulp-qunit": "^1.5.0",
|
||||
"gulp-rename": "^1.2.2",
|
||||
"gulp-shell": "^0.6.3",
|
||||
"gulp-tag-version": "^1.3.0",
|
||||
"gulp-util": "^3.0.8",
|
||||
"gulp-vartree": "^2.0.1",
|
||||
"hogan.js": "^3.0.2",
|
||||
"inject-loader": "^3.0.1",
|
||||
"jasmine": "^2.8.0",
|
||||
"jasmine-es6": "^0.4.1",
|
||||
"jison": "^0.4.17",
|
||||
"jsdom": "^11.2.0",
|
||||
"karma": "^1.7.1",
|
||||
"karma-chrome-launcher": "^2.2.0",
|
||||
"karma-jasmine": "^1.1.0",
|
||||
"karma-webpack": "^2.0.4",
|
||||
"less": "^2.7.2",
|
||||
"less-loader": "^4.0.5",
|
||||
"live-server": "^1.2.0",
|
||||
"map-stream": "^0.0.7",
|
||||
"marked": "^0.3.6",
|
||||
"mock-browser": "^0.92.14",
|
||||
"phantomjs-prebuilt": "^2.1.15",
|
||||
"require-dir": "^0.3.2",
|
||||
"rimraf": "^2.6.1",
|
||||
"standard": "^10.0.3",
|
||||
"style-loader": "^0.18.2",
|
||||
"tape": "^4.8.0",
|
||||
"webpack": "^3.5.5",
|
||||
"webpack-node-externals": "^1.6.0",
|
||||
"yarn-upgrade-all": "^0.1.8"
|
||||
"husky": "^1.2.1",
|
||||
"identity-obj-proxy": "^3.0.0",
|
||||
"jest": "^23.6.0",
|
||||
"jest-environment-puppeteer": "^4.2.0",
|
||||
"jest-image-snapshot": "^2.8.2",
|
||||
"jest-puppeteer": "^4.2.0",
|
||||
"jison": "^0.4.18",
|
||||
"moment": "^2.23.0",
|
||||
"node-sass": "^4.11.0",
|
||||
"puppeteer": "^1.17.0",
|
||||
"sass-loader": "^7.1.0",
|
||||
"standard": "^12.0.1",
|
||||
"webpack": "^4.27.1",
|
||||
"webpack-cli": "^3.1.2",
|
||||
"webpack-dev-server": "^3.4.1",
|
||||
"webpack-node-externals": "^1.7.2",
|
||||
"yarn-upgrade-all": "^0.5.0"
|
||||
},
|
||||
"files": [
|
||||
"bin",
|
||||
"dist",
|
||||
"lib",
|
||||
"src"
|
||||
]
|
||||
}
|
||||
"dist"
|
||||
],
|
||||
"yarn-upgrade-all": {
|
||||
"ignore": [
|
||||
"babel-core"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
node node_modules/jison/lib/cli.js src/diagrams/classDiagram/parser/classDiagram.jison -o src/diagrams/classDiagram/parser/classDiagram.js
|
||||
node node_modules/jison/lib/cli.js src/diagrams/sequenceDiagram/parser/sequenceDiagram.jison -o src/diagrams/sequenceDiagram/parser/sequenceDiagram.js
|
||||
node node_modules/jison/lib/cli.js src/diagrams/example/parser/example.jison -o src/diagrams/example/parser/example.js
|
||||
node node_modules/jison/lib/cli.js src/diagrams/flowchart/parser/flow.jison -o src/diagrams/flowchart/parser/flow.js
|
||||
node node_modules/jison/lib/cli.js src/diagrams/flowchart/parser/dot.jison -o src/diagrams/flowchart/parser/dot.js
|
||||
node node_modules/jison/lib/cli.js src/diagrams/gantt/parser/gantt.jison -o src/diagrams/gantt/parser/gantt.js
|
||||
node node_modules/jison/lib/cli.js src/diagrams/gitGraph/parser/gitGraph.jison -o src/diagrams/gitGraph/parser/gitGraph.js
|
||||
447
src/d3.js
vendored
@@ -1,447 +0,0 @@
|
||||
const d3 = require('d3')
|
||||
|
||||
module.exports = d3;
|
||||
|
||||
/*
|
||||
D3 Text Wrap
|
||||
By Vijith Assar
|
||||
http://www.vijithassar.com
|
||||
http://www.github.com/vijithassar
|
||||
@vijithassar
|
||||
|
||||
Detailed instructions at http://www.github.com/vijithassar/d3textwrap
|
||||
*/
|
||||
|
||||
(function () {
|
||||
// set this variable to a string value to always force a particular
|
||||
// wrap method for development purposes, for example to check tspan
|
||||
// rendering using a foreignobject-enabled browser. set to 'tspan' to
|
||||
// use tspans and 'foreignobject' to use foreignobject
|
||||
var forceWrapMethod = false // by default no wrap method is forced
|
||||
forceWrapMethod = 'tspans' // comment out this statement to stop forcing tspans
|
||||
// force_wrap_method = 'foreignobjects'; // uncomment this statement to force foreignobjects
|
||||
|
||||
// exit immediately if something in this location
|
||||
// has already been defined; the plugin will defer to whatever
|
||||
// else you're doing in your code
|
||||
if (d3.selection.prototype.textwrap) {
|
||||
return false
|
||||
}
|
||||
|
||||
// double check the force_wrap_method flag
|
||||
// and reset if someone screwed up the above
|
||||
// settings
|
||||
if (typeof forceWrapMethod === 'undefined') {
|
||||
forceWrapMethod = false
|
||||
}
|
||||
|
||||
// create the plugin method twice, both for regular use
|
||||
// and again for use inside the enter() selection
|
||||
d3.selection.prototype.textwrap = d3.selection.enter.prototype.textwrap = function (bounds, padding) {
|
||||
// default value of padding is zero if it's undefined
|
||||
padding = parseInt(padding) || 0
|
||||
|
||||
// save callee into a variable so we can continue to refer to it
|
||||
// as the function scope changes
|
||||
var selection = this
|
||||
|
||||
// create a variable to store desired return values in
|
||||
var returnValue
|
||||
|
||||
// extract wrap boundaries from any d3-selected rect and return them
|
||||
// in a format that matches the simpler object argument option
|
||||
var extractBounds = function (bounds) {
|
||||
// discard the nested array wrappers added by d3
|
||||
var boundingRect = bounds[0][0]
|
||||
// sanitize the svg element name so we can test against it
|
||||
var elementType = boundingRect.tagName.toString()
|
||||
// if it's not a rect, exit
|
||||
if (elementType !== 'rect') {
|
||||
return false
|
||||
// if it's a rect, proceed to extracting the position attributes
|
||||
} else {
|
||||
var boundsExtracted = {}
|
||||
boundsExtracted.x = d3.select(boundingRect).attr('x') || 0
|
||||
boundsExtracted.y = d3.select(boundingRect).attr('y') || 0
|
||||
boundsExtracted.width = d3.select(boundingRect).attr('width') || 0
|
||||
boundsExtracted.height = d3.select(boundingRect).attr('height') || 0
|
||||
// also pass along the getter function
|
||||
boundsExtracted.attr = bounds.attr
|
||||
}
|
||||
return boundsExtracted
|
||||
}
|
||||
|
||||
// double check the input argument for the wrapping
|
||||
// boundaries to make sure it actually contains all
|
||||
// the information we'll need in order to wrap successfully
|
||||
var verifyBounds = function (bounds) {
|
||||
// quickly add a simple getter method so you can use either
|
||||
// bounds.x or bounds.attr('x') as your notation,
|
||||
// the latter being a common convention among D3
|
||||
// developers
|
||||
if (!bounds.attr) {
|
||||
bounds.attr = function (property) {
|
||||
if (this[property]) {
|
||||
return this[property]
|
||||
}
|
||||
}
|
||||
}
|
||||
// if it's an associative array, make sure it has all the
|
||||
// necessary properties represented directly
|
||||
if (
|
||||
(typeof bounds === 'object') &&
|
||||
(typeof bounds.x !== 'undefined') &&
|
||||
(typeof bounds.y !== 'undefined') &&
|
||||
(typeof bounds.width !== 'undefined') &&
|
||||
(typeof bounds.height !== 'undefined')
|
||||
// if that's the case, then the bounds are fine
|
||||
) {
|
||||
// return the lightly modified bounds
|
||||
return bounds
|
||||
// if it's a numerically indexed array, assume it's a
|
||||
// d3-selected rect and try to extract the positions
|
||||
} else if (
|
||||
// first try to make sure it's an array using Array.isArray
|
||||
(
|
||||
(typeof Array.isArray === 'function') &&
|
||||
(Array.isArray(bounds))
|
||||
) ||
|
||||
// but since Array.isArray isn't always supported, fall
|
||||
// back to casting the object to a string when it's not
|
||||
(Object.prototype.toString.call(bounds) === '[object Array]')
|
||||
) {
|
||||
// once you're sure it's an array, extract the boundaries
|
||||
// from the rect
|
||||
var extractedBounds = extractBounds(bounds)
|
||||
return extractedBounds
|
||||
} else {
|
||||
// but if the bounds are neither an object nor a numerical
|
||||
// array, then the bounds argument is invalid and you'll
|
||||
// need to fix it
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
var applyPadding = function (bounds, padding) {
|
||||
var paddedBounds = bounds
|
||||
if (padding !== 0) {
|
||||
paddedBounds.x = parseInt(paddedBounds.x) + padding
|
||||
paddedBounds.y = parseInt(paddedBounds.y) + padding
|
||||
paddedBounds.width -= padding * 2
|
||||
paddedBounds.height -= padding * 2
|
||||
}
|
||||
return paddedBounds
|
||||
}
|
||||
|
||||
// verify bounds
|
||||
var verifiedBounds = verifyBounds(bounds)
|
||||
|
||||
// modify bounds if a padding value is provided
|
||||
if (padding) {
|
||||
verifiedBounds = applyPadding(verifiedBounds, padding)
|
||||
}
|
||||
|
||||
// check that we have the necessary conditions for this function to operate properly
|
||||
if (
|
||||
// the selection it's operating on cannot be empty
|
||||
(selection.length === 0) ||
|
||||
// d3 must be available
|
||||
(!d3) ||
|
||||
// desired wrapping bounds must be provided as an input argument
|
||||
(!bounds) ||
|
||||
// input bounds must validate
|
||||
(!verifiedBounds)
|
||||
) {
|
||||
// try to return the calling selection if possible
|
||||
// so as not to interfere with methods downstream in the
|
||||
// chain
|
||||
if (selection) {
|
||||
return selection
|
||||
// if all else fails, just return false. if you hit this point then you're
|
||||
// almost certainly trying to call the textwrap() method on something that
|
||||
// doesn't make sense!
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
// if we've validated everything then we can finally proceed
|
||||
// to the meat of this operation
|
||||
} else {
|
||||
// reassign the verified bounds as the set we want
|
||||
// to work with from here on; this ensures that we're
|
||||
// using the same data structure for our bounds regardless
|
||||
// of whether the input argument was a simple object or
|
||||
// a d3 selection
|
||||
bounds = verifiedBounds
|
||||
|
||||
// wrap using html and foreignObjects if they are supported
|
||||
var wrapWithForeignobjects = function (item) {
|
||||
// establish variables to quickly reference target nodes later
|
||||
var parent = d3.select(item[0].parentNode)
|
||||
var textNode = parent.select('text')
|
||||
var styledLineHeight = textNode.style('line-height')
|
||||
// extract our desired content from the single text element
|
||||
var textToWrap = textNode.text()
|
||||
// remove the text node and replace with a foreign object
|
||||
textNode.remove()
|
||||
var foreignObject = parent.append('foreignObject')
|
||||
// add foreign object and set dimensions, position, etc
|
||||
foreignObject
|
||||
.attr('requiredFeatures', 'http://www.w3.org/TR/SVG11/feature#Extensibility')
|
||||
.attr('x', bounds.x)
|
||||
.attr('y', bounds.y)
|
||||
.attr('width', bounds.width)
|
||||
.attr('height', bounds.height)
|
||||
// insert an HTML div
|
||||
var wrapDiv = foreignObject
|
||||
.append('xhtml:div')
|
||||
// this class is currently hardcoded
|
||||
// probably not necessary but easy to
|
||||
// override using .classed() and for now
|
||||
// it's nice to avoid a litany of input
|
||||
// arguments
|
||||
.attr('class', 'wrapped')
|
||||
// set div to same dimensions as foreign object
|
||||
wrapDiv
|
||||
.style('height', bounds.height)
|
||||
.style('width', bounds.width)
|
||||
// insert text content
|
||||
.html(textToWrap)
|
||||
if (styledLineHeight) {
|
||||
wrapDiv.style('line-height', styledLineHeight)
|
||||
}
|
||||
returnValue = parent.select('foreignObject')
|
||||
}
|
||||
|
||||
// wrap with tspans if foreignObject is undefined
|
||||
var wrapWithTspans = function (item) {
|
||||
// operate on the first text item in the selection
|
||||
var textNode = item[0]
|
||||
var parent = textNode.parentNode
|
||||
var textNodeSelected = d3.select(textNode)
|
||||
// measure initial size of the text node as rendered
|
||||
var textNodeHeight = textNode.getBBox().height
|
||||
var textNodeWidth = textNode.getBBox().width
|
||||
// figure out the line height, either from rendered height
|
||||
// of the font or attached styling
|
||||
var lineHeight
|
||||
var renderedLineHeight = textNodeHeight
|
||||
var styledLineHeight = textNodeSelected.style('line-height')
|
||||
if (
|
||||
(styledLineHeight) &&
|
||||
(parseInt(styledLineHeight))
|
||||
) {
|
||||
lineHeight = parseInt(styledLineHeight.replace('px', ''))
|
||||
} else {
|
||||
lineHeight = renderedLineHeight
|
||||
}
|
||||
// only fire the rest of this if the text content
|
||||
// overflows the desired dimensions
|
||||
if (textNodeWidth > bounds.width) {
|
||||
// store whatever is inside the text node
|
||||
// in a variable and then zero out the
|
||||
// initial content; we'll reinsert in a moment
|
||||
// using tspan elements.
|
||||
var textToWrap = textNodeSelected.text()
|
||||
textNodeSelected.text('')
|
||||
if (textToWrap) {
|
||||
// keep track of whether we are splitting by spaces
|
||||
// so we know whether to reinsert those spaces later
|
||||
var breakDelimiter
|
||||
// split at spaces to create an array of individual words
|
||||
var textToWrapArray
|
||||
if (textToWrap.indexOf(' ') !== -1) {
|
||||
breakDelimiter = ' '
|
||||
textToWrapArray = textToWrap.split(' ')
|
||||
} else {
|
||||
// if there are no spaces, figure out the split
|
||||
// points by comparing rendered text width against
|
||||
// bounds and translating that into character position
|
||||
// cuts
|
||||
breakDelimiter = ''
|
||||
var stringLength = textToWrap.length
|
||||
var numberOfSubstrings = Math.ceil(textNodeWidth / bounds.width)
|
||||
var spliceInterval = Math.floor(stringLength / numberOfSubstrings)
|
||||
if (
|
||||
!(spliceInterval * numberOfSubstrings >= stringLength)
|
||||
) {
|
||||
numberOfSubstrings++
|
||||
}
|
||||
textToWrapArray = []
|
||||
var substring
|
||||
var startPosition
|
||||
for (var i = 0; i < numberOfSubstrings; i++) {
|
||||
startPosition = i * spliceInterval
|
||||
substring = textToWrap.substr(startPosition, spliceInterval)
|
||||
textToWrapArray.push(substring)
|
||||
}
|
||||
}
|
||||
|
||||
// new array where we'll store the words re-assembled into
|
||||
// substrings that have been tested against the desired
|
||||
// maximum wrapping width
|
||||
var substrings = []
|
||||
// computed text length is arguably incorrectly reported for
|
||||
// all tspans after the first one, in that they will include
|
||||
// the width of previous separate tspans. to compensate we need
|
||||
// to manually track the computed text length of all those
|
||||
// previous tspans and substrings, and then use that to offset
|
||||
// the miscalculation. this then gives us the actual correct
|
||||
// position we want to use in rendering the text in the SVG.
|
||||
var totalOffset = 0
|
||||
// object for storing the results of text length computations later
|
||||
var temp = {}
|
||||
// loop through the words and test the computed text length
|
||||
// of the string against the maximum desired wrapping width
|
||||
for (i = 0; i < textToWrapArray.length; i++) {
|
||||
var word = textToWrapArray[i]
|
||||
var previousString = textNodeSelected.text()
|
||||
var previousWidth = textNode.getComputedTextLength()
|
||||
// initialize the current word as the first word
|
||||
// or append to the previous string if one exists
|
||||
var newstring
|
||||
if (previousString) {
|
||||
newstring = previousString + breakDelimiter + word
|
||||
} else {
|
||||
newstring = word
|
||||
}
|
||||
// add the newest substring back to the text node and
|
||||
// measure the length
|
||||
textNodeSelected.text(newstring)
|
||||
var newWidth = textNode.getComputedTextLength()
|
||||
// adjust the length by the offset we've tracked
|
||||
// due to the misreported length discussed above
|
||||
|
||||
// if our latest version of the string is too
|
||||
// big for the bounds, use the previous
|
||||
// version of the string (without the newest word
|
||||
// added) and use the latest word to restart the
|
||||
// process with a new tspan
|
||||
if (newWidth > bounds.width) {
|
||||
if (
|
||||
(previousString) &&
|
||||
(previousString !== '')
|
||||
) {
|
||||
totalOffset = totalOffset + previousWidth
|
||||
temp = { string: previousString, width: previousWidth, offset: totalOffset }
|
||||
substrings.push(temp)
|
||||
textNodeSelected.text('')
|
||||
textNodeSelected.text(word)
|
||||
// Handle case where there is just one more word to be wrapped
|
||||
if (i === textToWrapArray.length - 1) {
|
||||
newstring = word
|
||||
textNodeSelected.text(newstring)
|
||||
newWidth = textNode.getComputedTextLength()
|
||||
}
|
||||
}
|
||||
}
|
||||
// if we're up to the last word in the array,
|
||||
// get the computed length as is without
|
||||
// appending anything further to it
|
||||
if (i === textToWrapArray.length - 1) {
|
||||
textNodeSelected.text('')
|
||||
var finalString = newstring
|
||||
if (
|
||||
(finalString) &&
|
||||
(finalString !== '')
|
||||
) {
|
||||
if ((newWidth - totalOffset) > 0) { newWidth = newWidth - totalOffset }
|
||||
temp = { string: finalString, width: newWidth, offset: totalOffset }
|
||||
substrings.push(temp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// append each substring as a tspan
|
||||
var currentTspan
|
||||
// var tspanCount
|
||||
// double check that the text content has been removed
|
||||
// before we start appending tspans
|
||||
textNodeSelected.text('')
|
||||
for (i = 0; i < substrings.length; i++) {
|
||||
substring = substrings[i].string
|
||||
// only append if we're sure it won't make the tspans
|
||||
// overflow the bounds.
|
||||
if ((i) * lineHeight < bounds.height - (lineHeight * 1.5)) {
|
||||
currentTspan = textNodeSelected.append('tspan')
|
||||
.text(substring)
|
||||
// vertical shift to all tspans after the first one
|
||||
currentTspan
|
||||
.attr('dy', function (d) {
|
||||
if (i > 0) {
|
||||
return lineHeight
|
||||
}
|
||||
})
|
||||
// shift left from default position, which
|
||||
// is probably based on the full length of the
|
||||
// text string until we make this adjustment
|
||||
currentTspan
|
||||
.attr('x', function () {
|
||||
var xOffset = bounds.x
|
||||
if (padding) { xOffset += padding }
|
||||
return xOffset
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// position the overall text node, whether wrapped or not
|
||||
textNodeSelected.attr('y', function () {
|
||||
var yOffset = bounds.y
|
||||
// shift by line-height to move the baseline into
|
||||
// the bounds – otherwise the text baseline would be
|
||||
// at the top of the bounds
|
||||
if (lineHeight) { yOffset += lineHeight }
|
||||
// shift by padding, if it's there
|
||||
if (padding) { yOffset += padding }
|
||||
return yOffset
|
||||
})
|
||||
// shift to the right by the padding value
|
||||
textNodeSelected.attr('x', function () {
|
||||
var xOffset = bounds.x
|
||||
if (padding) { xOffset += padding }
|
||||
return xOffset
|
||||
})
|
||||
|
||||
// assign our modified text node with tspans
|
||||
// to the return value
|
||||
returnValue = d3.select(parent).selectAll('text')
|
||||
}
|
||||
|
||||
// variable used to hold the functions that let us
|
||||
// switch between the wrap methods
|
||||
var wrapMethod
|
||||
|
||||
// if a wrap method if being forced, assign that
|
||||
// function
|
||||
if (forceWrapMethod) {
|
||||
if (forceWrapMethod === 'foreignobjects') {
|
||||
wrapMethod = wrapWithForeignobjects
|
||||
} else if (forceWrapMethod === 'tspans') {
|
||||
wrapMethod = wrapWithTspans
|
||||
}
|
||||
}
|
||||
|
||||
// if no wrap method is being forced, then instead
|
||||
// test for browser support of foreignobject and
|
||||
// use whichever wrap method makes sense accordingly
|
||||
if (!forceWrapMethod) {
|
||||
if (typeof SVGForeignObjectElement !== 'undefined') {
|
||||
wrapMethod = wrapWithForeignobjects
|
||||
} else {
|
||||
wrapMethod = wrapWithTspans
|
||||
}
|
||||
}
|
||||
|
||||
// run the desired wrap function for each item
|
||||
// in the d3 selection that called .textwrap()
|
||||
for (var i = 0; i < selection.length; i++) {
|
||||
var item = selection[i]
|
||||
wrapMethod(item)
|
||||
}
|
||||
|
||||
// return the modified nodes so we can chain other
|
||||
// methods to them.
|
||||
return returnValue
|
||||
}
|
||||
}
|
||||
})()
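The selection logic above (use a forced method if one is given, otherwise feature-detect foreignObject support) is worth seeing in isolation. A minimal sketch, with stand-in wrap functions; these are assumed names, not the plugin's actual implementations:

```js
// Stand-ins for the plugin's two strategies (assumed here; the real ones are
// defined earlier in this file).
function wrapWithForeignobjects (item) { return 'foreignobject-wrapped: ' + item }
function wrapWithTspans (item) { return 'tspan-wrapped: ' + item }

// Mirrors the branch above: an explicitly forced method wins, otherwise pick
// based on whether the environment exposes SVGForeignObjectElement.
function pickWrapMethod (forceWrapMethod) {
  if (forceWrapMethod === 'foreignobjects') return wrapWithForeignobjects
  if (forceWrapMethod === 'tspans') return wrapWithTspans
  return typeof SVGForeignObjectElement !== 'undefined'
    ? wrapWithForeignobjects
    : wrapWithTspans
}

console.log(pickWrapMethod()('hello')) // takes the tspan path wherever foreignObject is unsupported
```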
src/diagrams/class/classDb.js (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
|
||||
import { logger } from '../../logger'
|
||||
|
||||
let relations = []
|
||||
let classes = {}
|
||||
|
||||
/**
|
||||
* Function called by parser when a node definition has been found.
|
||||
* @param id
|
||||
* @param text
|
||||
* @param type
|
||||
* @param style
|
||||
*/
|
||||
export const addClass = function (id) {
|
||||
if (typeof classes[id] === 'undefined') {
|
||||
classes[id] = {
|
||||
id: id,
|
||||
methods: [],
|
||||
members: []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const clear = function () {
|
||||
relations = []
|
||||
classes = {}
|
||||
}
|
||||
|
||||
export const getClass = function (id) {
|
||||
return classes[id]
|
||||
}
|
||||
export const getClasses = function () {
|
||||
return classes
|
||||
}
|
||||
|
||||
export const getRelations = function () {
|
||||
return relations
|
||||
}
|
||||
|
||||
export const addRelation = function (relation) {
|
||||
logger.debug('Adding relation: ' + JSON.stringify(relation))
|
||||
addClass(relation.id1)
|
||||
addClass(relation.id2)
|
||||
relations.push(relation)
|
||||
}
|
||||
|
||||
export const addMember = function (className, member) {
|
||||
const theClass = classes[className]
|
||||
if (typeof member === 'string') {
|
||||
if (member.substr(-1) === ')') {
|
||||
theClass.methods.push(member)
|
||||
} else {
|
||||
theClass.members.push(member)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const addMembers = function (className, MembersArr) {
|
||||
if (Array.isArray(MembersArr)) {
|
||||
MembersArr.forEach(member => addMember(className, member))
|
||||
}
|
||||
}
|
||||
|
||||
export const cleanupLabel = function (label) {
|
||||
if (label.substring(0, 1) === ':') {
|
||||
return label.substr(2).trim()
|
||||
} else {
|
||||
return label.trim()
|
||||
}
|
||||
}
|
||||
|
||||
export const lineType = {
|
||||
LINE: 0,
|
||||
DOTTED_LINE: 1
|
||||
}
|
||||
|
||||
export const relationType = {
|
||||
AGGREGATION: 0,
|
||||
EXTENSION: 1,
|
||||
COMPOSITION: 2,
|
||||
DEPENDENCY: 3
|
||||
}
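// For reference (derived from the lexer rules in the generated parser): the
// arrowheads in the diagram text map onto these relation types:
//   <| or |>   EXTENSION        *        COMPOSITION
//   o          AGGREGATION      < or >   DEPENDENCY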
|
||||
|
||||
export default {
|
||||
addClass,
|
||||
clear,
|
||||
getClass,
|
||||
getClasses,
|
||||
getRelations,
|
||||
addRelation,
|
||||
addMember,
|
||||
addMembers,
|
||||
cleanupLabel,
|
||||
lineType,
|
||||
relationType
|
||||
}
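Taken as a whole, the new classDb module is a small in-memory store that the parser drives through `yy`. A usage sketch of its public API on its own; the class names are made up for illustration:

```js
import classDb from './classDb'

classDb.clear()
classDb.addClass('Animal')
classDb.addClass('Duck')

// a trailing ')' marks a method, anything else lands in members
classDb.addMember('Animal', 'int age')
classDb.addMember('Animal', 'isMammal()')

classDb.addRelation({
  id1: 'Animal',
  id2: 'Duck',
  relation: {
    type1: classDb.relationType.EXTENSION,
    type2: 'none',
    lineType: classDb.lineType.LINE
  },
  relationTitle1: 'none',
  relationTitle2: 'none'
})

console.log(classDb.getClass('Animal')) // { id: 'Animal', methods: ['isMammal()'], members: ['int age'] }
console.log(classDb.getRelations().length) // 1
```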
@@ -1,66 +1,63 @@
|
||||
/* eslint-env jasmine */
|
||||
/**
|
||||
* Created by knut on 14-11-18.
|
||||
*/
|
||||
import { parser } from './parser/classDiagram'
|
||||
import classDb from './classDb'
|
||||
|
||||
describe('class diagram, ', function () {
|
||||
describe('when parsing an info graph it', function () {
|
||||
var cd, cDDb
|
||||
beforeEach(function () {
|
||||
cd = require('./parser/classDiagram').parser
|
||||
cDDb = require('./classDb')
|
||||
cd.yy = cDDb
|
||||
parser.yy = classDb
|
||||
})
|
||||
|
||||
it('should handle relation definitions', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 <|-- Class02\n' +
|
||||
'Class03 *-- Class04\n' +
|
||||
'Class05 o-- Class06\n' +
|
||||
'Class07 .. Class08\n' +
|
||||
'Class09 -- Class1'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
})
|
||||
it('should handle relation definition of different types and directions', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class11 <|.. Class12\n' +
|
||||
'Class13 --> Class14\n' +
|
||||
'Class15 ..> Class16\n' +
|
||||
'Class17 ..|> Class18\n' +
|
||||
'Class19 <--* Class20'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
})
|
||||
|
||||
it('should handle cardinality and labels', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 "1" *-- "many" Class02 : contains\n' +
|
||||
'Class03 o-- Class04 : aggregation\n' +
|
||||
'Class05 --> "1" Class06'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
})
|
||||
it('should handle class definitions', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'class Car\n' +
|
||||
'Driver -- Car : drives >\n' +
|
||||
'Car *-- Wheel : have 4 >\n' +
|
||||
'Car -- Person : < owns'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
})
|
||||
|
||||
it('should handle method statements', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Object <|-- ArrayList\n' +
|
||||
'Object : equals()\n' +
|
||||
'ArrayList : Object[] elementData\n' +
|
||||
'ArrayList : size()'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
})
|
||||
it('should handle parsing of method statements grouped by brackets', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'class Dummy {\n' +
|
||||
'String data\n' +
|
||||
' void methods()\n' +
|
||||
@@ -71,11 +68,11 @@ describe('class diagram, ', function () {
|
||||
' departureTime : Date\n' +
|
||||
'}'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
})
|
||||
|
||||
it('should handle parsing of separators', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'class Foo1 {\n' +
|
||||
' You can use\n' +
|
||||
' several lines\n' +
|
||||
@@ -103,112 +100,109 @@ describe('class diagram, ', function () {
|
||||
'String password\n' +
|
||||
'}'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when fetching data from an classDiagram graph it', function () {
|
||||
var cd, cDDb
|
||||
beforeEach(function () {
|
||||
cd = require('./parser/classDiagram').parser
|
||||
cDDb = require('./classDb')
|
||||
cd.yy = cDDb
|
||||
cd.yy.clear()
|
||||
parser.yy = classDb
|
||||
parser.yy.clear()
|
||||
})
|
||||
it('should handle relation definitions EXTENSION', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 <|-- Class02'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
|
||||
var relations = cd.yy.getRelations()
|
||||
const relations = parser.yy.getRelations()
|
||||
|
||||
expect(cd.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(cd.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe(cDDb.relationType.EXTENSION)
|
||||
expect(parser.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(parser.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe(classDb.relationType.EXTENSION)
|
||||
expect(relations[0].relation.type2).toBe('none')
|
||||
expect(relations[0].relation.lineType).toBe(cDDb.lineType.LINE)
|
||||
expect(relations[0].relation.lineType).toBe(classDb.lineType.LINE)
|
||||
})
|
||||
it('should handle relation definitions AGGREGATION and dotted line', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 o.. Class02'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
|
||||
var relations = cd.yy.getRelations()
|
||||
const relations = parser.yy.getRelations()
|
||||
|
||||
expect(cd.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(cd.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe(cDDb.relationType.AGGREGATION)
|
||||
expect(parser.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(parser.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe(classDb.relationType.AGGREGATION)
|
||||
expect(relations[0].relation.type2).toBe('none')
|
||||
expect(relations[0].relation.lineType).toBe(cDDb.lineType.DOTTED_LINE)
|
||||
expect(relations[0].relation.lineType).toBe(classDb.lineType.DOTTED_LINE)
|
||||
})
|
||||
it('should handle relation definitions COMPOSITION on both sides', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 *--* Class02'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
|
||||
var relations = cd.yy.getRelations()
|
||||
const relations = parser.yy.getRelations()
|
||||
|
||||
expect(cd.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(cd.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe(cDDb.relationType.COMPOSITION)
|
||||
expect(relations[0].relation.type2).toBe(cDDb.relationType.COMPOSITION)
|
||||
expect(relations[0].relation.lineType).toBe(cDDb.lineType.LINE)
|
||||
expect(parser.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(parser.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe(classDb.relationType.COMPOSITION)
|
||||
expect(relations[0].relation.type2).toBe(classDb.relationType.COMPOSITION)
|
||||
expect(relations[0].relation.lineType).toBe(classDb.lineType.LINE)
|
||||
})
|
||||
it('should handle relation definitions no types', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 -- Class02'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
|
||||
var relations = cd.yy.getRelations()
|
||||
const relations = parser.yy.getRelations()
|
||||
|
||||
expect(cd.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(cd.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(parser.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(parser.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe('none')
|
||||
expect(relations[0].relation.type2).toBe('none')
|
||||
expect(relations[0].relation.lineType).toBe(cDDb.lineType.LINE)
|
||||
expect(relations[0].relation.lineType).toBe(classDb.lineType.LINE)
|
||||
})
|
||||
it('should handle relation definitions with type only on right side', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 --|> Class02'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
|
||||
var relations = cd.yy.getRelations()
|
||||
const relations = parser.yy.getRelations()
|
||||
|
||||
expect(cd.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(cd.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(parser.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(parser.yy.getClass('Class02').id).toBe('Class02')
|
||||
expect(relations[0].relation.type1).toBe('none')
|
||||
expect(relations[0].relation.type2).toBe(cDDb.relationType.EXTENSION)
|
||||
expect(relations[0].relation.lineType).toBe(cDDb.lineType.LINE)
|
||||
expect(relations[0].relation.type2).toBe(classDb.relationType.EXTENSION)
|
||||
expect(relations[0].relation.lineType).toBe(classDb.lineType.LINE)
|
||||
})
|
||||
|
||||
it('should handle multiple classes and relation definitions', function () {
|
||||
var str = 'classDiagram\n' +
|
||||
const str = 'classDiagram\n' +
|
||||
'Class01 <|-- Class02\n' +
|
||||
'Class03 *-- Class04\n' +
|
||||
'Class05 o-- Class06\n' +
|
||||
'Class07 .. Class08\n' +
|
||||
'Class09 -- Class10'
|
||||
|
||||
cd.parse(str)
|
||||
parser.parse(str)
|
||||
|
||||
var relations = cd.yy.getRelations()
|
||||
const relations = parser.yy.getRelations()
|
||||
|
||||
expect(cd.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(cd.yy.getClass('Class10').id).toBe('Class10')
|
||||
expect(parser.yy.getClass('Class01').id).toBe('Class01')
|
||||
expect(parser.yy.getClass('Class10').id).toBe('Class10')
|
||||
|
||||
expect(relations.length).toBe(5)
|
||||
|
||||
expect(relations[0].relation.type1).toBe(cDDb.relationType.EXTENSION)
|
||||
expect(relations[0].relation.type1).toBe(classDb.relationType.EXTENSION)
|
||||
expect(relations[0].relation.type2).toBe('none')
|
||||
expect(relations[0].relation.lineType).toBe(cDDb.lineType.LINE)
|
||||
expect(relations[0].relation.lineType).toBe(classDb.lineType.LINE)
|
||||
expect(relations[3].relation.type1).toBe('none')
|
||||
expect(relations[3].relation.type2).toBe('none')
|
||||
expect(relations[3].relation.lineType).toBe(cDDb.lineType.DOTTED_LINE)
|
||||
expect(relations[3].relation.lineType).toBe(classDb.lineType.DOTTED_LINE)
|
||||
})
|
||||
})
|
||||
})
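The parser/classDb pair can also be exercised on class bodies. A sketch of an additional spec in the same style (hypothetical, not part of this change set), assuming the same `parser.yy = classDb` and `clear()` wiring as the `beforeEach` blocks above:

```js
it('should split a class body into members and methods', function () {
  const str = 'classDiagram\n' +
    'class Animal {\n' +
    'int age\n' +
    'isMammal()\n' +
    '}'

  parser.parse(str)

  const animal = parser.yy.getClass('Animal')
  expect(animal.members).toContain('int age')
  expect(animal.methods).toContain('isMammal()')
})
```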
@@ -1,31 +1,26 @@
|
||||
/**
|
||||
* Created by knut on 14-11-23.
|
||||
*/
|
||||
import * as d3 from 'd3'
|
||||
import dagre from 'dagre-layout'
|
||||
import graphlib from 'graphlibrary'
|
||||
import { logger } from '../../logger'
|
||||
import classDb from './classDb'
|
||||
import { parser } from './parser/classDiagram'
|
||||
|
||||
var cd = require('./parser/classDiagram').parser
|
||||
var cDDb = require('./classDb')
|
||||
cd.yy = cDDb
|
||||
var d3 = require('../../d3')
|
||||
var Logger = require('../../logger')
|
||||
var log = Logger.Log
|
||||
var dagre = require('dagre')
|
||||
parser.yy = classDb
|
||||
|
||||
var idCache
|
||||
idCache = {}
|
||||
const idCache = {}
|
||||
|
||||
var classCnt = 0
|
||||
var conf = {
|
||||
let classCnt = 0
|
||||
const conf = {
|
||||
dividerMargin: 10,
|
||||
padding: 5,
|
||||
textHeight: 10
|
||||
}
|
||||
|
||||
// Todo optimize
|
||||
var getGraphId = function (label) {
|
||||
var keys = Object.keys(idCache)
|
||||
const getGraphId = function (label) {
|
||||
const keys = Object.keys(idCache)
|
||||
|
||||
var i
|
||||
for (i = 0; i < keys.length; i++) {
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
if (idCache[keys[i]].label === label) {
|
||||
return keys[i]
|
||||
}
|
||||
@@ -37,8 +32,10 @@ var getGraphId = function (label) {
|
||||
/**
|
||||
* Setup arrow head and define the marker. The result is appended to the svg.
|
||||
*/
|
||||
var insertMarkers = function (elem) {
|
||||
elem.append('defs').append('marker')
|
||||
const insertMarkers = function (elem) {
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'extensionStart')
|
||||
.attr('class', 'extension')
|
||||
.attr('refX', 0)
|
||||
@@ -49,7 +46,9 @@ var insertMarkers = function (elem) {
|
||||
.append('path')
|
||||
.attr('d', 'M 1,7 L18,13 V 1 Z')
|
||||
|
||||
elem.append('defs').append('marker')
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'extensionEnd')
|
||||
.attr('refX', 19)
|
||||
.attr('refY', 7)
|
||||
@@ -59,7 +58,9 @@ var insertMarkers = function (elem) {
|
||||
.append('path')
|
||||
.attr('d', 'M 1,1 V 13 L18,7 Z') // this is actual shape for arrowhead
|
||||
|
||||
elem.append('defs').append('marker')
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'compositionStart')
|
||||
.attr('class', 'extension')
|
||||
.attr('refX', 0)
|
||||
@@ -70,7 +71,9 @@ var insertMarkers = function (elem) {
|
||||
.append('path')
|
||||
.attr('d', 'M 18,7 L9,13 L1,7 L9,1 Z')
|
||||
|
||||
elem.append('defs').append('marker')
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'compositionEnd')
|
||||
.attr('refX', 19)
|
||||
.attr('refY', 7)
|
||||
@@ -80,7 +83,9 @@ var insertMarkers = function (elem) {
|
||||
.append('path')
|
||||
.attr('d', 'M 18,7 L9,13 L1,7 L9,1 Z')
|
||||
|
||||
elem.append('defs').append('marker')
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'aggregationStart')
|
||||
.attr('class', 'extension')
|
||||
.attr('refX', 0)
|
||||
@@ -91,7 +96,9 @@ var insertMarkers = function (elem) {
|
||||
.append('path')
|
||||
.attr('d', 'M 18,7 L9,13 L1,7 L9,1 Z')
|
||||
|
||||
elem.append('defs').append('marker')
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'aggregationEnd')
|
||||
.attr('refX', 19)
|
||||
.attr('refY', 7)
|
||||
@@ -101,7 +108,9 @@ var insertMarkers = function (elem) {
|
||||
.append('path')
|
||||
.attr('d', 'M 18,7 L9,13 L1,7 L9,1 Z')
|
||||
|
||||
elem.append('defs').append('marker')
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'dependencyStart')
|
||||
.attr('class', 'extension')
|
||||
.attr('refX', 0)
|
||||
@@ -112,7 +121,9 @@ var insertMarkers = function (elem) {
|
||||
.append('path')
|
||||
.attr('d', 'M 5,7 L9,13 L1,7 L9,1 Z')
|
||||
|
||||
elem.append('defs').append('marker')
|
||||
elem
|
||||
.append('defs')
|
||||
.append('marker')
|
||||
.attr('id', 'dependencyEnd')
|
||||
.attr('refX', 19)
|
||||
.attr('refY', 7)
|
||||
@@ -123,69 +134,95 @@ var insertMarkers = function (elem) {
|
||||
.attr('d', 'M 18,7 L9,13 L14,7 L9,1 Z')
|
||||
}
|
||||
|
||||
var edgeCount = 0
|
||||
var drawEdge = function (elem, path, relation) {
|
||||
var getRelationType = function (type) {
|
||||
let edgeCount = 0
|
||||
let total = 0
|
||||
const drawEdge = function (elem, path, relation) {
|
||||
const getRelationType = function (type) {
|
||||
switch (type) {
|
||||
case cDDb.relationType.AGGREGATION:
|
||||
case classDb.relationType.AGGREGATION:
|
||||
return 'aggregation'
|
||||
case cDDb.relationType.EXTENSION:
|
||||
case classDb.relationType.EXTENSION:
|
||||
return 'extension'
|
||||
case cDDb.relationType.COMPOSITION:
|
||||
case classDb.relationType.COMPOSITION:
|
||||
return 'composition'
|
||||
case cDDb.relationType.DEPENDENCY:
|
||||
case classDb.relationType.DEPENDENCY:
|
||||
return 'dependency'
|
||||
}
|
||||
}
|
||||
|
||||
path.points = path.points.filter(p => !Number.isNaN(p.y))
|
||||
|
||||
// The data for our line
|
||||
var lineData = path.points
|
||||
const lineData = path.points
|
||||
|
||||
// This is the accessor function we talked about above
|
||||
var lineFunction = d3.svg.line()
|
||||
const lineFunction = d3
|
||||
.line()
|
||||
.x(function (d) {
|
||||
return d.x
|
||||
})
|
||||
.y(function (d) {
|
||||
return d.y
|
||||
})
|
||||
.interpolate('basis')
|
||||
.curve(d3.curveBasis)
|
||||
|
||||
var svgPath = elem.append('path')
|
||||
const svgPath = elem
|
||||
.append('path')
|
||||
.attr('d', lineFunction(lineData))
|
||||
.attr('id', 'edge' + edgeCount)
|
||||
.attr('class', 'relation')
|
||||
var url = ''
|
||||
let url = ''
|
||||
if (conf.arrowMarkerAbsolute) {
|
||||
url = window.location.protocol + '//' + window.location.host + window.location.pathname + window.location.search
|
||||
url =
|
||||
window.location.protocol +
|
||||
'//' +
|
||||
window.location.host +
|
||||
window.location.pathname +
|
||||
window.location.search
|
||||
url = url.replace(/\(/g, '\\(')
|
||||
url = url.replace(/\)/g, '\\)')
|
||||
}
|
||||
|
||||
if (relation.relation.type1 !== 'none') {
|
||||
svgPath.attr('marker-start', 'url(' + url + '#' + getRelationType(relation.relation.type1) + 'Start' + ')')
|
||||
svgPath.attr(
|
||||
'marker-start',
|
||||
'url(' +
|
||||
url +
|
||||
'#' +
|
||||
getRelationType(relation.relation.type1) +
|
||||
'Start' +
|
||||
')'
|
||||
)
|
||||
}
|
||||
if (relation.relation.type2 !== 'none') {
|
||||
svgPath.attr('marker-end', 'url(' + url + '#' + getRelationType(relation.relation.type2) + 'End' + ')')
|
||||
svgPath.attr(
|
||||
'marker-end',
|
||||
'url(' +
|
||||
url +
|
||||
'#' +
|
||||
getRelationType(relation.relation.type2) +
|
||||
'End' +
|
||||
')'
|
||||
)
|
||||
}
|
||||
|
||||
var x, y
|
||||
var l = path.points.length
|
||||
if ((l % 2) !== 0) {
|
||||
var p1 = path.points[Math.floor(l / 2)]
|
||||
var p2 = path.points[Math.ceil(l / 2)]
|
||||
let x, y
|
||||
const l = path.points.length
|
||||
if (l % 2 !== 0 && l > 1) {
|
||||
const p1 = path.points[Math.floor(l / 2)]
|
||||
const p2 = path.points[Math.ceil(l / 2)]
|
||||
x = (p1.x + p2.x) / 2
|
||||
y = (p1.y + p2.y) / 2
|
||||
} else {
|
||||
var p = path.points[Math.floor(l / 2)]
|
||||
const p = path.points[Math.floor(l / 2)]
|
||||
x = p.x
|
||||
y = p.y
|
||||
}
|
||||
|
||||
if (typeof relation.title !== 'undefined') {
|
||||
var g = elem.append('g')
|
||||
.attr('class', 'classLabel')
|
||||
var label = g.append('text')
|
||||
const g = elem.append('g').attr('class', 'classLabel')
|
||||
const label = g
|
||||
.append('text')
|
||||
.attr('class', 'label')
|
||||
.attr('x', x)
|
||||
.attr('y', y)
|
||||
@@ -194,7 +231,7 @@ var drawEdge = function (elem, path, relation) {
|
||||
.text(relation.title)
|
||||
|
||||
window.label = label
|
||||
var bounds = label.node().getBBox()
|
||||
const bounds = label.node().getBBox()
|
||||
|
||||
g.insert('rect', ':first-child')
|
||||
.attr('class', 'box')
|
||||
@@ -207,11 +244,12 @@ var drawEdge = function (elem, path, relation) {
|
||||
edgeCount++
|
||||
}
|
||||
|
||||
var drawClass = function (elem, classDef) {
|
||||
log.info('Rendering class ' + classDef)
|
||||
const drawClass = function (elem, classDef) {
|
||||
logger.info('Rendering class ' + classDef)
|
||||
|
||||
var addTspan = function (textEl, txt, isFirst) {
|
||||
var tSpan = textEl.append('tspan')
|
||||
const addTspan = function (textEl, txt, isFirst) {
|
||||
const tSpan = textEl
|
||||
.append('tspan')
|
||||
.attr('x', conf.padding)
|
||||
.text(txt)
|
||||
if (!isFirst) {
|
||||
@@ -219,52 +257,66 @@ var drawClass = function (elem, classDef) {
|
||||
}
|
||||
}
|
||||
|
||||
var id = 'classId' + classCnt
|
||||
var classInfo = {
|
||||
const id = 'classId' + (classCnt % total)
|
||||
const classInfo = {
|
||||
id: id,
|
||||
label: classDef.id,
|
||||
width: 0,
|
||||
height: 0
|
||||
}
|
||||
|
||||
var g = elem.append('g')
|
||||
const g = elem
|
||||
.append('g')
|
||||
.attr('id', id)
|
||||
.attr('class', 'classGroup')
|
||||
var title = g.append('text')
|
||||
const title = g
|
||||
.append('text')
|
||||
.attr('x', conf.padding)
|
||||
.attr('y', conf.textHeight + conf.padding)
|
||||
.text(classDef.id)
|
||||
|
||||
var titleHeight = title.node().getBBox().height
|
||||
const titleHeight = title.node().getBBox().height
|
||||
|
||||
var membersLine = g.append('line') // text label for the x axis
|
||||
const membersLine = g
|
||||
.append('line') // text label for the x axis
|
||||
.attr('x1', 0)
|
||||
.attr('y1', conf.padding + titleHeight + conf.dividerMargin / 2)
|
||||
.attr('y2', conf.padding + titleHeight + conf.dividerMargin / 2)
|
||||
|
||||
var members = g.append('text') // text label for the x axis
|
||||
const members = g
|
||||
.append('text') // text label for the x axis
|
||||
.attr('x', conf.padding)
|
||||
.attr('y', titleHeight + (conf.dividerMargin) + conf.textHeight)
|
||||
.attr('y', titleHeight + conf.dividerMargin + conf.textHeight)
|
||||
.attr('fill', 'white')
|
||||
.attr('class', 'classText')
|
||||
|
||||
var isFirst = true
|
||||
|
||||
let isFirst = true
|
||||
classDef.members.forEach(function (member) {
|
||||
addTspan(members, member, isFirst)
|
||||
isFirst = false
|
||||
})
|
||||
|
||||
var membersBox = members.node().getBBox()
|
||||
const membersBox = members.node().getBBox()
|
||||
|
||||
var methodsLine = g.append('line') // text label for the x axis
|
||||
const methodsLine = g
|
||||
.append('line') // text label for the x axis
|
||||
.attr('x1', 0)
|
||||
.attr('y1', conf.padding + titleHeight + conf.dividerMargin + membersBox.height)
|
||||
.attr('y2', conf.padding + titleHeight + conf.dividerMargin + membersBox.height)
|
||||
.attr(
|
||||
'y1',
|
||||
conf.padding + titleHeight + conf.dividerMargin + membersBox.height
|
||||
)
|
||||
.attr(
|
||||
'y2',
|
||||
conf.padding + titleHeight + conf.dividerMargin + membersBox.height
|
||||
)
|
||||
|
||||
var methods = g.append('text') // text label for the x axis
|
||||
const methods = g
|
||||
.append('text') // text label for the x axis
|
||||
.attr('x', conf.padding)
|
||||
.attr('y', titleHeight + 2 * conf.dividerMargin + membersBox.height + conf.textHeight)
|
||||
.attr(
|
||||
'y',
|
||||
titleHeight + 2 * conf.dividerMargin + membersBox.height + conf.textHeight
|
||||
)
|
||||
.attr('fill', 'white')
|
||||
.attr('class', 'classText')
|
||||
|
||||
@@ -275,7 +327,7 @@ var drawClass = function (elem, classDef) {
|
||||
isFirst = false
|
||||
})
|
||||
|
||||
var classBox = g.node().getBBox()
|
||||
const classBox = g.node().getBBox()
|
||||
g.insert('rect', ':first-child')
|
||||
.attr('x', 0)
|
||||
.attr('y', 0)
|
||||
@@ -293,8 +345,8 @@ var drawClass = function (elem, classDef) {
|
||||
return classInfo
|
||||
}
|
||||
|
||||
module.exports.setConf = function (cnf) {
|
||||
var keys = Object.keys(cnf)
|
||||
export const setConf = function (cnf) {
|
||||
const keys = Object.keys(cnf)
|
||||
|
||||
keys.forEach(function (key) {
|
||||
conf[key] = cnf[key]
|
||||
@@ -305,18 +357,18 @@ module.exports.setConf = function (cnf) {
|
||||
* @param text
|
||||
* @param id
|
||||
*/
|
||||
module.exports.draw = function (text, id) {
|
||||
cd.yy.clear()
|
||||
cd.parse(text)
|
||||
export const draw = function (text, id) {
|
||||
parser.yy.clear()
|
||||
parser.parse(text)
|
||||
|
||||
log.info('Rendering diagram ' + text)
|
||||
logger.info('Rendering diagram ' + text)
|
||||
|
||||
// Fetch the default direction, use TD if none was found
|
||||
var diagram = d3.select('#' + id)
|
||||
const diagram = d3.select(`[id='${id}']`)
|
||||
insertMarkers(diagram)
|
||||
|
||||
// Layout graph, Create a new directed graph
|
||||
var g = new dagre.graphlib.Graph({
|
||||
const g = new graphlib.Graph({
|
||||
multigraph: true
|
||||
})
|
||||
|
||||
@@ -330,37 +382,63 @@ module.exports.draw = function (text, id) {
|
||||
return {}
|
||||
})
|
||||
|
||||
var classes = cDDb.getClasses()
|
||||
var keys = Object.keys(classes)
|
||||
var i
|
||||
for (i = 0; i < keys.length; i++) {
|
||||
var classDef = classes[keys[i]]
|
||||
var node = drawClass(diagram, classDef)
|
||||
const classes = classDb.getClasses()
|
||||
const keys = Object.keys(classes)
|
||||
total = keys.length
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const classDef = classes[keys[i]]
|
||||
const node = drawClass(diagram, classDef)
|
||||
// Add nodes to the graph. The first argument is the node id. The second is
|
||||
// metadata about the node. In this case we're going to add labels to each of
|
||||
// our nodes.
|
||||
g.setNode(node.id, node)
|
||||
log.info('Org height: ' + node.height)
|
||||
logger.info('Org height: ' + node.height)
|
||||
}
|
||||
|
||||
var relations = cDDb.getRelations()
|
||||
const relations = classDb.getRelations()
|
||||
relations.forEach(function (relation) {
|
||||
log.info('tjoho' + getGraphId(relation.id1) + getGraphId(relation.id2) + JSON.stringify(relation))
|
||||
g.setEdge(getGraphId(relation.id1), getGraphId(relation.id2), { relation: relation })
|
||||
logger.info(
|
||||
'tjoho' +
|
||||
getGraphId(relation.id1) +
|
||||
getGraphId(relation.id2) +
|
||||
JSON.stringify(relation)
|
||||
)
|
||||
g.setEdge(getGraphId(relation.id1), getGraphId(relation.id2), {
|
||||
relation: relation
|
||||
})
|
||||
})
|
||||
dagre.layout(g)
|
||||
g.nodes().forEach(function (v) {
|
||||
if (typeof v !== 'undefined') {
|
||||
log.debug('Node ' + v + ': ' + JSON.stringify(g.node(v)))
|
||||
d3.select('#' + v).attr('transform', 'translate(' + (g.node(v).x - (g.node(v).width / 2)) + ',' + (g.node(v).y - (g.node(v).height / 2)) + ' )')
|
||||
if (typeof v !== 'undefined' && typeof g.node(v) !== 'undefined') {
|
||||
logger.debug('Node ' + v + ': ' + JSON.stringify(g.node(v)))
|
||||
d3.select('#' + v).attr(
|
||||
'transform',
|
||||
'translate(' +
|
||||
(g.node(v).x - g.node(v).width / 2) +
|
||||
',' +
|
||||
(g.node(v).y - g.node(v).height / 2) +
|
||||
' )'
|
||||
)
|
||||
}
|
||||
})
|
||||
g.edges().forEach(function (e) {
|
||||
log.debug('Edge ' + e.v + ' -> ' + e.w + ': ' + JSON.stringify(g.edge(e)))
|
||||
drawEdge(diagram, g.edge(e), g.edge(e).relation)
|
||||
if (typeof e !== 'undefined' && typeof g.edge(e) !== 'undefined') {
|
||||
logger.debug(
|
||||
'Edge ' + e.v + ' -> ' + e.w + ': ' + JSON.stringify(g.edge(e))
|
||||
)
|
||||
drawEdge(diagram, g.edge(e), g.edge(e).relation)
|
||||
}
|
||||
})
|
||||
|
||||
diagram.attr('height', '100%')
|
||||
diagram.attr('width', '100%')
|
||||
diagram.attr('viewBox', '0 0 ' + (g.graph().width + 20) + ' ' + (g.graph().height + 20))
|
||||
diagram.attr(
|
||||
'viewBox',
|
||||
'0 0 ' + (g.graph().width + 20) + ' ' + (g.graph().height + 20)
|
||||
)
|
||||
}
|
||||
|
||||
export default {
|
||||
setConf,
|
||||
draw
|
||||
}
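The renderer's public surface after this change is just `setConf` and `draw`. A minimal usage sketch; the module path and the svg id are assumptions for illustration, and an `<svg>` element with that id must already exist in the page, since `draw` selects it by id before rendering into it:

```js
import classRenderer from './classRenderer' // path assumed

// override only the options you need; the rest keep their defaults
classRenderer.setConf({ padding: 10, textHeight: 12 })

const text =
  'classDiagram\n' +
  'Class01 <|-- Class02\n' +
  'Class01 : size()'

// renders into an existing <svg id="class-diagram-1"></svg>
classRenderer.draw(text, 'class-diagram-1')
```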
@@ -127,6 +127,7 @@ graphConfig
|
||||
|
||||
statements
|
||||
: statement
|
||||
| statement NEWLINE
|
||||
| statement NEWLINE statements
|
||||
;
|
||||
|
||||
@@ -144,8 +145,8 @@ statement
|
||||
;
|
||||
|
||||
classStatement
|
||||
: CLASS className
|
||||
| CLASS className STRUCT_START members STRUCT_STOP {/*console.log($2,JSON.stringify($4));*/yy.addMembers($2,$4);}
|
||||
: CLASS className {yy.addClass($2);}
|
||||
| CLASS className STRUCT_START members STRUCT_STOP {/*console.log($2,JSON.stringify($4));*/yy.addClass($2);yy.addMembers($2,$4);}
|
||||
;
|
||||
|
||||
members
|
||||
@@ -155,7 +156,7 @@ members
|
||||
|
||||
methodStatement
|
||||
: className {/*console.log('Rel found',$1);*/}
|
||||
| className LABEL {yy.addMembers($1,yy.cleanupLabel($2));}
|
||||
| className LABEL {yy.addMember($1,yy.cleanupLabel($2));}
|
||||
| MEMBER {console.warn('Member',$1);}
|
||||
| SEPARATOR {/*console.log('sep found',$1);*/}
|
||||
;
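To make the grammar changes above concrete, these are the statement forms the adjusted rules accept (illustrative strings only; the identifiers are made up):

```js
// classStatement, bare form: now registers the class via yy.addClass
const bareClass = 'class Car'

// classStatement with a body: registers the class and hands the body lines
// to yy.addMembers
const classWithBody =
  'class Car {\n' +
  'String brand\n' +
  'drive()\n' +
  '}'

// methodStatement with a label: now routed to yy.addMember (singular)
const labelledMember = 'Car : drive()'
```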
src/diagrams/class/parser/classDiagram.js (new file, 743 lines)
@@ -0,0 +1,743 @@
|
||||
/* parser generated by jison 0.4.18 */
|
||||
/*
|
||||
Returns a Parser object of the following structure:
|
||||
|
||||
Parser: {
|
||||
yy: {}
|
||||
}
|
||||
|
||||
Parser.prototype: {
|
||||
yy: {},
|
||||
trace: function(),
|
||||
symbols_: {associative list: name ==> number},
|
||||
terminals_: {associative list: number ==> name},
|
||||
productions_: [...],
|
||||
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
|
||||
table: [...],
|
||||
defaultActions: {...},
|
||||
parseError: function(str, hash),
|
||||
parse: function(input),
|
||||
|
||||
lexer: {
|
||||
EOF: 1,
|
||||
parseError: function(str, hash),
|
||||
setInput: function(input),
|
||||
input: function(),
|
||||
unput: function(str),
|
||||
more: function(),
|
||||
less: function(n),
|
||||
pastInput: function(),
|
||||
upcomingInput: function(),
|
||||
showPosition: function(),
|
||||
test_match: function(regex_match_array, rule_index),
|
||||
next: function(),
|
||||
lex: function(),
|
||||
begin: function(condition),
|
||||
popState: function(),
|
||||
_currentRules: function(),
|
||||
topState: function(),
|
||||
pushState: function(condition),
|
||||
|
||||
options: {
|
||||
ranges: boolean (optional: true ==> token location info will include a .range[] member)
|
||||
flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
|
||||
backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
|
||||
},
|
||||
|
||||
performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
|
||||
rules: [...],
|
||||
conditions: {associative list: name ==> set},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
token location info (@$, _$, etc.): {
|
||||
first_line: n,
|
||||
last_line: n,
|
||||
first_column: n,
|
||||
last_column: n,
|
||||
range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
|
||||
}
|
||||
|
||||
|
||||
the parseError function receives a 'hash' object with these members for lexer and parser errors: {
|
||||
text: (matched text)
|
||||
token: (the produced terminal token, if any)
|
||||
line: (yylineno)
|
||||
}
|
||||
while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
|
||||
loc: (yylloc)
|
||||
expected: (string describing the set of expected tokens)
|
||||
recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
|
||||
}
|
||||
*/
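The comment block above describes the full shape of the generated parser, but only two parts are touched from outside in this project: `parser.yy`, which the semantic actions call into, and `parse()`. A hedged wiring sketch, mirroring the imports used by the class-diagram spec; the custom `parseError` is optional and relies on `parse()` preferring a `parseError` found on `yy`:

```js
import { parser } from './parser/classDiagram'
import classDb from './classDb'

parser.yy = Object.assign({}, classDb, {
  // parse() copies yy and, if it finds a parseError there, uses it instead of
  // the default one defined below
  parseError: function (str, hash) {
    throw new Error('class diagram parse error on line ' + hash.line + ': ' + str)
  }
})

parser.yy.clear()
parser.parse('classDiagram\nClass01 <|-- Class02')
```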
var parser = (function(){
|
||||
var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[1,11],$V1=[1,12],$V2=[1,13],$V3=[1,15],$V4=[1,16],$V5=[1,17],$V6=[6,8],$V7=[1,26],$V8=[1,27],$V9=[1,28],$Va=[1,29],$Vb=[1,30],$Vc=[1,31],$Vd=[6,8,13,17,23,26,27,28,29,30,31],$Ve=[6,8,13,17,23,26,27,28,29,30,31,45,46,47],$Vf=[23,45,46,47],$Vg=[23,30,31,45,46,47],$Vh=[23,26,27,28,29,45,46,47],$Vi=[6,8,13],$Vj=[1,46];
|
||||
var parser = {trace: function trace () { },
|
||||
yy: {},
|
||||
symbols_: {"error":2,"mermaidDoc":3,"graphConfig":4,"CLASS_DIAGRAM":5,"NEWLINE":6,"statements":7,"EOF":8,"statement":9,"className":10,"alphaNumToken":11,"relationStatement":12,"LABEL":13,"classStatement":14,"methodStatement":15,"CLASS":16,"STRUCT_START":17,"members":18,"STRUCT_STOP":19,"MEMBER":20,"SEPARATOR":21,"relation":22,"STR":23,"relationType":24,"lineType":25,"AGGREGATION":26,"EXTENSION":27,"COMPOSITION":28,"DEPENDENCY":29,"LINE":30,"DOTTED_LINE":31,"commentToken":32,"textToken":33,"graphCodeTokens":34,"textNoTagsToken":35,"TAGSTART":36,"TAGEND":37,"==":38,"--":39,"PCT":40,"DEFAULT":41,"SPACE":42,"MINUS":43,"keywords":44,"UNICODE_TEXT":45,"NUM":46,"ALPHA":47,"$accept":0,"$end":1},
|
||||
terminals_: {2:"error",5:"CLASS_DIAGRAM",6:"NEWLINE",8:"EOF",13:"LABEL",16:"CLASS",17:"STRUCT_START",19:"STRUCT_STOP",20:"MEMBER",21:"SEPARATOR",23:"STR",26:"AGGREGATION",27:"EXTENSION",28:"COMPOSITION",29:"DEPENDENCY",30:"LINE",31:"DOTTED_LINE",34:"graphCodeTokens",36:"TAGSTART",37:"TAGEND",38:"==",39:"--",40:"PCT",41:"DEFAULT",42:"SPACE",43:"MINUS",44:"keywords",45:"UNICODE_TEXT",46:"NUM",47:"ALPHA"},
|
||||
productions_: [0,[3,1],[4,4],[7,1],[7,2],[7,3],[10,2],[10,1],[9,1],[9,2],[9,1],[9,1],[14,2],[14,5],[18,1],[18,2],[15,1],[15,2],[15,1],[15,1],[12,3],[12,4],[12,4],[12,5],[22,3],[22,2],[22,2],[22,1],[24,1],[24,1],[24,1],[24,1],[25,1],[25,1],[32,1],[32,1],[33,1],[33,1],[33,1],[33,1],[33,1],[33,1],[33,1],[35,1],[35,1],[35,1],[35,1],[11,1],[11,1],[11,1]],
|
||||
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
|
||||
/* this == yyval */
|
||||
|
||||
var $0 = $$.length - 1;
|
||||
switch (yystate) {
|
||||
case 6:
|
||||
this.$=$$[$0-1]+$$[$0];
|
||||
break;
|
||||
case 7:
|
||||
this.$=$$[$0];
|
||||
break;
|
||||
case 8:
|
||||
yy.addRelation($$[$0]);
|
||||
break;
|
||||
case 9:
|
||||
$$[$0-1].title = yy.cleanupLabel($$[$0]); yy.addRelation($$[$0-1]);
|
||||
break;
|
||||
case 12:
|
||||
yy.addClass($$[$0]);
|
||||
break;
|
||||
case 13:
|
||||
/*console.log($$[$0-3],JSON.stringify($$[$0-1]));*/yy.addClass($$[$0-3]);yy.addMembers($$[$0-3],$$[$0-1]);
|
||||
break;
|
||||
case 14:
|
||||
this.$ = [$$[$0]];
|
||||
break;
|
||||
case 15:
|
||||
$$[$0].push($$[$0-1]);this.$=$$[$0];
|
||||
break;
|
||||
case 16:
|
||||
/*console.log('Rel found',$$[$0]);*/
|
||||
break;
|
||||
case 17:
|
||||
yy.addMember($$[$0-1],yy.cleanupLabel($$[$0]));
|
||||
break;
|
||||
case 18:
|
||||
console.warn('Member',$$[$0]);
|
||||
break;
|
||||
case 19:
|
||||
/*console.log('sep found',$$[$0]);*/
|
||||
break;
|
||||
case 20:
|
||||
this.$ = {'id1':$$[$0-2],'id2':$$[$0], relation:$$[$0-1], relationTitle1:'none', relationTitle2:'none'};
|
||||
break;
|
||||
case 21:
|
||||
this.$ = {id1:$$[$0-3], id2:$$[$0], relation:$$[$0-1], relationTitle1:$$[$0-2], relationTitle2:'none'}
|
||||
break;
|
||||
case 22:
|
||||
this.$ = {id1:$$[$0-3], id2:$$[$0], relation:$$[$0-2], relationTitle1:'none', relationTitle2:$$[$0-1]};
|
||||
break;
|
||||
case 23:
|
||||
this.$ = {id1:$$[$0-4], id2:$$[$0], relation:$$[$0-2], relationTitle1:$$[$0-3], relationTitle2:$$[$0-1]}
|
||||
break;
|
||||
case 24:
|
||||
this.$={type1:$$[$0-2],type2:$$[$0],lineType:$$[$0-1]};
|
||||
break;
|
||||
case 25:
|
||||
this.$={type1:'none',type2:$$[$0],lineType:$$[$0-1]};
|
||||
break;
|
||||
case 26:
|
||||
this.$={type1:$$[$0-1],type2:'none',lineType:$$[$0]};
|
||||
break;
|
||||
case 27:
|
||||
this.$={type1:'none',type2:'none',lineType:$$[$0]};
|
||||
break;
|
||||
case 28:
|
||||
this.$=yy.relationType.AGGREGATION;
|
||||
break;
|
||||
case 29:
|
||||
this.$=yy.relationType.EXTENSION;
|
||||
break;
|
||||
case 30:
|
||||
this.$=yy.relationType.COMPOSITION;
|
||||
break;
|
||||
case 31:
|
||||
this.$=yy.relationType.DEPENDENCY;
|
||||
break;
|
||||
case 32:
|
||||
this.$=yy.lineType.LINE;
|
||||
break;
|
||||
case 33:
|
||||
this.$=yy.lineType.DOTTED_LINE;
|
||||
break;
|
||||
}
|
||||
},
|
||||
table: [{3:1,4:2,5:[1,3]},{1:[3]},{1:[2,1]},{6:[1,4]},{7:5,9:6,10:10,11:14,12:7,14:8,15:9,16:$V0,20:$V1,21:$V2,45:$V3,46:$V4,47:$V5},{8:[1,18]},{6:[1,19],8:[2,3]},o($V6,[2,8],{13:[1,20]}),o($V6,[2,10]),o($V6,[2,11]),o($V6,[2,16],{22:21,24:24,25:25,13:[1,23],23:[1,22],26:$V7,27:$V8,28:$V9,29:$Va,30:$Vb,31:$Vc}),{10:32,11:14,45:$V3,46:$V4,47:$V5},o($V6,[2,18]),o($V6,[2,19]),o($Vd,[2,7],{11:14,10:33,45:$V3,46:$V4,47:$V5}),o($Ve,[2,47]),o($Ve,[2,48]),o($Ve,[2,49]),{1:[2,2]},{7:34,8:[2,4],9:6,10:10,11:14,12:7,14:8,15:9,16:$V0,20:$V1,21:$V2,45:$V3,46:$V4,47:$V5},o($V6,[2,9]),{10:35,11:14,23:[1,36],45:$V3,46:$V4,47:$V5},{22:37,24:24,25:25,26:$V7,27:$V8,28:$V9,29:$Va,30:$Vb,31:$Vc},o($V6,[2,17]),{25:38,30:$Vb,31:$Vc},o($Vf,[2,27],{24:39,26:$V7,27:$V8,28:$V9,29:$Va}),o($Vg,[2,28]),o($Vg,[2,29]),o($Vg,[2,30]),o($Vg,[2,31]),o($Vh,[2,32]),o($Vh,[2,33]),o($V6,[2,12],{17:[1,40]}),o($Vd,[2,6]),{8:[2,5]},o($Vi,[2,20]),{10:41,11:14,45:$V3,46:$V4,47:$V5},{10:42,11:14,23:[1,43],45:$V3,46:$V4,47:$V5},o($Vf,[2,26],{24:44,26:$V7,27:$V8,28:$V9,29:$Va}),o($Vf,[2,25]),{18:45,20:$Vj},o($Vi,[2,22]),o($Vi,[2,21]),{10:47,11:14,45:$V3,46:$V4,47:$V5},o($Vf,[2,24]),{19:[1,48]},{18:49,19:[2,14],20:$Vj},o($Vi,[2,23]),o($V6,[2,13]),{19:[2,15]}],
|
||||
defaultActions: {2:[2,1],18:[2,2],34:[2,5],49:[2,15]},
|
||||
parseError: function parseError (str, hash) {
|
||||
if (hash.recoverable) {
|
||||
this.trace(str);
|
||||
} else {
|
||||
var error = new Error(str);
|
||||
error.hash = hash;
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
parse: function parse(input) {
|
||||
var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
|
||||
var args = lstack.slice.call(arguments, 1);
|
||||
var lexer = Object.create(this.lexer);
|
||||
var sharedState = { yy: {} };
|
||||
for (var k in this.yy) {
|
||||
if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
|
||||
sharedState.yy[k] = this.yy[k];
|
||||
}
|
||||
}
|
||||
lexer.setInput(input, sharedState.yy);
|
||||
sharedState.yy.lexer = lexer;
|
||||
sharedState.yy.parser = this;
|
||||
if (typeof lexer.yylloc == 'undefined') {
|
||||
lexer.yylloc = {};
|
||||
}
|
||||
var yyloc = lexer.yylloc;
|
||||
lstack.push(yyloc);
|
||||
var ranges = lexer.options && lexer.options.ranges;
|
||||
if (typeof sharedState.yy.parseError === 'function') {
|
||||
this.parseError = sharedState.yy.parseError;
|
||||
} else {
|
||||
this.parseError = Object.getPrototypeOf(this).parseError;
|
||||
}
|
||||
function popStack(n) {
|
||||
stack.length = stack.length - 2 * n;
|
||||
vstack.length = vstack.length - n;
|
||||
lstack.length = lstack.length - n;
|
||||
}
|
||||
function lex() {
|
||||
var token;
|
||||
token = tstack.pop() || lexer.lex() || EOF;
|
||||
if (typeof token !== 'number') {
|
||||
if (token instanceof Array) {
|
||||
tstack = token;
|
||||
token = tstack.pop();
|
||||
}
|
||||
token = self.symbols_[token] || token;
|
||||
}
|
||||
return token;
|
||||
}
|
||||
var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
|
||||
while (true) {
|
||||
state = stack[stack.length - 1];
|
||||
if (this.defaultActions[state]) {
|
||||
action = this.defaultActions[state];
|
||||
} else {
|
||||
if (symbol === null || typeof symbol == 'undefined') {
|
||||
symbol = lex();
|
||||
}
|
||||
action = table[state] && table[state][symbol];
|
||||
}
|
||||
if (typeof action === 'undefined' || !action.length || !action[0]) {
|
||||
var errStr = '';
|
||||
expected = [];
|
||||
for (p in table[state]) {
|
||||
if (this.terminals_[p] && p > TERROR) {
|
||||
expected.push('\'' + this.terminals_[p] + '\'');
|
||||
}
|
||||
}
|
||||
if (lexer.showPosition) {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\'';
|
||||
} else {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'');
|
||||
}
|
||||
this.parseError(errStr, {
|
||||
text: lexer.match,
|
||||
token: this.terminals_[symbol] || symbol,
|
||||
line: lexer.yylineno,
|
||||
loc: yyloc,
|
||||
expected: expected
|
||||
});
|
||||
}
|
||||
if (action[0] instanceof Array && action.length > 1) {
|
||||
throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol);
|
||||
}
|
||||
switch (action[0]) {
|
||||
case 1:
|
||||
stack.push(symbol);
|
||||
vstack.push(lexer.yytext);
|
||||
lstack.push(lexer.yylloc);
|
||||
stack.push(action[1]);
|
||||
symbol = null;
|
||||
if (!preErrorSymbol) {
|
||||
yyleng = lexer.yyleng;
|
||||
yytext = lexer.yytext;
|
||||
yylineno = lexer.yylineno;
|
||||
yyloc = lexer.yylloc;
|
||||
if (recovering > 0) {
|
||||
recovering--;
|
||||
}
|
||||
} else {
|
||||
symbol = preErrorSymbol;
|
||||
preErrorSymbol = null;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
len = this.productions_[action[1]][1];
|
||||
yyval.$ = vstack[vstack.length - len];
|
||||
yyval._$ = {
|
||||
first_line: lstack[lstack.length - (len || 1)].first_line,
|
||||
last_line: lstack[lstack.length - 1].last_line,
|
||||
first_column: lstack[lstack.length - (len || 1)].first_column,
|
||||
last_column: lstack[lstack.length - 1].last_column
|
||||
};
|
||||
if (ranges) {
|
||||
yyval._$.range = [
|
||||
lstack[lstack.length - (len || 1)].range[0],
|
||||
lstack[lstack.length - 1].range[1]
|
||||
];
|
||||
}
|
||||
r = this.performAction.apply(yyval, [
|
||||
yytext,
|
||||
yyleng,
|
||||
yylineno,
|
||||
sharedState.yy,
|
||||
action[1],
|
||||
vstack,
|
||||
lstack
|
||||
].concat(args));
|
||||
if (typeof r !== 'undefined') {
|
||||
return r;
|
||||
}
|
||||
if (len) {
|
||||
stack = stack.slice(0, -1 * len * 2);
|
||||
vstack = vstack.slice(0, -1 * len);
|
||||
lstack = lstack.slice(0, -1 * len);
|
||||
}
|
||||
stack.push(this.productions_[action[1]][0]);
|
||||
vstack.push(yyval.$);
|
||||
lstack.push(yyval._$);
|
||||
newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
|
||||
stack.push(newState);
|
||||
break;
|
||||
case 3:
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}};
|
||||
|
||||
/* generated by jison-lex 0.3.4 */
|
||||
var lexer = (function(){
|
||||
var lexer = ({
|
||||
|
||||
EOF:1,
|
||||
|
||||
parseError:function parseError(str, hash) {
|
||||
if (this.yy.parser) {
|
||||
this.yy.parser.parseError(str, hash);
|
||||
} else {
|
||||
throw new Error(str);
|
||||
}
|
||||
},
|
||||
|
||||
// resets the lexer, sets new input
|
||||
setInput:function (input, yy) {
|
||||
this.yy = yy || this.yy || {};
|
||||
this._input = input;
|
||||
this._more = this._backtrack = this.done = false;
|
||||
this.yylineno = this.yyleng = 0;
|
||||
this.yytext = this.matched = this.match = '';
|
||||
this.conditionStack = ['INITIAL'];
|
||||
this.yylloc = {
|
||||
first_line: 1,
|
||||
first_column: 0,
|
||||
last_line: 1,
|
||||
last_column: 0
|
||||
};
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [0,0];
|
||||
}
|
||||
this.offset = 0;
|
||||
return this;
|
||||
},
|
||||
|
||||
// consumes and returns one char from the input
|
||||
input:function () {
|
||||
var ch = this._input[0];
|
||||
this.yytext += ch;
|
||||
this.yyleng++;
|
||||
this.offset++;
|
||||
this.match += ch;
|
||||
this.matched += ch;
|
||||
var lines = ch.match(/(?:\r\n?|\n).*/g);
|
||||
if (lines) {
|
||||
this.yylineno++;
|
||||
this.yylloc.last_line++;
|
||||
} else {
|
||||
this.yylloc.last_column++;
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range[1]++;
|
||||
}
|
||||
|
||||
this._input = this._input.slice(1);
|
||||
return ch;
|
||||
},
|
||||
|
||||
// unshifts one char (or a string) into the input
|
||||
unput:function (ch) {
|
||||
var len = ch.length;
|
||||
var lines = ch.split(/(?:\r\n?|\n)/g);
|
||||
|
||||
this._input = ch + this._input;
|
||||
this.yytext = this.yytext.substr(0, this.yytext.length - len);
|
||||
//this.yyleng -= len;
|
||||
this.offset -= len;
|
||||
var oldLines = this.match.split(/(?:\r\n?|\n)/g);
|
||||
this.match = this.match.substr(0, this.match.length - 1);
|
||||
this.matched = this.matched.substr(0, this.matched.length - 1);
|
||||
|
||||
if (lines.length - 1) {
|
||||
this.yylineno -= lines.length - 1;
|
||||
}
|
||||
var r = this.yylloc.range;
|
||||
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: lines ?
|
||||
(lines.length === oldLines.length ? this.yylloc.first_column : 0)
|
||||
+ oldLines[oldLines.length - lines.length].length - lines[0].length :
|
||||
this.yylloc.first_column - len
|
||||
};
|
||||
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [r[0], r[0] + this.yyleng - len];
|
||||
}
|
||||
this.yyleng = this.yytext.length;
|
||||
return this;
|
||||
},
|
||||
|
||||
// When called from action, caches matched text and appends it on next action
|
||||
more:function () {
|
||||
this._more = true;
|
||||
return this;
|
||||
},
|
||||
|
||||
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
|
||||
reject:function () {
|
||||
if (this.options.backtrack_lexer) {
|
||||
this._backtrack = true;
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
|
||||
text: "",
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
});
|
||||
|
||||
}
|
||||
return this;
|
||||
},
|
||||
|
||||
// retain first n characters of the match
|
||||
less:function (n) {
|
||||
this.unput(this.match.slice(n));
|
||||
},
|
||||
|
||||
// displays already matched input, i.e. for error messages
|
||||
pastInput:function () {
|
||||
var past = this.matched.substr(0, this.matched.length - this.match.length);
|
||||
return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
|
||||
},
|
||||
|
||||
// displays upcoming input, i.e. for error messages
|
||||
upcomingInput:function () {
|
||||
var next = this.match;
|
||||
if (next.length < 20) {
|
||||
next += this._input.substr(0, 20-next.length);
|
||||
}
|
||||
return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, "");
|
||||
},
|
||||
|
||||
// displays the character position where the lexing error occurred, i.e. for error messages
|
||||
showPosition:function () {
|
||||
var pre = this.pastInput();
|
||||
var c = new Array(pre.length + 1).join("-");
|
||||
return pre + this.upcomingInput() + "\n" + c + "^";
|
||||
},
|
||||
|
||||
// test the lexed token: return FALSE when not a match, otherwise return token
|
||||
test_match:function(match, indexed_rule) {
|
||||
var token,
|
||||
lines,
|
||||
backup;
|
||||
|
||||
if (this.options.backtrack_lexer) {
|
||||
// save context
|
||||
backup = {
|
||||
yylineno: this.yylineno,
|
||||
yylloc: {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.last_line,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: this.yylloc.last_column
|
||||
},
|
||||
yytext: this.yytext,
|
||||
match: this.match,
|
||||
matches: this.matches,
|
||||
matched: this.matched,
|
||||
yyleng: this.yyleng,
|
||||
offset: this.offset,
|
||||
_more: this._more,
|
||||
_input: this._input,
|
||||
yy: this.yy,
|
||||
conditionStack: this.conditionStack.slice(0),
|
||||
done: this.done
|
||||
};
|
||||
if (this.options.ranges) {
|
||||
backup.yylloc.range = this.yylloc.range.slice(0);
|
||||
}
|
||||
}
|
||||
|
||||
lines = match[0].match(/(?:\r\n?|\n).*/g);
|
||||
if (lines) {
|
||||
this.yylineno += lines.length;
|
||||
}
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.last_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.last_column,
|
||||
last_column: lines ?
|
||||
lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
|
||||
this.yylloc.last_column + match[0].length
|
||||
};
|
||||
this.yytext += match[0];
|
||||
this.match += match[0];
|
||||
this.matches = match;
|
||||
this.yyleng = this.yytext.length;
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [this.offset, this.offset += this.yyleng];
|
||||
}
|
||||
this._more = false;
|
||||
this._backtrack = false;
|
||||
this._input = this._input.slice(match[0].length);
|
||||
this.matched += match[0];
|
||||
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
|
||||
if (this.done && this._input) {
|
||||
this.done = false;
|
||||
}
|
||||
if (token) {
|
||||
return token;
|
||||
} else if (this._backtrack) {
|
||||
// recover context
|
||||
for (var k in backup) {
|
||||
this[k] = backup[k];
|
||||
}
|
||||
return false; // rule action called reject() implying the next rule should be tested instead.
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
// return next match in input
|
||||
next:function () {
|
||||
if (this.done) {
|
||||
return this.EOF;
|
||||
}
|
||||
if (!this._input) {
|
||||
this.done = true;
|
||||
}
|
||||
|
||||
var token,
|
||||
match,
|
||||
tempMatch,
|
||||
index;
|
||||
if (!this._more) {
|
||||
this.yytext = '';
|
||||
this.match = '';
|
||||
}
|
||||
var rules = this._currentRules();
|
||||
for (var i = 0; i < rules.length; i++) {
|
||||
tempMatch = this._input.match(this.rules[rules[i]]);
|
||||
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
|
||||
match = tempMatch;
|
||||
index = i;
|
||||
if (this.options.backtrack_lexer) {
|
||||
token = this.test_match(tempMatch, rules[i]);
|
||||
if (token !== false) {
|
||||
return token;
|
||||
} else if (this._backtrack) {
|
||||
match = false;
|
||||
continue; // rule action called reject() implying a rule MISmatch.
|
||||
} else {
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false;
|
||||
}
|
||||
} else if (!this.options.flex) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (match) {
|
||||
token = this.test_match(match, rules[index]);
|
||||
if (token !== false) {
|
||||
return token;
|
||||
}
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false;
|
||||
}
|
||||
if (this._input === "") {
|
||||
return this.EOF;
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
|
||||
text: "",
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
// return next match that has a token
|
||||
lex:function lex () {
|
||||
var r = this.next();
|
||||
if (r) {
|
||||
return r;
|
||||
} else {
|
||||
return this.lex();
|
||||
}
|
||||
},
|
||||
|
||||
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
|
||||
begin:function begin (condition) {
|
||||
this.conditionStack.push(condition);
|
||||
},
|
||||
|
||||
// pop the previously active lexer condition state off the condition stack
|
||||
popState:function popState () {
|
||||
var n = this.conditionStack.length - 1;
|
||||
if (n > 0) {
|
||||
return this.conditionStack.pop();
|
||||
} else {
|
||||
return this.conditionStack[0];
|
||||
}
|
||||
},
|
||||
|
||||
// produce the lexer rule set which is active for the currently active lexer condition state
|
||||
_currentRules:function _currentRules () {
|
||||
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
|
||||
return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
|
||||
} else {
|
||||
return this.conditions["INITIAL"].rules;
|
||||
}
|
||||
},
|
||||
|
||||
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
|
||||
topState:function topState (n) {
|
||||
n = this.conditionStack.length - 1 - Math.abs(n || 0);
|
||||
if (n >= 0) {
|
||||
return this.conditionStack[n];
|
||||
} else {
|
||||
return "INITIAL";
|
||||
}
|
||||
},
|
||||
|
||||
// alias for begin(condition)
|
||||
pushState:function pushState (condition) {
|
||||
this.begin(condition);
|
||||
},
|
||||
|
||||
// return the number of states currently on the stack
|
||||
stateStackSize:function stateStackSize() {
|
||||
return this.conditionStack.length;
|
||||
},
|
||||
options: {},
|
||||
performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
|
||||
var YYSTATE=YY_START;
|
||||
switch($avoiding_name_collisions) {
|
||||
case 0:/* do nothing */
|
||||
break;
|
||||
case 1:return 6;
|
||||
break;
|
||||
case 2:/* skip whitespace */
|
||||
break;
|
||||
case 3:return 5;
|
||||
break;
|
||||
case 4: this.begin("struct"); /*console.log('Starting struct');*/return 17;
|
||||
break;
|
||||
case 5: /*console.log('Ending struct');*/this.popState(); return 19;
|
||||
break;
|
||||
case 6:/* nothing */
|
||||
break;
|
||||
case 7: /*console.log('lex-member: ' + yy_.yytext);*/ return "MEMBER";
|
||||
break;
|
||||
case 8:return 16;
|
||||
break;
|
||||
case 9:this.begin("string");
|
||||
break;
|
||||
case 10:this.popState();
|
||||
break;
|
||||
case 11:return "STR";
|
||||
break;
|
||||
case 12:return 27;
|
||||
break;
|
||||
case 13:return 27;
|
||||
break;
|
||||
case 14:return 29;
|
||||
break;
|
||||
case 15:return 29;
|
||||
break;
|
||||
case 16:return 28;
|
||||
break;
|
||||
case 17:return 26;
|
||||
break;
|
||||
case 18:return 30;
|
||||
break;
|
||||
case 19:return 31;
|
||||
break;
|
||||
case 20:return 13;
|
||||
break;
|
||||
case 21:return 43;
|
||||
break;
|
||||
case 22:return 'DOT';
|
||||
break;
|
||||
case 23:return 'PLUS';
|
||||
break;
|
||||
case 24:return 40;
|
||||
break;
|
||||
case 25:return 'EQUALS';
|
||||
break;
|
||||
case 26:return 'EQUALS';
|
||||
break;
|
||||
case 27:return 47;
|
||||
break;
|
||||
case 28:return 'PUNCTUATION';
|
||||
break;
|
||||
case 29:return 46;
|
||||
break;
|
||||
case 30:return 45;
|
||||
break;
|
||||
case 31:return 42;
|
||||
break;
|
||||
case 32:return 8;
|
||||
break;
|
||||
}
|
||||
},
|
||||
rules: [/^(?:%%[^\n]*)/,/^(?:\n+)/,/^(?:\s+)/,/^(?:classDiagram\b)/,/^(?:[\{])/,/^(?:\})/,/^(?:[\n])/,/^(?:[^\{\}\n]*)/,/^(?:class\b)/,/^(?:["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:\s*<\|)/,/^(?:\s*\|>)/,/^(?:\s*>)/,/^(?:\s*<)/,/^(?:\s*\*)/,/^(?:\s*o\b)/,/^(?:--)/,/^(?:\.\.)/,/^(?::[^#\n;]+)/,/^(?:-)/,/^(?:\.)/,/^(?:\+)/,/^(?:%)/,/^(?:=)/,/^(?:=)/,/^(?:[A-Za-z]+)/,/^(?:[!"#$%&'*+,-.`?\\_\/])/,/^(?:[0-9]+)/,/^(?:[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|
[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|[\uFFD2-\uFFD7\uFFDA-\uFFDC])/,/^(?:\s)/,/^(?:$)/],
conditions: {"string":{"rules":[10,11],"inclusive":false},"struct":{"rules":[5,6,7],"inclusive":false},"INITIAL":{"rules":[0,1,2,3,4,8,9,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32],"inclusive":true}}
});
return lexer;
})();
parser.lexer = lexer;
function Parser () {
this.yy = {};
}
Parser.prototype = parser;parser.Parser = Parser;
return new Parser;
})();


if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
exports.parser = parser;
exports.Parser = parser.Parser;
exports.parse = function () { return parser.parse.apply(parser, arguments); };
exports.main = function commonjsMain (args) {
if (!args[1]) {
console.log('Usage: '+args[0]+' FILE');
process.exit(1);
}
var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
return exports.parser.parse(source);
};
if (typeof module !== 'undefined' && require.main === module) {
exports.main(process.argv.slice(1));
}
}
@@ -1,80 +0,0 @@
var Logger = require('../../logger')
var log = Logger.Log
var relations = []

var classes
classes = {
}

/**
 * Function called by the parser when a class definition has been found.
 * @param id
 */
exports.addClass = function (id) {
if (typeof classes[id] === 'undefined') {
classes[id] = {
id: id,
methods: [],
members: []
}
}
}

exports.clear = function () {
relations = []
classes = {}
}

module.exports.getClass = function (id) {
return classes[id]
}
module.exports.getClasses = function () {
return classes
}

module.exports.getRelations = function () {
return relations
}

exports.addRelation = function (relation) {
log.warn('Adding relation: ' + JSON.stringify(relation))
exports.addClass(relation.id1)
exports.addClass(relation.id2)

relations.push(relation)
}

exports.addMembers = function (className, MembersArr) {
var theClass = classes[className]
if (typeof MembersArr === 'string') {
if (MembersArr.substr(-1) === ')') {
theClass.methods.push(MembersArr)
} else {
theClass.members.push(MembersArr)
}
}
}

exports.cleanupLabel = function (label) {
if (label.substring(0, 1) === ':') {
return label.substr(2).trim()
} else {
return label.trim()
}
}

exports.lineType = {
LINE: 0,
DOTTED_LINE: 1
}

exports.relationType = {
AGGREGATION: 0,
EXTENSION: 1,
COMPOSITION: 2,
DEPENDENCY: 3
}
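The class database above is the object the class-diagram parser receives as yy, so its semantic actions can call addClass, addRelation and addMembers directly. A minimal sketch of driving it by hand, assuming it is loaded from a local path (the require path and the class names are illustrative, not taken from the diff):

// Illustrative sketch only; the require path is an assumption.
var classDb = require('./classDb')

classDb.clear()
classDb.addClass('Animal')
classDb.addMembers('Animal', '+int age')  // no trailing ')'  -> stored in members
classDb.addMembers('Animal', '+run()')    // trailing ')'     -> stored in methods
classDb.addRelation({
  id1: 'Duck',
  id2: 'Animal',
  relation: {
    type1: classDb.relationType.EXTENSION,
    type2: 'none',
    lineType: classDb.lineType.LINE
  },
  relationTitle1: 'none',
  relationTitle2: 'none'
})

console.log(classDb.getClass('Animal').methods)  // [ '+run()' ]
console.log(classDb.getRelations().length)       // 1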
@@ -1,28 +0,0 @@
/* eslint-env jasmine */
/**
 * Created by knut on 14-11-18.
 */

describe('class diagram, ', function () {
describe('when rendering a classDiagram', function () {
beforeEach(function () {
Object.defineProperties(window.HTMLElement.prototype, {
getBBox: {
get: function () { return { x: 10, y: 10, width: 100, height: 100 } }
},
offsetLeft: {
get: function () { return parseFloat(window.getComputedStyle(this).marginLeft) || 0 }
},
offsetTop: {
get: function () { return parseFloat(window.getComputedStyle(this).marginTop) || 0 }
},
offsetHeight: {
get: function () { return parseFloat(window.getComputedStyle(this).height) || 0 }
},
offsetWidth: {
get: function () { return parseFloat(window.getComputedStyle(this).width) || 0 }
}
})
})
})
})
@@ -1,736 +0,0 @@
|
||||
/* parser generated by jison 0.4.17 */
|
||||
/*
|
||||
Returns a Parser object of the following structure:
|
||||
|
||||
Parser: {
|
||||
yy: {}
|
||||
}
|
||||
|
||||
Parser.prototype: {
|
||||
yy: {},
|
||||
trace: function(),
|
||||
symbols_: {associative list: name ==> number},
|
||||
terminals_: {associative list: number ==> name},
|
||||
productions_: [...],
|
||||
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
|
||||
table: [...],
|
||||
defaultActions: {...},
|
||||
parseError: function(str, hash),
|
||||
parse: function(input),
|
||||
|
||||
lexer: {
|
||||
EOF: 1,
|
||||
parseError: function(str, hash),
|
||||
setInput: function(input),
|
||||
input: function(),
|
||||
unput: function(str),
|
||||
more: function(),
|
||||
less: function(n),
|
||||
pastInput: function(),
|
||||
upcomingInput: function(),
|
||||
showPosition: function(),
|
||||
test_match: function(regex_match_array, rule_index),
|
||||
next: function(),
|
||||
lex: function(),
|
||||
begin: function(condition),
|
||||
popState: function(),
|
||||
_currentRules: function(),
|
||||
topState: function(),
|
||||
pushState: function(condition),
|
||||
|
||||
options: {
|
||||
ranges: boolean (optional: true ==> token location info will include a .range[] member)
|
||||
flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
|
||||
backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
|
||||
},
|
||||
|
||||
performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
|
||||
rules: [...],
|
||||
conditions: {associative list: name ==> set},
|
||||
}
|
||||
}
|
||||
|
||||
token location info (@$, _$, etc.): {
|
||||
first_line: n,
|
||||
last_line: n,
|
||||
first_column: n,
|
||||
last_column: n,
|
||||
range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
|
||||
}
|
||||
|
||||
the parseError function receives a 'hash' object with these members for lexer and parser errors: {
|
||||
text: (matched text)
|
||||
token: (the produced terminal token, if any)
|
||||
line: (yylineno)
|
||||
}
|
||||
while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
|
||||
loc: (yylloc)
|
||||
expected: (string describing the set of expected tokens)
|
||||
recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
|
||||
}
|
||||
*/
|
||||
var parser = (function () {
|
||||
var o = function (k, v, o, l) { for (o = o || {}, l = k.length; l--; o[k[l]] = v);return o }, $V0 = [1, 11], $V1 = [1, 12], $V2 = [1, 13], $V3 = [1, 15], $V4 = [1, 16], $V5 = [1, 17], $V6 = [6, 8], $V7 = [1, 26], $V8 = [1, 27], $V9 = [1, 28], $Va = [1, 29], $Vb = [1, 30], $Vc = [1, 31], $Vd = [6, 8, 13, 17, 23, 26, 27, 28, 29, 30, 31], $Ve = [6, 8, 13, 17, 23, 26, 27, 28, 29, 30, 31, 45, 46, 47], $Vf = [23, 45, 46, 47], $Vg = [23, 30, 31, 45, 46, 47], $Vh = [23, 26, 27, 28, 29, 45, 46, 47], $Vi = [6, 8, 13], $Vj = [1, 46]
|
||||
var parser = {trace: function trace () { },
|
||||
yy: {},
|
||||
symbols_: {'error': 2, 'mermaidDoc': 3, 'graphConfig': 4, 'CLASS_DIAGRAM': 5, 'NEWLINE': 6, 'statements': 7, 'EOF': 8, 'statement': 9, 'className': 10, 'alphaNumToken': 11, 'relationStatement': 12, 'LABEL': 13, 'classStatement': 14, 'methodStatement': 15, 'CLASS': 16, 'STRUCT_START': 17, 'members': 18, 'STRUCT_STOP': 19, 'MEMBER': 20, 'SEPARATOR': 21, 'relation': 22, 'STR': 23, 'relationType': 24, 'lineType': 25, 'AGGREGATION': 26, 'EXTENSION': 27, 'COMPOSITION': 28, 'DEPENDENCY': 29, 'LINE': 30, 'DOTTED_LINE': 31, 'commentToken': 32, 'textToken': 33, 'graphCodeTokens': 34, 'textNoTagsToken': 35, 'TAGSTART': 36, 'TAGEND': 37, '==': 38, '--': 39, 'PCT': 40, 'DEFAULT': 41, 'SPACE': 42, 'MINUS': 43, 'keywords': 44, 'UNICODE_TEXT': 45, 'NUM': 46, 'ALPHA': 47, '$accept': 0, '$end': 1},
|
||||
terminals_: {2: 'error', 5: 'CLASS_DIAGRAM', 6: 'NEWLINE', 8: 'EOF', 13: 'LABEL', 16: 'CLASS', 17: 'STRUCT_START', 19: 'STRUCT_STOP', 20: 'MEMBER', 21: 'SEPARATOR', 23: 'STR', 26: 'AGGREGATION', 27: 'EXTENSION', 28: 'COMPOSITION', 29: 'DEPENDENCY', 30: 'LINE', 31: 'DOTTED_LINE', 34: 'graphCodeTokens', 36: 'TAGSTART', 37: 'TAGEND', 38: '==', 39: '--', 40: 'PCT', 41: 'DEFAULT', 42: 'SPACE', 43: 'MINUS', 44: 'keywords', 45: 'UNICODE_TEXT', 46: 'NUM', 47: 'ALPHA'},
|
||||
productions_: [0, [3, 1], [4, 4], [7, 1], [7, 3], [10, 2], [10, 1], [9, 1], [9, 2], [9, 1], [9, 1], [14, 2], [14, 5], [18, 1], [18, 2], [15, 1], [15, 2], [15, 1], [15, 1], [12, 3], [12, 4], [12, 4], [12, 5], [22, 3], [22, 2], [22, 2], [22, 1], [24, 1], [24, 1], [24, 1], [24, 1], [25, 1], [25, 1], [32, 1], [32, 1], [33, 1], [33, 1], [33, 1], [33, 1], [33, 1], [33, 1], [33, 1], [35, 1], [35, 1], [35, 1], [35, 1], [11, 1], [11, 1], [11, 1]],
|
||||
performAction: function anonymous (yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
|
||||
/* this == yyval */
|
||||
|
||||
var $0 = $$.length - 1
|
||||
switch (yystate) {
|
||||
case 5:
|
||||
this.$ = $$[$0 - 1] + $$[$0]
|
||||
break
|
||||
case 6:
|
||||
this.$ = $$[$0]
|
||||
break
|
||||
case 7:
|
||||
yy.addRelation($$[$0])
|
||||
break
|
||||
case 8:
|
||||
$$[$0 - 1].title = yy.cleanupLabel($$[$0]); yy.addRelation($$[$0 - 1])
|
||||
break
|
||||
case 12:
|
||||
/* console.log($$[$0-3],JSON.stringify($$[$0-1])); */yy.addMembers($$[$0 - 3], $$[$0 - 1])
|
||||
break
|
||||
case 13:
|
||||
this.$ = [$$[$0]]
|
||||
break
|
||||
case 14:
|
||||
$$[$0].push($$[$0 - 1]); this.$ = $$[$0]
|
||||
break
|
||||
case 15:
|
||||
/* console.log('Rel found',$$[$0]); */
|
||||
break
|
||||
case 16:
|
||||
yy.addMembers($$[$0 - 1], yy.cleanupLabel($$[$0]))
|
||||
break
|
||||
case 17:
|
||||
console.warn('Member', $$[$0])
|
||||
break
|
||||
case 18:
|
||||
/* console.log('sep found',$$[$0]); */
|
||||
break
|
||||
case 19:
|
||||
this.$ = {'id1': $$[$0 - 2], 'id2': $$[$0], relation: $$[$0 - 1], relationTitle1: 'none', relationTitle2: 'none'}
|
||||
break
|
||||
case 20:
|
||||
this.$ = {id1: $$[$0 - 3], id2: $$[$0], relation: $$[$0 - 1], relationTitle1: $$[$0 - 2], relationTitle2: 'none'}
|
||||
break
|
||||
case 21:
|
||||
this.$ = {id1: $$[$0 - 3], id2: $$[$0], relation: $$[$0 - 2], relationTitle1: 'none', relationTitle2: $$[$0 - 1]}
|
||||
break
|
||||
case 22:
|
||||
this.$ = {id1: $$[$0 - 4], id2: $$[$0], relation: $$[$0 - 2], relationTitle1: $$[$0 - 3], relationTitle2: $$[$0 - 1]}
|
||||
break
|
||||
case 23:
|
||||
this.$ = {type1: $$[$0 - 2], type2: $$[$0], lineType: $$[$0 - 1]}
|
||||
break
|
||||
case 24:
|
||||
this.$ = {type1: 'none', type2: $$[$0], lineType: $$[$0 - 1]}
|
||||
break
|
||||
case 25:
|
||||
this.$ = {type1: $$[$0 - 1], type2: 'none', lineType: $$[$0]}
|
||||
break
|
||||
case 26:
|
||||
this.$ = {type1: 'none', type2: 'none', lineType: $$[$0]}
|
||||
break
|
||||
case 27:
|
||||
this.$ = yy.relationType.AGGREGATION
|
||||
break
|
||||
case 28:
|
||||
this.$ = yy.relationType.EXTENSION
|
||||
break
|
||||
case 29:
|
||||
this.$ = yy.relationType.COMPOSITION
|
||||
break
|
||||
case 30:
|
||||
this.$ = yy.relationType.DEPENDENCY
|
||||
break
|
||||
case 31:
|
||||
this.$ = yy.lineType.LINE
|
||||
break
|
||||
case 32:
|
||||
this.$ = yy.lineType.DOTTED_LINE
|
||||
break
|
||||
}
|
||||
},
|
||||
table: [{3: 1, 4: 2, 5: [1, 3]}, {1: [3]}, {1: [2, 1]}, {6: [1, 4]}, {7: 5, 9: 6, 10: 10, 11: 14, 12: 7, 14: 8, 15: 9, 16: $V0, 20: $V1, 21: $V2, 45: $V3, 46: $V4, 47: $V5}, {8: [1, 18]}, {6: [1, 19], 8: [2, 3]}, o($V6, [2, 7], {13: [1, 20]}), o($V6, [2, 9]), o($V6, [2, 10]), o($V6, [2, 15], {22: 21, 24: 24, 25: 25, 13: [1, 23], 23: [1, 22], 26: $V7, 27: $V8, 28: $V9, 29: $Va, 30: $Vb, 31: $Vc}), {10: 32, 11: 14, 45: $V3, 46: $V4, 47: $V5}, o($V6, [2, 17]), o($V6, [2, 18]), o($Vd, [2, 6], {11: 14, 10: 33, 45: $V3, 46: $V4, 47: $V5}), o($Ve, [2, 46]), o($Ve, [2, 47]), o($Ve, [2, 48]), {1: [2, 2]}, {7: 34, 9: 6, 10: 10, 11: 14, 12: 7, 14: 8, 15: 9, 16: $V0, 20: $V1, 21: $V2, 45: $V3, 46: $V4, 47: $V5}, o($V6, [2, 8]), {10: 35, 11: 14, 23: [1, 36], 45: $V3, 46: $V4, 47: $V5}, {22: 37, 24: 24, 25: 25, 26: $V7, 27: $V8, 28: $V9, 29: $Va, 30: $Vb, 31: $Vc}, o($V6, [2, 16]), {25: 38, 30: $Vb, 31: $Vc}, o($Vf, [2, 26], {24: 39, 26: $V7, 27: $V8, 28: $V9, 29: $Va}), o($Vg, [2, 27]), o($Vg, [2, 28]), o($Vg, [2, 29]), o($Vg, [2, 30]), o($Vh, [2, 31]), o($Vh, [2, 32]), o($V6, [2, 11], {17: [1, 40]}), o($Vd, [2, 5]), {8: [2, 4]}, o($Vi, [2, 19]), {10: 41, 11: 14, 45: $V3, 46: $V4, 47: $V5}, {10: 42, 11: 14, 23: [1, 43], 45: $V3, 46: $V4, 47: $V5}, o($Vf, [2, 25], {24: 44, 26: $V7, 27: $V8, 28: $V9, 29: $Va}), o($Vf, [2, 24]), {18: 45, 20: $Vj}, o($Vi, [2, 21]), o($Vi, [2, 20]), {10: 47, 11: 14, 45: $V3, 46: $V4, 47: $V5}, o($Vf, [2, 23]), {19: [1, 48]}, {18: 49, 19: [2, 13], 20: $Vj}, o($Vi, [2, 22]), o($V6, [2, 12]), {19: [2, 14]}],
|
||||
defaultActions: {2: [2, 1], 18: [2, 2], 34: [2, 4], 49: [2, 14]},
|
||||
parseError: function parseError (str, hash) {
|
||||
if (hash.recoverable) {
|
||||
this.trace(str)
|
||||
} else {
|
||||
function _parseError (msg, hash) {
|
||||
this.message = msg
|
||||
this.hash = hash
|
||||
}
|
||||
_parseError.prototype = Error
|
||||
|
||||
throw new _parseError(str, hash)
|
||||
}
|
||||
},
|
||||
parse: function parse (input) {
|
||||
var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1
|
||||
var args = lstack.slice.call(arguments, 1)
|
||||
var lexer = Object.create(this.lexer)
|
||||
var sharedState = { yy: {} }
|
||||
for (var k in this.yy) {
|
||||
if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
|
||||
sharedState.yy[k] = this.yy[k]
|
||||
}
|
||||
}
|
||||
lexer.setInput(input, sharedState.yy)
|
||||
sharedState.yy.lexer = lexer
|
||||
sharedState.yy.parser = this
|
||||
if (typeof lexer.yylloc === 'undefined') {
|
||||
lexer.yylloc = {}
|
||||
}
|
||||
var yyloc = lexer.yylloc
|
||||
lstack.push(yyloc)
|
||||
var ranges = lexer.options && lexer.options.ranges
|
||||
if (typeof sharedState.yy.parseError === 'function') {
|
||||
this.parseError = sharedState.yy.parseError
|
||||
} else {
|
||||
this.parseError = Object.getPrototypeOf(this).parseError
|
||||
}
|
||||
function popStack (n) {
|
||||
stack.length = stack.length - 2 * n
|
||||
vstack.length = vstack.length - n
|
||||
lstack.length = lstack.length - n
|
||||
}
|
||||
var lex = function () {
|
||||
var token
|
||||
token = lexer.lex() || EOF
|
||||
if (typeof token !== 'number') {
|
||||
token = self.symbols_[token] || token
|
||||
}
|
||||
return token
|
||||
}
|
||||
var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected
|
||||
while (true) {
|
||||
state = stack[stack.length - 1]
|
||||
if (this.defaultActions[state]) {
|
||||
action = this.defaultActions[state]
|
||||
} else {
|
||||
if (symbol === null || typeof symbol === 'undefined') {
|
||||
symbol = lex()
|
||||
}
|
||||
action = table[state] && table[state][symbol]
|
||||
}
|
||||
if (typeof action === 'undefined' || !action.length || !action[0]) {
|
||||
var errStr = ''
|
||||
expected = []
|
||||
for (p in table[state]) {
|
||||
if (this.terminals_[p] && p > TERROR) {
|
||||
expected.push('\'' + this.terminals_[p] + '\'')
|
||||
}
|
||||
}
|
||||
if (lexer.showPosition) {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\''
|
||||
} else {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'')
|
||||
}
|
||||
this.parseError(errStr, {
|
||||
text: lexer.match,
|
||||
token: this.terminals_[symbol] || symbol,
|
||||
line: lexer.yylineno,
|
||||
loc: yyloc,
|
||||
expected: expected
|
||||
})
|
||||
}
|
||||
if (action[0] instanceof Array && action.length > 1) {
|
||||
throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol)
|
||||
}
|
||||
switch (action[0]) {
|
||||
case 1:
|
||||
stack.push(symbol)
|
||||
vstack.push(lexer.yytext)
|
||||
lstack.push(lexer.yylloc)
|
||||
stack.push(action[1])
|
||||
symbol = null
|
||||
if (!preErrorSymbol) {
|
||||
yyleng = lexer.yyleng
|
||||
yytext = lexer.yytext
|
||||
yylineno = lexer.yylineno
|
||||
yyloc = lexer.yylloc
|
||||
if (recovering > 0) {
|
||||
recovering--
|
||||
}
|
||||
} else {
|
||||
symbol = preErrorSymbol
|
||||
preErrorSymbol = null
|
||||
}
|
||||
break
|
||||
case 2:
|
||||
len = this.productions_[action[1]][1]
|
||||
yyval.$ = vstack[vstack.length - len]
|
||||
yyval._$ = {
|
||||
first_line: lstack[lstack.length - (len || 1)].first_line,
|
||||
last_line: lstack[lstack.length - 1].last_line,
|
||||
first_column: lstack[lstack.length - (len || 1)].first_column,
|
||||
last_column: lstack[lstack.length - 1].last_column
|
||||
}
|
||||
if (ranges) {
|
||||
yyval._$.range = [
|
||||
lstack[lstack.length - (len || 1)].range[0],
|
||||
lstack[lstack.length - 1].range[1]
|
||||
]
|
||||
}
|
||||
r = this.performAction.apply(yyval, [
|
||||
yytext,
|
||||
yyleng,
|
||||
yylineno,
|
||||
sharedState.yy,
|
||||
action[1],
|
||||
vstack,
|
||||
lstack
|
||||
].concat(args))
|
||||
if (typeof r !== 'undefined') {
|
||||
return r
|
||||
}
|
||||
if (len) {
|
||||
stack = stack.slice(0, -1 * len * 2)
|
||||
vstack = vstack.slice(0, -1 * len)
|
||||
lstack = lstack.slice(0, -1 * len)
|
||||
}
|
||||
stack.push(this.productions_[action[1]][0])
|
||||
vstack.push(yyval.$)
|
||||
lstack.push(yyval._$)
|
||||
newState = table[stack[stack.length - 2]][stack[stack.length - 1]]
|
||||
stack.push(newState)
|
||||
break
|
||||
case 3:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return true
|
||||
}}
|
||||
|
||||
/* generated by jison-lex 0.3.4 */
|
||||
var lexer = (function () {
|
||||
var lexer = ({
|
||||
|
||||
EOF: 1,
|
||||
|
||||
parseError: function parseError (str, hash) {
|
||||
if (this.yy.parser) {
|
||||
this.yy.parser.parseError(str, hash)
|
||||
} else {
|
||||
throw new Error(str)
|
||||
}
|
||||
},
|
||||
|
||||
// resets the lexer, sets new input
|
||||
setInput: function (input, yy) {
|
||||
this.yy = yy || this.yy || {}
|
||||
this._input = input
|
||||
this._more = this._backtrack = this.done = false
|
||||
this.yylineno = this.yyleng = 0
|
||||
this.yytext = this.matched = this.match = ''
|
||||
this.conditionStack = ['INITIAL']
|
||||
this.yylloc = {
|
||||
first_line: 1,
|
||||
first_column: 0,
|
||||
last_line: 1,
|
||||
last_column: 0
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [0, 0]
|
||||
}
|
||||
this.offset = 0
|
||||
return this
|
||||
},
|
||||
|
||||
// consumes and returns one char from the input
|
||||
input: function () {
|
||||
var ch = this._input[0]
|
||||
this.yytext += ch
|
||||
this.yyleng++
|
||||
this.offset++
|
||||
this.match += ch
|
||||
this.matched += ch
|
||||
var lines = ch.match(/(?:\r\n?|\n).*/g)
|
||||
if (lines) {
|
||||
this.yylineno++
|
||||
this.yylloc.last_line++
|
||||
} else {
|
||||
this.yylloc.last_column++
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range[1]++
|
||||
}
|
||||
|
||||
this._input = this._input.slice(1)
|
||||
return ch
|
||||
},
|
||||
|
||||
// unshifts one char (or a string) into the input
|
||||
unput: function (ch) {
|
||||
var len = ch.length
|
||||
var lines = ch.split(/(?:\r\n?|\n)/g)
|
||||
|
||||
this._input = ch + this._input
|
||||
this.yytext = this.yytext.substr(0, this.yytext.length - len)
|
||||
// this.yyleng -= len;
|
||||
this.offset -= len
|
||||
var oldLines = this.match.split(/(?:\r\n?|\n)/g)
|
||||
this.match = this.match.substr(0, this.match.length - 1)
|
||||
this.matched = this.matched.substr(0, this.matched.length - 1)
|
||||
|
||||
if (lines.length - 1) {
|
||||
this.yylineno -= lines.length - 1
|
||||
}
|
||||
var r = this.yylloc.range
|
||||
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: lines
|
||||
? (lines.length === oldLines.length ? this.yylloc.first_column : 0) +
|
||||
oldLines[oldLines.length - lines.length].length - lines[0].length
|
||||
: this.yylloc.first_column - len
|
||||
}
|
||||
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [r[0], r[0] + this.yyleng - len]
|
||||
}
|
||||
this.yyleng = this.yytext.length
|
||||
return this
|
||||
},
|
||||
|
||||
// When called from action, caches matched text and appends it on next action
|
||||
more: function () {
|
||||
this._more = true
|
||||
return this
|
||||
},
|
||||
|
||||
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
|
||||
reject: function () {
|
||||
if (this.options.backtrack_lexer) {
|
||||
this._backtrack = true
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
|
||||
text: '',
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
})
|
||||
}
|
||||
return this
|
||||
},
|
||||
|
||||
// retain first n characters of the match
|
||||
less: function (n) {
|
||||
this.unput(this.match.slice(n))
|
||||
},
|
||||
|
||||
// displays already matched input, i.e. for error messages
|
||||
pastInput: function () {
|
||||
var past = this.matched.substr(0, this.matched.length - this.match.length)
|
||||
return (past.length > 20 ? '...' : '') + past.substr(-20).replace(/\n/g, '')
|
||||
},
|
||||
|
||||
// displays upcoming input, i.e. for error messages
|
||||
upcomingInput: function () {
|
||||
var next = this.match
|
||||
if (next.length < 20) {
|
||||
next += this._input.substr(0, 20 - next.length)
|
||||
}
|
||||
return (next.substr(0, 20) + (next.length > 20 ? '...' : '')).replace(/\n/g, '')
|
||||
},
|
||||
|
||||
// displays the character position where the lexing error occurred, i.e. for error messages
|
||||
showPosition: function () {
|
||||
var pre = this.pastInput()
|
||||
var c = new Array(pre.length + 1).join('-')
|
||||
return pre + this.upcomingInput() + '\n' + c + '^'
|
||||
},
|
||||
|
||||
// test the lexed token: return FALSE when not a match, otherwise return token
|
||||
test_match: function (match, indexed_rule) {
|
||||
var token,
|
||||
lines,
|
||||
backup
|
||||
|
||||
if (this.options.backtrack_lexer) {
|
||||
// save context
|
||||
backup = {
|
||||
yylineno: this.yylineno,
|
||||
yylloc: {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.last_line,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: this.yylloc.last_column
|
||||
},
|
||||
yytext: this.yytext,
|
||||
match: this.match,
|
||||
matches: this.matches,
|
||||
matched: this.matched,
|
||||
yyleng: this.yyleng,
|
||||
offset: this.offset,
|
||||
_more: this._more,
|
||||
_input: this._input,
|
||||
yy: this.yy,
|
||||
conditionStack: this.conditionStack.slice(0),
|
||||
done: this.done
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
backup.yylloc.range = this.yylloc.range.slice(0)
|
||||
}
|
||||
}
|
||||
|
||||
lines = match[0].match(/(?:\r\n?|\n).*/g)
|
||||
if (lines) {
|
||||
this.yylineno += lines.length
|
||||
}
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.last_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.last_column,
|
||||
last_column: lines
|
||||
? lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length
|
||||
: this.yylloc.last_column + match[0].length
|
||||
}
|
||||
this.yytext += match[0]
|
||||
this.match += match[0]
|
||||
this.matches = match
|
||||
this.yyleng = this.yytext.length
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [this.offset, this.offset += this.yyleng]
|
||||
}
|
||||
this._more = false
|
||||
this._backtrack = false
|
||||
this._input = this._input.slice(match[0].length)
|
||||
this.matched += match[0]
|
||||
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1])
|
||||
if (this.done && this._input) {
|
||||
this.done = false
|
||||
}
|
||||
if (token) {
|
||||
return token
|
||||
} else if (this._backtrack) {
|
||||
// recover context
|
||||
for (var k in backup) {
|
||||
this[k] = backup[k]
|
||||
}
|
||||
return false // rule action called reject() implying the next rule should be tested instead.
|
||||
}
|
||||
return false
|
||||
},
|
||||
|
||||
// return next match in input
|
||||
next: function () {
|
||||
if (this.done) {
|
||||
return this.EOF
|
||||
}
|
||||
if (!this._input) {
|
||||
this.done = true
|
||||
}
|
||||
|
||||
var token,
|
||||
match,
|
||||
tempMatch,
|
||||
index
|
||||
if (!this._more) {
|
||||
this.yytext = ''
|
||||
this.match = ''
|
||||
}
|
||||
var rules = this._currentRules()
|
||||
for (var i = 0; i < rules.length; i++) {
|
||||
tempMatch = this._input.match(this.rules[rules[i]])
|
||||
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
|
||||
match = tempMatch
|
||||
index = i
|
||||
if (this.options.backtrack_lexer) {
|
||||
token = this.test_match(tempMatch, rules[i])
|
||||
if (token !== false) {
|
||||
return token
|
||||
} else if (this._backtrack) {
|
||||
match = false
|
||||
continue // rule action called reject() implying a rule MISmatch.
|
||||
} else {
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false
|
||||
}
|
||||
} else if (!this.options.flex) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if (match) {
|
||||
token = this.test_match(match, rules[index])
|
||||
if (token !== false) {
|
||||
return token
|
||||
}
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false
|
||||
}
|
||||
if (this._input === '') {
|
||||
return this.EOF
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
|
||||
text: '',
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
// return next match that has a token
|
||||
lex: function lex () {
|
||||
var r = this.next()
|
||||
if (r) {
|
||||
return r
|
||||
} else {
|
||||
return this.lex()
|
||||
}
|
||||
},
|
||||
|
||||
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
|
||||
begin: function begin (condition) {
|
||||
this.conditionStack.push(condition)
|
||||
},
|
||||
|
||||
// pop the previously active lexer condition state off the condition stack
|
||||
popState: function popState () {
|
||||
var n = this.conditionStack.length - 1
|
||||
if (n > 0) {
|
||||
return this.conditionStack.pop()
|
||||
} else {
|
||||
return this.conditionStack[0]
|
||||
}
|
||||
},
|
||||
|
||||
// produce the lexer rule set which is active for the currently active lexer condition state
|
||||
_currentRules: function _currentRules () {
|
||||
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
|
||||
return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules
|
||||
} else {
|
||||
return this.conditions['INITIAL'].rules
|
||||
}
|
||||
},
|
||||
|
||||
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
|
||||
topState: function topState (n) {
|
||||
n = this.conditionStack.length - 1 - Math.abs(n || 0)
|
||||
if (n >= 0) {
|
||||
return this.conditionStack[n]
|
||||
} else {
|
||||
return 'INITIAL'
|
||||
}
|
||||
},
|
||||
|
||||
// alias for begin(condition)
|
||||
pushState: function pushState (condition) {
|
||||
this.begin(condition)
|
||||
},
|
||||
|
||||
// return the number of states currently on the stack
|
||||
stateStackSize: function stateStackSize () {
|
||||
return this.conditionStack.length
|
||||
},
|
||||
options: {},
|
||||
performAction: function anonymous (yy, yy_, $avoiding_name_collisions, YY_START) {
|
||||
var YYSTATE = YY_START
|
||||
switch ($avoiding_name_collisions) {
|
||||
case 0:/* do nothing */
|
||||
break
|
||||
case 1:return 6
|
||||
break
|
||||
case 2:/* skip whitespace */
|
||||
break
|
||||
case 3:return 5
|
||||
break
|
||||
case 4: this.begin('struct'); /* console.log('Starting struct'); */return 17
|
||||
break
|
||||
case 5: /* console.log('Ending struct'); */this.popState(); return 19
|
||||
break
|
||||
case 6:/* nothing */
|
||||
break
|
||||
case 7: return 'MEMBER'
|
||||
break
|
||||
case 8:return 16
|
||||
break
|
||||
case 9:this.begin('string')
|
||||
break
|
||||
case 10:this.popState()
|
||||
break
|
||||
case 11:return 'STR'
|
||||
break
|
||||
case 12:return 27
|
||||
break
|
||||
case 13:return 27
|
||||
break
|
||||
case 14:return 29
|
||||
break
|
||||
case 15:return 29
|
||||
break
|
||||
case 16:return 28
|
||||
break
|
||||
case 17:return 26
|
||||
break
|
||||
case 18:return 30
|
||||
break
|
||||
case 19:return 31
|
||||
break
|
||||
case 20:return 13
|
||||
break
|
||||
case 21:return 43
|
||||
break
|
||||
case 22:return 'DOT'
|
||||
break
|
||||
case 23:return 'PLUS'
|
||||
break
|
||||
case 24:return 40
|
||||
break
|
||||
case 25:return 'EQUALS'
|
||||
break
|
||||
case 26:return 'EQUALS'
|
||||
break
|
||||
case 27:return 47
|
||||
break
|
||||
case 28:return 'PUNCTUATION'
|
||||
break
|
||||
case 29:return 46
|
||||
break
|
||||
case 30:return 45
|
||||
break
|
||||
case 31:return 42
|
||||
break
|
||||
case 32:return 8
|
||||
break
|
||||
}
|
||||
},
|
||||
rules: [/^(?:%%[^\n]*)/, /^(?:\n+)/, /^(?:\s+)/, /^(?:classDiagram\b)/, /^(?:[\{])/, /^(?:\})/, /^(?:[\n])/, /^(?:[^\{\}\n]*)/, /^(?:class\b)/, /^(?:["])/, /^(?:["])/, /^(?:[^"]*)/, /^(?:\s*<\|)/, /^(?:\s*\|>)/, /^(?:\s*>)/, /^(?:\s*<)/, /^(?:\s*\*)/, /^(?:\s*o\b)/, /^(?:--)/, /^(?:\.\.)/, /^(?::[^#\n;]+)/, /^(?:-)/, /^(?:\.)/, /^(?:\+)/, /^(?:%)/, /^(?:=)/, /^(?:=)/, /^(?:[A-Za-z]+)/, /^(?:[!"#$%&'*+,-.`?\\_\/])/, /^(?:[0-9]+)/, /^(?:[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D
6F\u2D80-\u2D96\u2DA0-\u2DA6]|[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|[\uFFD2-\uFFD7\uFFDA-\uFFDC])/, /^(?:\s)/, /^(?:$)/],
|
||||
conditions: {'string': {'rules': [10, 11], 'inclusive': false}, 'struct': {'rules': [5, 6, 7], 'inclusive': false}, 'INITIAL': {'rules': [0, 1, 2, 3, 4, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32], 'inclusive': true}}
|
||||
})
|
||||
return lexer
|
||||
})()
|
||||
parser.lexer = lexer
|
||||
function Parser () {
|
||||
this.yy = {}
|
||||
}
|
||||
Parser.prototype = parser; parser.Parser = Parser
|
||||
return new Parser()
|
||||
})()
|
||||
|
||||
if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
|
||||
exports.parser = parser
|
||||
exports.Parser = parser.Parser
|
||||
exports.parse = function () { return parser.parse.apply(parser, arguments) }
|
||||
exports.main = function commonjsMain (args) {
|
||||
if (!args[1]) {
|
||||
console.log('Usage: ' + args[0] + ' FILE')
|
||||
process.exit(1)
|
||||
}
|
||||
var source = require('fs').readFileSync(require('path').normalize(args[1]), 'utf8')
|
||||
return exports.parser.parse(source)
|
||||
}
|
||||
if (typeof module !== 'undefined' && require.main === module) {
|
||||
exports.main(process.argv.slice(1))
|
||||
}
|
||||
}
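Taken together with the class database earlier in this diff, the generated parser above is normally used by assigning that database to parser.yy and feeding it a classDiagram definition; parseError can be overridden on yy to receive the hash (text, token, line, plus loc/expected/recoverable for grammar errors) described in the header comment. A hedged sketch, with assumed require paths and an invented two-line diagram:

// Illustrative sketch only; module paths and the input text are assumptions.
var parser = require('./classDiagram').parser
var classDb = require('../classDb')

parser.yy = classDb
parser.yy.parseError = function (str, hash) {
  // hash.text, hash.token and hash.line are always set; grammar errors add loc, expected, recoverable
  throw new Error(str)
}

classDb.clear()
parser.parse('classDiagram\nAnimal <|-- Duck\nAnimal : +int age')

console.log(Object.keys(classDb.getClasses()))   // [ 'Animal', 'Duck' ]
console.log(classDb.getClass('Animal').members)  // [ '+int age' ]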
@@ -1,22 +0,0 @@
/* eslint-env jasmine */
/**
 * Created by knut on 14-11-18.
 */
describe('when parsing an info graph it', function () {
var ex
beforeEach(function () {
ex = require('./parser/example').parser
ex.yy = require('./exampleDb')
})

it('should handle an info definition', function () {
var str = 'info\nsay: hello'

ex.parse(str)
})
it('should handle a showMessage statement definition', function () {
var str = 'info\nshowInfo'

ex.parse(str)
})
})
@@ -1,29 +0,0 @@
/**
 * Created by knut on 15-01-14.
 */
var Logger = require('../../logger')
var log = Logger.Log

var message = ''
var info = false

exports.setMessage = function (txt) {
log.debug('Setting message to: ' + txt)
message = txt
}

exports.getMessage = function () {
return message
}

exports.setInfo = function (inf) {
info = inf
}

exports.getInfo = function () {
return info
}

exports.parseError = function (err, hash) {
global.mermaidAPI.parseError(err, hash)
}
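The example parser shown later in this diff only needs the small surface above from its yy object (setInfo for showInfo statements, setMessage for say statements), plus parseError, which is forwarded to the global mermaidAPI. A minimal direct-use sketch (the message text is arbitrary and the require path is an assumption):

// Illustrative sketch only.
var db = require('./exampleDb')

db.setInfo(true)
db.setMessage('hello')
console.log(db.getInfo())     // true
console.log(db.getMessage())  // 'hello'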
@@ -1,39 +0,0 @@
/**
 * Created by knut on 14-12-11.
 */
var db = require('./exampleDb')
var exampleParser = require('./parser/example.js')
var d3 = require('../../d3')

var Logger = require('../../logger')
var log = Logger.Log

/**
 * Draws an info picture in the tag with id: id based on the graph definition in text.
 * @param txt
 * @param id
 * @param ver
 */
exports.draw = function (txt, id, ver) {
var parser
parser = exampleParser.parser
parser.yy = db
log.debug('Rendering example diagram')
// Parse the graph definition
parser.parse(txt)

// Select the target svg element
var svg = d3.select('#' + id)

var g = svg.append('g')

g.append('text') // version label
.attr('x', 100)
.attr('y', 40)
.attr('class', 'version')
.attr('font-size', '32px')
.style('text-anchor', 'middle')
.text('mermaid ' + ver)

svg.attr('height', 100)
svg.attr('width', 400)
}
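exports.draw above expects an svg element with the given id to already exist in the DOM, and a definition in the info syntax exercised by the spec earlier in this diff. A rough usage sketch, assuming a browser or jsdom environment (the element id and version string are made up):

// Illustrative sketch only; assumes a DOM is available and the require path is a guess.
var renderer = require('./exampleRenderer')

var svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg')
svg.setAttribute('id', 'info-example')
document.body.appendChild(svg)

// Parses the text with the example parser and writes 'mermaid <ver>' into the svg.
renderer.draw('info\nshowInfo', 'info-example', '0.0.0')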
@@ -1,625 +0,0 @@
|
||||
/* parser generated by jison 0.4.17 */
|
||||
/*
|
||||
Returns a Parser object of the following structure:
|
||||
|
||||
Parser: {
|
||||
yy: {}
|
||||
}
|
||||
|
||||
Parser.prototype: {
|
||||
yy: {},
|
||||
trace: function(),
|
||||
symbols_: {associative list: name ==> number},
|
||||
terminals_: {associative list: number ==> name},
|
||||
productions_: [...],
|
||||
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
|
||||
table: [...],
|
||||
defaultActions: {...},
|
||||
parseError: function(str, hash),
|
||||
parse: function(input),
|
||||
|
||||
lexer: {
|
||||
EOF: 1,
|
||||
parseError: function(str, hash),
|
||||
setInput: function(input),
|
||||
input: function(),
|
||||
unput: function(str),
|
||||
more: function(),
|
||||
less: function(n),
|
||||
pastInput: function(),
|
||||
upcomingInput: function(),
|
||||
showPosition: function(),
|
||||
test_match: function(regex_match_array, rule_index),
|
||||
next: function(),
|
||||
lex: function(),
|
||||
begin: function(condition),
|
||||
popState: function(),
|
||||
_currentRules: function(),
|
||||
topState: function(),
|
||||
pushState: function(condition),
|
||||
|
||||
options: {
|
||||
ranges: boolean (optional: true ==> token location info will include a .range[] member)
|
||||
flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
|
||||
backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
|
||||
},
|
||||
|
||||
performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
|
||||
rules: [...],
|
||||
conditions: {associative list: name ==> set},
|
||||
}
|
||||
}
|
||||
|
||||
token location info (@$, _$, etc.): {
|
||||
first_line: n,
|
||||
last_line: n,
|
||||
first_column: n,
|
||||
last_column: n,
|
||||
range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
|
||||
}
|
||||
|
||||
the parseError function receives a 'hash' object with these members for lexer and parser errors: {
|
||||
text: (matched text)
|
||||
token: (the produced terminal token, if any)
|
||||
line: (yylineno)
|
||||
}
|
||||
while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
|
||||
loc: (yylloc)
|
||||
expected: (string describing the set of expected tokens)
|
||||
recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
|
||||
}
|
||||
*/
|
||||
var parser = (function () {
|
||||
var o = function (k, v, o, l) { for (o = o || {}, l = k.length; l--; o[k[l]] = v);return o }, $V0 = [6, 9, 10, 12]
|
||||
var parser = {trace: function trace () { },
|
||||
yy: {},
|
||||
symbols_: {'error': 2, 'start': 3, 'info': 4, 'document': 5, 'EOF': 6, 'line': 7, 'statement': 8, 'NL': 9, 'showInfo': 10, 'message': 11, 'say': 12, 'TXT': 13, '$accept': 0, '$end': 1},
|
||||
terminals_: {2: 'error', 4: 'info', 6: 'EOF', 9: 'NL', 10: 'showInfo', 12: 'say', 13: 'TXT'},
|
||||
productions_: [0, [3, 3], [5, 0], [5, 2], [7, 1], [7, 1], [8, 1], [8, 1], [11, 2]],
|
||||
performAction: function anonymous (yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
|
||||
/* this == yyval */
|
||||
|
||||
var $0 = $$.length - 1
|
||||
switch (yystate) {
|
||||
case 1:
|
||||
return yy
|
||||
break
|
||||
case 4:
|
||||
|
||||
break
|
||||
case 6:
|
||||
yy.setInfo(true)
|
||||
break
|
||||
case 7:
|
||||
yy.setMessage($$[$0])
|
||||
break
|
||||
case 8:
|
||||
this.$ = $$[$0 - 1].substring(1).trim().replace(/\\n/gm, '\n')
|
||||
break
|
||||
}
|
||||
},
|
||||
table: [{3: 1, 4: [1, 2]}, {1: [3]}, o($V0, [2, 2], {5: 3}), {6: [1, 4], 7: 5, 8: 6, 9: [1, 7], 10: [1, 8], 11: 9, 12: [1, 10]}, {1: [2, 1]}, o($V0, [2, 3]), o($V0, [2, 4]), o($V0, [2, 5]), o($V0, [2, 6]), o($V0, [2, 7]), {13: [1, 11]}, o($V0, [2, 8])],
|
||||
defaultActions: {4: [2, 1]},
|
||||
parseError: function parseError (str, hash) {
|
||||
if (hash.recoverable) {
|
||||
this.trace(str)
|
||||
} else {
|
||||
function _parseError (msg, hash) {
|
||||
this.message = msg
|
||||
this.hash = hash
|
||||
}
|
||||
_parseError.prototype = Error
|
||||
|
||||
throw new _parseError(str, hash)
|
||||
}
|
||||
},
|
||||
parse: function parse (input) {
|
||||
var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1
|
||||
var args = lstack.slice.call(arguments, 1)
|
||||
var lexer = Object.create(this.lexer)
|
||||
var sharedState = { yy: {} }
|
||||
for (var k in this.yy) {
|
||||
if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
|
||||
sharedState.yy[k] = this.yy[k]
|
||||
}
|
||||
}
|
||||
lexer.setInput(input, sharedState.yy)
|
||||
sharedState.yy.lexer = lexer
|
||||
sharedState.yy.parser = this
|
||||
if (typeof lexer.yylloc === 'undefined') {
|
||||
lexer.yylloc = {}
|
||||
}
|
||||
var yyloc = lexer.yylloc
|
||||
lstack.push(yyloc)
|
||||
var ranges = lexer.options && lexer.options.ranges
|
||||
if (typeof sharedState.yy.parseError === 'function') {
|
||||
this.parseError = sharedState.yy.parseError
|
||||
} else {
|
||||
this.parseError = Object.getPrototypeOf(this).parseError
|
||||
}
|
||||
function popStack (n) {
|
||||
stack.length = stack.length - 2 * n
|
||||
vstack.length = vstack.length - n
|
||||
lstack.length = lstack.length - n
|
||||
}
|
||||
var lex = function () {
|
||||
var token
|
||||
token = lexer.lex() || EOF
|
||||
if (typeof token !== 'number') {
|
||||
token = self.symbols_[token] || token
|
||||
}
|
||||
return token
|
||||
}
|
||||
var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected
|
||||
while (true) {
|
||||
state = stack[stack.length - 1]
|
||||
if (this.defaultActions[state]) {
|
||||
action = this.defaultActions[state]
|
||||
} else {
|
||||
if (symbol === null || typeof symbol === 'undefined') {
|
||||
symbol = lex()
|
||||
}
|
||||
action = table[state] && table[state][symbol]
|
||||
}
|
||||
if (typeof action === 'undefined' || !action.length || !action[0]) {
|
||||
var errStr = ''
|
||||
expected = []
|
||||
for (p in table[state]) {
|
||||
if (this.terminals_[p] && p > TERROR) {
|
||||
expected.push('\'' + this.terminals_[p] + '\'')
|
||||
}
|
||||
}
|
||||
if (lexer.showPosition) {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\''
|
||||
} else {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'')
|
||||
}
|
||||
this.parseError(errStr, {
|
||||
text: lexer.match,
|
||||
token: this.terminals_[symbol] || symbol,
|
||||
line: lexer.yylineno,
|
||||
loc: yyloc,
|
||||
expected: expected
|
||||
})
|
||||
}
|
||||
if (action[0] instanceof Array && action.length > 1) {
|
||||
throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol)
|
||||
}
|
||||
switch (action[0]) {
|
||||
case 1:
|
||||
stack.push(symbol)
|
||||
vstack.push(lexer.yytext)
|
||||
lstack.push(lexer.yylloc)
|
||||
stack.push(action[1])
|
||||
symbol = null
|
||||
if (!preErrorSymbol) {
|
||||
yyleng = lexer.yyleng
|
||||
yytext = lexer.yytext
|
||||
yylineno = lexer.yylineno
|
||||
yyloc = lexer.yylloc
|
||||
if (recovering > 0) {
|
||||
recovering--
|
||||
}
|
||||
} else {
|
||||
symbol = preErrorSymbol
|
||||
preErrorSymbol = null
|
||||
}
|
||||
break
|
||||
case 2:
|
||||
len = this.productions_[action[1]][1]
|
||||
yyval.$ = vstack[vstack.length - len]
|
||||
yyval._$ = {
|
||||
first_line: lstack[lstack.length - (len || 1)].first_line,
|
||||
last_line: lstack[lstack.length - 1].last_line,
|
||||
first_column: lstack[lstack.length - (len || 1)].first_column,
|
||||
last_column: lstack[lstack.length - 1].last_column
|
||||
}
|
||||
if (ranges) {
|
||||
yyval._$.range = [
|
||||
lstack[lstack.length - (len || 1)].range[0],
|
||||
lstack[lstack.length - 1].range[1]
|
||||
]
|
||||
}
|
||||
r = this.performAction.apply(yyval, [
|
||||
yytext,
|
||||
yyleng,
|
||||
yylineno,
|
||||
sharedState.yy,
|
||||
action[1],
|
||||
vstack,
|
||||
lstack
|
||||
].concat(args))
|
||||
if (typeof r !== 'undefined') {
|
||||
return r
|
||||
}
|
||||
if (len) {
|
||||
stack = stack.slice(0, -1 * len * 2)
|
||||
vstack = vstack.slice(0, -1 * len)
|
||||
lstack = lstack.slice(0, -1 * len)
|
||||
}
|
||||
stack.push(this.productions_[action[1]][0])
|
||||
vstack.push(yyval.$)
|
||||
lstack.push(yyval._$)
|
||||
newState = table[stack[stack.length - 2]][stack[stack.length - 1]]
|
||||
stack.push(newState)
|
||||
break
|
||||
case 3:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return true
|
||||
}}
|
||||
/* generated by jison-lex 0.3.4 */
|
||||
var lexer = (function () {
|
||||
var lexer = ({
|
||||
|
||||
EOF: 1,
|
||||
|
||||
parseError: function parseError (str, hash) {
|
||||
if (this.yy.parser) {
|
||||
this.yy.parser.parseError(str, hash)
|
||||
} else {
|
||||
throw new Error(str)
|
||||
}
|
||||
},
|
||||
|
||||
// resets the lexer, sets new input
|
||||
setInput: function (input, yy) {
|
||||
this.yy = yy || this.yy || {}
|
||||
this._input = input
|
||||
this._more = this._backtrack = this.done = false
|
||||
this.yylineno = this.yyleng = 0
|
||||
this.yytext = this.matched = this.match = ''
|
||||
this.conditionStack = ['INITIAL']
|
||||
this.yylloc = {
|
||||
first_line: 1,
|
||||
first_column: 0,
|
||||
last_line: 1,
|
||||
last_column: 0
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [0, 0]
|
||||
}
|
||||
this.offset = 0
|
||||
return this
|
||||
},
|
||||
|
||||
// consumes and returns one char from the input
|
||||
input: function () {
|
||||
var ch = this._input[0]
|
||||
this.yytext += ch
|
||||
this.yyleng++
|
||||
this.offset++
|
||||
this.match += ch
|
||||
this.matched += ch
|
||||
var lines = ch.match(/(?:\r\n?|\n).*/g)
|
||||
if (lines) {
|
||||
this.yylineno++
|
||||
this.yylloc.last_line++
|
||||
} else {
|
||||
this.yylloc.last_column++
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range[1]++
|
||||
}
|
||||
|
||||
this._input = this._input.slice(1)
|
||||
return ch
|
||||
},
|
||||
|
||||
// unshifts one char (or a string) into the input
|
||||
unput: function (ch) {
|
||||
var len = ch.length
|
||||
var lines = ch.split(/(?:\r\n?|\n)/g)
|
||||
|
||||
this._input = ch + this._input
|
||||
this.yytext = this.yytext.substr(0, this.yytext.length - len)
|
||||
// this.yyleng -= len;
|
||||
this.offset -= len
|
||||
var oldLines = this.match.split(/(?:\r\n?|\n)/g)
|
||||
this.match = this.match.substr(0, this.match.length - 1)
|
||||
this.matched = this.matched.substr(0, this.matched.length - 1)
|
||||
|
||||
if (lines.length - 1) {
|
||||
this.yylineno -= lines.length - 1
|
||||
}
|
||||
var r = this.yylloc.range
|
||||
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: lines
|
||||
? (lines.length === oldLines.length ? this.yylloc.first_column : 0) +
|
||||
oldLines[oldLines.length - lines.length].length - lines[0].length
|
||||
: this.yylloc.first_column - len
|
||||
}
|
||||
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [r[0], r[0] + this.yyleng - len]
|
||||
}
|
||||
this.yyleng = this.yytext.length
|
||||
return this
|
||||
},
|
||||
|
||||
// When called from action, caches matched text and appends it on next action
|
||||
more: function () {
|
||||
this._more = true
|
||||
return this
|
||||
},
|
||||
|
||||
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
|
||||
reject: function () {
|
||||
if (this.options.backtrack_lexer) {
|
||||
this._backtrack = true
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
|
||||
text: '',
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
})
|
||||
}
|
||||
return this
|
||||
},
|
||||
|
||||
// retain first n characters of the match
|
||||
less: function (n) {
|
||||
this.unput(this.match.slice(n))
|
||||
},
|
||||
|
||||
// displays already matched input, i.e. for error messages
|
||||
pastInput: function () {
|
||||
var past = this.matched.substr(0, this.matched.length - this.match.length)
|
||||
return (past.length > 20 ? '...' : '') + past.substr(-20).replace(/\n/g, '')
|
||||
},
|
||||
|
||||
// displays upcoming input, i.e. for error messages
|
||||
upcomingInput: function () {
|
||||
var next = this.match
|
||||
if (next.length < 20) {
|
||||
next += this._input.substr(0, 20 - next.length)
|
||||
}
|
||||
return (next.substr(0, 20) + (next.length > 20 ? '...' : '')).replace(/\n/g, '')
|
||||
},
|
||||
|
||||
// displays the character position where the lexing error occurred, i.e. for error messages
|
||||
showPosition: function () {
|
||||
var pre = this.pastInput()
|
||||
var c = new Array(pre.length + 1).join('-')
|
||||
return pre + this.upcomingInput() + '\n' + c + '^'
|
||||
},
|
||||
|
||||
// test the lexed token: return FALSE when not a match, otherwise return token
|
||||
test_match: function (match, indexed_rule) {
|
||||
var token,
|
||||
lines,
|
||||
backup
|
||||
|
||||
if (this.options.backtrack_lexer) {
|
||||
// save context
|
||||
backup = {
|
||||
yylineno: this.yylineno,
|
||||
yylloc: {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.last_line,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: this.yylloc.last_column
|
||||
},
|
||||
yytext: this.yytext,
|
||||
match: this.match,
|
||||
matches: this.matches,
|
||||
matched: this.matched,
|
||||
yyleng: this.yyleng,
|
||||
offset: this.offset,
|
||||
_more: this._more,
|
||||
_input: this._input,
|
||||
yy: this.yy,
|
||||
conditionStack: this.conditionStack.slice(0),
|
||||
done: this.done
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
backup.yylloc.range = this.yylloc.range.slice(0)
|
||||
}
|
||||
}
|
||||
|
||||
lines = match[0].match(/(?:\r\n?|\n).*/g)
|
||||
if (lines) {
|
||||
this.yylineno += lines.length
|
||||
}
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.last_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.last_column,
|
||||
last_column: lines
|
||||
? lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length
|
||||
: this.yylloc.last_column + match[0].length
|
||||
}
|
||||
this.yytext += match[0]
|
||||
this.match += match[0]
|
||||
this.matches = match
|
||||
this.yyleng = this.yytext.length
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [this.offset, this.offset += this.yyleng]
|
||||
}
|
||||
this._more = false
|
||||
this._backtrack = false
|
||||
this._input = this._input.slice(match[0].length)
|
||||
this.matched += match[0]
|
||||
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1])
|
||||
if (this.done && this._input) {
|
||||
this.done = false
|
||||
}
|
||||
if (token) {
|
||||
return token
|
||||
} else if (this._backtrack) {
|
||||
// recover context
|
||||
for (var k in backup) {
|
||||
this[k] = backup[k]
|
||||
}
|
||||
return false // rule action called reject() implying the next rule should be tested instead.
|
||||
}
|
||||
return false
|
||||
},
|
||||
|
||||
// return next match in input
|
||||
next: function () {
|
||||
if (this.done) {
|
||||
return this.EOF
|
||||
}
|
||||
if (!this._input) {
|
||||
this.done = true
|
||||
}
|
||||
|
||||
var token,
|
||||
match,
|
||||
tempMatch,
|
||||
index
|
||||
if (!this._more) {
|
||||
this.yytext = ''
|
||||
this.match = ''
|
||||
}
|
||||
var rules = this._currentRules()
|
||||
for (var i = 0; i < rules.length; i++) {
|
||||
tempMatch = this._input.match(this.rules[rules[i]])
|
||||
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
|
||||
match = tempMatch
|
||||
index = i
|
||||
if (this.options.backtrack_lexer) {
|
||||
token = this.test_match(tempMatch, rules[i])
|
||||
if (token !== false) {
|
||||
return token
|
||||
} else if (this._backtrack) {
|
||||
match = false
|
||||
continue // rule action called reject() implying a rule MISmatch.
|
||||
} else {
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false
|
||||
}
|
||||
} else if (!this.options.flex) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if (match) {
|
||||
token = this.test_match(match, rules[index])
|
||||
if (token !== false) {
|
||||
return token
|
||||
}
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false
|
||||
}
|
||||
if (this._input === '') {
|
||||
return this.EOF
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
|
||||
text: '',
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
// return next match that has a token
|
||||
lex: function lex () {
|
||||
var r = this.next()
|
||||
if (r) {
|
||||
return r
|
||||
} else {
|
||||
return this.lex()
|
||||
}
|
||||
},
|
||||
|
||||
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
|
||||
begin: function begin (condition) {
|
||||
this.conditionStack.push(condition)
|
||||
},
|
||||
|
||||
// pop the previously active lexer condition state off the condition stack
|
||||
popState: function popState () {
|
||||
var n = this.conditionStack.length - 1
|
||||
if (n > 0) {
|
||||
return this.conditionStack.pop()
|
||||
} else {
|
||||
return this.conditionStack[0]
|
||||
}
|
||||
},
|
||||
|
||||
// produce the lexer rule set which is active for the currently active lexer condition state
|
||||
_currentRules: function _currentRules () {
|
||||
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
|
||||
return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules
|
||||
} else {
|
||||
return this.conditions['INITIAL'].rules
|
||||
}
|
||||
},
|
||||
|
||||
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
|
||||
topState: function topState (n) {
|
||||
n = this.conditionStack.length - 1 - Math.abs(n || 0)
|
||||
if (n >= 0) {
|
||||
return this.conditionStack[n]
|
||||
} else {
|
||||
return 'INITIAL'
|
||||
}
|
||||
},
|
||||
|
||||
// alias for begin(condition)
|
||||
pushState: function pushState (condition) {
|
||||
this.begin(condition)
|
||||
},
|
||||
|
||||
// return the number of states currently on the stack
|
||||
stateStackSize: function stateStackSize () {
|
||||
return this.conditionStack.length
|
||||
},
|
||||
options: {'case-insensitive': true},
|
||||
performAction: function anonymous (yy, yy_, $avoiding_name_collisions, YY_START) {
|
||||
// Pre-lexer code can go here
|
||||
|
||||
var YYSTATE = YY_START
|
||||
switch ($avoiding_name_collisions) {
|
||||
case 0:return 9
|
||||
break
|
||||
case 1:return 10
|
||||
break
|
||||
case 2:return 4
|
||||
break
|
||||
case 3:return 12
|
||||
break
|
||||
case 4:return 13
|
||||
break
|
||||
case 5:return 6
|
||||
break
|
||||
case 6:return 'INVALID'
|
||||
break
|
||||
}
|
||||
},
|
||||
rules: [/^(?:[\n]+)/i, /^(?:showInfo\b)/i, /^(?:info\b)/i, /^(?:say\b)/i, /^(?::[^#\n;]+)/i, /^(?:$)/i, /^(?:.)/i],
|
||||
conditions: {'INITIAL': {'rules': [0, 1, 2, 3, 4, 5, 6], 'inclusive': true}}
|
||||
})
|
||||
return lexer
|
||||
})()
|
||||
parser.lexer = lexer
|
||||
function Parser () {
|
||||
this.yy = {}
|
||||
}
|
||||
Parser.prototype = parser; parser.Parser = Parser
|
||||
return new Parser()
|
||||
})()
|
||||
|
||||
if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
|
||||
exports.parser = parser
|
||||
exports.Parser = parser.Parser
|
||||
exports.parse = function () { return parser.parse.apply(parser, arguments) }
|
||||
exports.main = function commonjsMain (args) {
|
||||
if (!args[1]) {
|
||||
console.log('Usage: ' + args[0] + ' FILE')
|
||||
process.exit(1)
|
||||
}
|
||||
var source = require('fs').readFileSync(require('path').normalize(args[1]), 'utf8')
|
||||
return exports.parser.parse(source)
|
||||
}
|
||||
if (typeof module !== 'undefined' && require.main === module) {
|
||||
exports.main(process.argv.slice(1))
|
||||
}
|
||||
}
|
||||
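The CommonJS block above also makes the generated parser usable straight from Node. A hedged sketch of both entry points follows; the file name example.js and the yy wiring are assumptions, since only the lexer half of the generated file is visible in this hunk.

```
// Hedged sketch - 'example.js' stands in for wherever this generated parser is written out.
const generated = require('./example.js')

// Library use: exports.parse forwards to parser.parse, but parser.yy must first point
// at the db object whose functions the grammar's semantic actions call:
//   generated.parser.yy = someDb
//   generated.parse('info showInfo\n')

// CLI use: exports.main treats the second argument as the input file, and the
// require.main === module branch above wires that up, so this works directly:
//   node example.js diagram.txt
```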
@@ -1,29 +1,28 @@
|
||||
/**
|
||||
* Created by knut on 14-11-03.
|
||||
*/
|
||||
var Logger = require('../../logger')
|
||||
var log = Logger.Log
|
||||
var utils = require('../../utils')
|
||||
import * as d3 from 'd3'
|
||||
|
||||
var d3 = require('../../d3')
|
||||
var vertices = {}
|
||||
var edges = []
|
||||
var classes = []
|
||||
var subGraphs = []
|
||||
var tooltips = {}
|
||||
var subCount = 0
|
||||
var direction
|
||||
import { logger } from '../../logger'
|
||||
import utils from '../../utils'
|
||||
|
||||
let vertices = {}
|
||||
let edges = []
|
||||
let classes = []
|
||||
let subGraphs = []
|
||||
let subGraphLookup = {}
|
||||
let tooltips = {}
|
||||
let subCount = 0
|
||||
let direction
|
||||
// Functions to be run after graph rendering
|
||||
var funs = []
|
||||
let funs = []
|
||||
/**
|
||||
* Function called by parser when a node definition has been found
|
||||
* @param id
|
||||
* @param text
|
||||
* @param type
|
||||
* @param style
|
||||
* @param classes
|
||||
*/
|
||||
exports.addVertex = function (id, text, type, style) {
|
||||
var txt
|
||||
export const addVertex = function (id, text, type, style, classes) {
|
||||
let txt
|
||||
|
||||
if (typeof id === 'undefined') {
|
||||
return
|
||||
@@ -48,9 +47,6 @@ exports.addVertex = function (id, text, type, style) {
|
||||
if (typeof type !== 'undefined') {
|
||||
vertices[id].type = type
|
||||
}
|
||||
if (typeof type !== 'undefined') {
|
||||
vertices[id].type = type
|
||||
}
|
||||
if (typeof style !== 'undefined') {
|
||||
if (style !== null) {
|
||||
style.forEach(function (s) {
|
||||
@@ -58,6 +54,13 @@ exports.addVertex = function (id, text, type, style) {
|
||||
})
|
||||
}
|
||||
}
|
||||
if (typeof classes !== 'undefined') {
|
||||
if (classes !== null) {
|
||||
classes.forEach(function (s) {
|
||||
vertices[id].classes.push(s)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -67,9 +70,9 @@ exports.addVertex = function (id, text, type, style) {
|
||||
* @param type
|
||||
* @param linktext
|
||||
*/
|
||||
exports.addLink = function (start, end, type, linktext) {
|
||||
log.info('Got edge...', start, end)
|
||||
var edge = { start: start, end: end, type: undefined, text: '' }
|
||||
export const addLink = function (start, end, type, linktext) {
|
||||
logger.info('Got edge...', start, end)
|
||||
const edge = { start: start, end: end, type: undefined, text: '' }
|
||||
linktext = type.text
|
||||
|
||||
if (typeof linktext !== 'undefined') {
|
||||
@@ -93,12 +96,14 @@ exports.addLink = function (start, end, type, linktext) {
|
||||
* @param pos
|
||||
* @param interpolate
|
||||
*/
|
||||
exports.updateLinkInterpolate = function (pos, interp) {
|
||||
if (pos === 'default') {
|
||||
edges.defaultInterpolate = interp
|
||||
} else {
|
||||
edges[pos].interpolate = interp
|
||||
}
|
||||
export const updateLinkInterpolate = function (positions, interp) {
|
||||
positions.forEach(function (pos) {
|
||||
if (pos === 'default') {
|
||||
edges.defaultInterpolate = interp
|
||||
} else {
|
||||
edges[pos].interpolate = interp
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -106,18 +111,20 @@ exports.updateLinkInterpolate = function (pos, interp) {
|
||||
* @param pos
|
||||
* @param style
|
||||
*/
|
||||
exports.updateLink = function (pos, style) {
|
||||
if (pos === 'default') {
|
||||
edges.defaultStyle = style
|
||||
} else {
|
||||
if (utils.isSubstringInArray('fill', style) === -1) {
|
||||
style.push('fill:none')
|
||||
export const updateLink = function (positions, style) {
|
||||
positions.forEach(function (pos) {
|
||||
if (pos === 'default') {
|
||||
edges.defaultStyle = style
|
||||
} else {
|
||||
if (utils.isSubstringInArray('fill', style) === -1) {
|
||||
style.push('fill:none')
|
||||
}
|
||||
edges[pos].style = style
|
||||
}
|
||||
edges[pos].style = style
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
exports.addClass = function (id, style) {
|
||||
export const addClass = function (id, style) {
|
||||
if (typeof classes[id] === 'undefined') {
|
||||
classes[id] = { id: id, styles: [] }
|
||||
}
|
||||
@@ -135,41 +142,42 @@ exports.addClass = function (id, style) {
|
||||
* Called by parser when a graph definition is found, stores the direction of the chart.
|
||||
* @param dir
|
||||
*/
|
||||
exports.setDirection = function (dir) {
|
||||
export const setDirection = function (dir) {
|
||||
direction = dir
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by parser when a graph definition is found, stores the direction of the chart.
|
||||
* @param dir
|
||||
* Called by parser when a special node is found, e.g. a clickable element.
|
||||
* @param ids Comma separated list of ids
|
||||
* @param className Class to add
|
||||
*/
|
||||
exports.setClass = function (id, className) {
|
||||
if (id.indexOf(',') > 0) {
|
||||
id.split(',').forEach(function (id2) {
|
||||
if (typeof vertices[id2] !== 'undefined') {
|
||||
vertices[id2].classes.push(className)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
export const setClass = function (ids, className) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
if (typeof vertices[id] !== 'undefined') {
|
||||
vertices[id].classes.push(className)
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof subGraphLookup[id] !== 'undefined') {
|
||||
subGraphLookup[id].classes.push(className)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
var setTooltip = function (id, tooltip) {
|
||||
if (typeof tooltip !== 'undefined') {
|
||||
tooltips[id] = tooltip
|
||||
}
|
||||
const setTooltip = function (ids, tooltip) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
if (typeof tooltip !== 'undefined') {
|
||||
tooltips[id] = tooltip
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
var setClickFun = function (id, functionName) {
|
||||
const setClickFun = function (id, functionName) {
|
||||
if (typeof functionName === 'undefined') {
|
||||
return
|
||||
}
|
||||
if (typeof vertices[id] !== 'undefined') {
|
||||
funs.push(function (element) {
|
||||
var elem = d3.select(element).select('#' + id)
|
||||
const elem = d3.select(element).select(`[id="${id}"]`)
|
||||
if (elem !== null) {
|
||||
elem.on('click', function () {
|
||||
window[functionName](id)
|
||||
@@ -179,56 +187,50 @@ var setClickFun = function (id, functionName) {
|
||||
}
|
||||
}
|
||||
|
||||
var setLink = function (id, linkStr) {
|
||||
if (typeof linkStr === 'undefined') {
|
||||
return
|
||||
}
|
||||
if (typeof vertices[id] !== 'undefined') {
|
||||
funs.push(function (element) {
|
||||
var elem = d3.select(element).select('#' + id)
|
||||
if (elem !== null) {
|
||||
elem.on('click', function () {
|
||||
window.open(linkStr, 'newTab')
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
/**
|
||||
* Called by parser when a link is found. Adds the URL to the vertex data.
|
||||
* @param ids Comma separated list of ids
|
||||
* @param linkStr URL to create a link for
|
||||
* @param tooltip Tooltip for the clickable element
|
||||
*/
|
||||
export const setLink = function (ids, linkStr, tooltip) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
if (typeof vertices[id] !== 'undefined') {
|
||||
vertices[id].link = linkStr
|
||||
}
|
||||
})
|
||||
setTooltip(ids, tooltip)
|
||||
setClass(ids, 'clickable')
|
||||
}
|
||||
exports.getTooltip = function (id) {
|
||||
export const getTooltip = function (id) {
|
||||
return tooltips[id]
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by parser when a graph definition is found, stores the direction of the chart.
|
||||
* @param dir
|
||||
* Called by parser when a click definition is found. Registers an event handler.
|
||||
* @param ids Comma separated list of ids
|
||||
* @param functionName Function to be called on click
|
||||
* @param tooltip Tooltip for the clickable element
|
||||
*/
|
||||
exports.setClickEvent = function (id, functionName, link, tooltip) {
|
||||
if (id.indexOf(',') > 0) {
|
||||
id.split(',').forEach(function (id2) {
|
||||
setTooltip(id2, tooltip)
|
||||
setClickFun(id2, functionName)
|
||||
setLink(id2, link)
|
||||
})
|
||||
} else {
|
||||
setTooltip(id, tooltip)
|
||||
setClickFun(id, functionName)
|
||||
setLink(id, link)
|
||||
}
|
||||
export const setClickEvent = function (ids, functionName, tooltip) {
|
||||
ids.split(',').forEach(function (id) { setClickFun(id, functionName) })
|
||||
setTooltip(ids, tooltip)
|
||||
setClass(ids, 'clickable')
|
||||
}
|
||||
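setLink and setClickEvent now take a comma separated list of ids and handle the tooltip and the clickable class themselves. A hedged sketch of driving the new API directly; the ids, callback name and URL are purely illustrative.

```
// Hedged sketch - ids, callback name and URL are illustrative.
import flowDb from './flowDb'

flowDb.clear()
flowDb.addVertex('a1', 'Start', 'round', undefined, undefined)
flowDb.addVertex('a2', 'Docs', undefined, undefined, undefined)

// One call can now target several vertices at once:
flowDb.setClickEvent('a1,a2', 'myCallback', 'Click for details')
flowDb.setLink('a2', 'http://example.com', 'Open the docs')

flowDb.getTooltip('a2')        // -> 'Open the docs' (setLink overwrote the earlier tooltip)
flowDb.getVertices().a2.link   // -> 'http://example.com', which the renderer uses to wrap the node in an <a>
```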
|
||||
exports.bindFunctions = function (element) {
|
||||
export const bindFunctions = function (element) {
|
||||
funs.forEach(function (fun) {
|
||||
fun(element)
|
||||
})
|
||||
}
|
||||
exports.getDirection = function () {
|
||||
export const getDirection = function () {
|
||||
return direction
|
||||
}
|
||||
/**
|
||||
* Retrieval function for fetching the found nodes after parsing has completed.
|
||||
* @returns {{}|*|vertices}
|
||||
*/
|
||||
exports.getVertices = function () {
|
||||
export const getVertices = function () {
|
||||
return vertices
|
||||
}
|
||||
|
||||
@@ -236,7 +238,7 @@ exports.getVertices = function () {
|
||||
* Retrieval function for fetching the found links after parsing has completed.
|
||||
* @returns {{}|*|edges}
|
||||
*/
|
||||
exports.getEdges = function () {
|
||||
export const getEdges = function () {
|
||||
return edges
|
||||
}
|
||||
|
||||
@@ -244,31 +246,31 @@ exports.getEdges = function () {
|
||||
* Retrieval function for fetching the found class definitions after parsing has completed.
|
||||
* @returns {{}|*|classes}
|
||||
*/
|
||||
exports.getClasses = function () {
|
||||
export const getClasses = function () {
|
||||
return classes
|
||||
}
|
||||
|
||||
var setupToolTips = function (element) {
|
||||
var tooltipElem = d3.select('.mermaidTooltip')
|
||||
if (tooltipElem[0][0] === null) {
|
||||
const setupToolTips = function (element) {
|
||||
let tooltipElem = d3.select('.mermaidTooltip')
|
||||
if ((tooltipElem._groups || tooltipElem)[0][0] === null) {
|
||||
tooltipElem = d3.select('body')
|
||||
.append('div')
|
||||
.attr('class', 'mermaidTooltip')
|
||||
.style('opacity', 0)
|
||||
}
|
||||
|
||||
var svg = d3.select(element).select('svg')
|
||||
const svg = d3.select(element).select('svg')
|
||||
|
||||
var nodes = svg.selectAll('g.node')
|
||||
const nodes = svg.selectAll('g.node')
|
||||
nodes
|
||||
.on('mouseover', function () {
|
||||
var el = d3.select(this)
|
||||
var title = el.attr('title')
|
||||
const el = d3.select(this)
|
||||
const title = el.attr('title')
|
||||
// Don't try to draw a tooltip if no data is provided
|
||||
if (title === null) {
|
||||
return
|
||||
}
|
||||
var rect = this.getBoundingClientRect()
|
||||
const rect = this.getBoundingClientRect()
|
||||
|
||||
tooltipElem.transition()
|
||||
.duration(200)
|
||||
@@ -282,7 +284,7 @@ var setupToolTips = function (element) {
|
||||
tooltipElem.transition()
|
||||
.duration(500)
|
||||
.style('opacity', 0)
|
||||
var el = d3.select(this)
|
||||
const el = d3.select(this)
|
||||
el.classed('hover', false)
|
||||
})
|
||||
}
|
||||
@@ -291,13 +293,14 @@ funs.push(setupToolTips)
|
||||
/**
|
||||
* Clears the internal graph db so that a new graph can be parsed.
|
||||
*/
|
||||
exports.clear = function () {
|
||||
export const clear = function () {
|
||||
vertices = {}
|
||||
classes = {}
|
||||
edges = []
|
||||
funs = []
|
||||
funs.push(setupToolTips)
|
||||
subGraphs = []
|
||||
subGraphLookup = {}
|
||||
subCount = 0
|
||||
tooltips = []
|
||||
}
|
||||
@@ -305,50 +308,52 @@ exports.clear = function () {
|
||||
*
|
||||
* @returns {string}
|
||||
*/
|
||||
exports.defaultStyle = function () {
|
||||
export const defaultStyle = function () {
|
||||
return 'fill:#ffa;stroke: #f66; stroke-width: 3px; stroke-dasharray: 5, 5;fill:#ffa;stroke: #666;'
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the internal graph db so that a new graph can be parsed.
|
||||
*/
|
||||
exports.addSubGraph = function (list, title) {
|
||||
export const addSubGraph = function (id, list, title) {
|
||||
function uniq (a) {
|
||||
var prims = { 'boolean': {}, 'number': {}, 'string': {} }
|
||||
var objs = []
|
||||
const prims = { 'boolean': {}, 'number': {}, 'string': {} }
|
||||
const objs = []
|
||||
|
||||
return a.filter(function (item) {
|
||||
var type = typeof item
|
||||
if (item === ' ') {
|
||||
const type = typeof item
|
||||
if (item.trim() === '') {
|
||||
return false
|
||||
}
|
||||
if (type in prims) { return prims[type].hasOwnProperty(item) ? false : (prims[type][item] = true) } else { return objs.indexOf(item) >= 0 ? false : objs.push(item) }
|
||||
})
|
||||
}
|
||||
|
||||
var nodeList = []
|
||||
let nodeList = []
|
||||
|
||||
nodeList = uniq(nodeList.concat.apply(nodeList, list))
|
||||
|
||||
var subGraph = { id: 'subGraph' + subCount, nodes: nodeList, title: title }
|
||||
subGraphs.push(subGraph)
|
||||
id = id || ('subGraph' + subCount)
|
||||
title = title || ''
|
||||
subCount = subCount + 1
|
||||
return subGraph.id
|
||||
const subGraph = { id: id, nodes: nodeList, title: title.trim(), classes: [] }
|
||||
subGraphs.push(subGraph)
|
||||
subGraphLookup[id] = subGraph
|
||||
return id
|
||||
}
|
||||
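addSubGraph now takes an explicit id (falling back to a generated subGraphN), dedupes the node list through uniq and records the result in subGraphLookup so setClass can reach subgraphs by id. A hedged sketch with illustrative ids, nodes and titles:

```
// Hedged sketch - ids, node names and titles are illustrative.
import flowDb from './flowDb'

flowDb.clear()
flowDb.addSubGraph('cluster1', ['a1', 'a2', 'a2', ' '], 'My group ')
// returns 'cluster1'; uniq() drops the duplicate 'a2' and the whitespace-only entry
// and the title is trimmed, so the stored subgraph is
//   { id: 'cluster1', nodes: ['a1', 'a2'], title: 'My group', classes: [] }

flowDb.addSubGraph(undefined, ['b1'], undefined)
// returns 'subGraph1' (subCount was already bumped by the first call); title falls back to ''

flowDb.setClass('cluster1', 'important')   // reached through subGraphLookup, not through vertices
flowDb.getSubGraphs().length               // -> 2
```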
|
||||
var getPosForId = function (id) {
|
||||
var i
|
||||
for (i = 0; i < subGraphs.length; i++) {
|
||||
const getPosForId = function (id) {
|
||||
for (let i = 0; i < subGraphs.length; i++) {
|
||||
if (subGraphs[i].id === id) {
|
||||
return i
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
var secCount = -1
|
||||
var posCrossRef = []
|
||||
var indexNodes = function (id, pos) {
|
||||
var nodes = subGraphs[pos].nodes
|
||||
let secCount = -1
|
||||
const posCrossRef = []
|
||||
const indexNodes2 = function (id, pos) {
|
||||
const nodes = subGraphs[pos].nodes
|
||||
secCount = secCount + 1
|
||||
if (secCount > 2000) {
|
||||
return
|
||||
@@ -362,13 +367,13 @@ var indexNodes = function (id, pos) {
|
||||
}
|
||||
}
|
||||
|
||||
var count = 0
|
||||
var posCount = 1
|
||||
let count = 0
|
||||
let posCount = 1
|
||||
while (count < nodes.length) {
|
||||
var childPos = getPosForId(nodes[count])
|
||||
const childPos = getPosForId(nodes[count])
|
||||
// Ignore regular nodes (pos will be -1)
|
||||
if (childPos >= 0) {
|
||||
var res = indexNodes(id, childPos)
|
||||
const res = indexNodes2(id, childPos)
|
||||
if (res.result) {
|
||||
return {
|
||||
result: true,
|
||||
@@ -387,20 +392,40 @@ var indexNodes = function (id, pos) {
|
||||
}
|
||||
}
|
||||
|
||||
exports.getDepthFirstPos = function (pos) {
|
||||
export const getDepthFirstPos = function (pos) {
|
||||
return posCrossRef[pos]
|
||||
}
|
||||
exports.indexNodes = function () {
|
||||
export const indexNodes = function () {
|
||||
secCount = -1
|
||||
if (subGraphs.length > 0) {
|
||||
indexNodes('none', subGraphs.length - 1, 0)
|
||||
indexNodes2('none', subGraphs.length - 1, 0)
|
||||
}
|
||||
}
|
||||
|
||||
exports.getSubGraphs = function () {
|
||||
export const getSubGraphs = function () {
|
||||
return subGraphs
|
||||
}
|
||||
|
||||
exports.parseError = function (err, hash) {
|
||||
global.mermaidAPI.parseError(err, hash)
|
||||
export default {
|
||||
addVertex,
|
||||
addLink,
|
||||
updateLinkInterpolate,
|
||||
updateLink,
|
||||
addClass,
|
||||
setDirection,
|
||||
setClass,
|
||||
getTooltip,
|
||||
setClickEvent,
|
||||
setLink,
|
||||
bindFunctions,
|
||||
getDirection,
|
||||
getVertices,
|
||||
getEdges,
|
||||
getClasses,
|
||||
clear,
|
||||
defaultStyle,
|
||||
addSubGraph,
|
||||
getDepthFirstPos,
|
||||
indexNodes,
|
||||
getSubGraphs
|
||||
}
|
||||
@@ -1,20 +1,18 @@
|
||||
/**
|
||||
* Created by knut on 14-12-11.
|
||||
*/
|
||||
var graph = require('./graphDb')
|
||||
var flow = require('./parser/flow')
|
||||
var dot = require('./parser/dot')
|
||||
var d3 = require('../../d3')
|
||||
var dagreD3 = require('dagre-d3-renderer')
|
||||
var Logger = require('../../logger')
|
||||
var log = Logger.Log
|
||||
import graphlib from 'graphlibrary'
|
||||
import * as d3 from 'd3'
|
||||
|
||||
var conf = {
|
||||
import flowDb from './flowDb'
|
||||
import flow from './parser/flow'
|
||||
import dagreD3 from 'dagre-d3-renderer'
|
||||
import addHtmlLabel from 'dagre-d3-renderer/lib/label/add-html-label.js'
|
||||
import { logger } from '../../logger'
|
||||
import { interpolateToCurve } from '../../utils'
|
||||
|
||||
const conf = {
|
||||
}
|
||||
module.exports.setConf = function (cnf) {
|
||||
var keys = Object.keys(cnf)
|
||||
var i
|
||||
for (i = 0; i < keys.length; i++) {
|
||||
export const setConf = function (cnf) {
|
||||
const keys = Object.keys(cnf)
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
conf[keys[i]] = cnf[keys[i]]
|
||||
}
|
||||
}
|
||||
@@ -24,13 +22,13 @@ module.exports.setConf = function (cnf) {
|
||||
* @param vert Object containing the vertices.
|
||||
* @param g The graph that is to be drawn.
|
||||
*/
|
||||
exports.addVertices = function (vert, g) {
|
||||
var keys = Object.keys(vert)
|
||||
export const addVertices = function (vert, g, svgId) {
|
||||
const svg = d3.select(`[id="${svgId}"]`)
|
||||
const keys = Object.keys(vert)
|
||||
|
||||
var styleFromStyleArr = function (styleStr, arr) {
|
||||
var i
|
||||
const styleFromStyleArr = function (styleStr, arr) {
|
||||
// Create a compound style definition from the style definitions found for the node in the graph definition
|
||||
for (i = 0; i < arr.length; i++) {
|
||||
for (let i = 0; i < arr.length; i++) {
|
||||
if (typeof arr[i] !== 'undefined') {
|
||||
styleStr = styleStr + arr[i] + ';'
|
||||
}
|
||||
@@ -39,66 +37,66 @@ exports.addVertices = function (vert, g) {
|
||||
return styleStr
|
||||
}
|
||||
|
||||
// Iterate through each item in the vertice object (containing all the vertices found) in the graph definition
|
||||
// Iterate through each item in the vertex object (containing all the vertices found) in the graph definition
|
||||
keys.forEach(function (id) {
|
||||
var vertice = vert[id]
|
||||
var verticeText
|
||||
const vertex = vert[id]
|
||||
|
||||
/**
|
||||
* Variable for storing the classes for the vertice
|
||||
* Variable for storing the classes for the vertex
|
||||
* @type {string}
|
||||
*/
|
||||
var classStr = ''
|
||||
|
||||
if (vertice.classes.length > 0) {
|
||||
classStr = vertice.classes.join(' ')
|
||||
let classStr = ''
|
||||
if (vertex.classes.length > 0) {
|
||||
classStr = vertex.classes.join(' ')
|
||||
}
|
||||
|
||||
/**
|
||||
* Variable for storing the extracted style for the vertice
|
||||
* Variable for storing the extracted style for the vertex
|
||||
* @type {string}
|
||||
*/
|
||||
var style = ''
|
||||
let style = ''
|
||||
// Create a compound style definition from the style definitions found for the node in the graph definition
|
||||
style = styleFromStyleArr(style, vertice.styles)
|
||||
style = styleFromStyleArr(style, vertex.styles)
|
||||
|
||||
// Use vertice id as text in the box if no text is provided by the graph definition
|
||||
if (typeof vertice.text === 'undefined') {
|
||||
verticeText = vertice.id
|
||||
} else {
|
||||
verticeText = vertice.text
|
||||
}
|
||||
// Use vertex id as text in the box if no text is provided by the graph definition
|
||||
let vertexText = vertex.text !== undefined ? vertex.text : vertex.id
|
||||
|
||||
var labelTypeStr = ''
|
||||
// We create a SVG label, either by delegating to addHtmlLabel or manually
|
||||
let vertexNode
|
||||
if (conf.htmlLabels) {
|
||||
labelTypeStr = 'html'
|
||||
verticeText = verticeText.replace(/fa:fa[\w-]+/g, function (s) {
|
||||
return '<i class="fa ' + s.substring(3) + '"></i>'
|
||||
})
|
||||
// TODO: addHtmlLabel accepts a labelStyle. Do we possibly have that?
|
||||
const node = { label: vertexText.replace(/fa[lrsb]?:fa-[\w-]+/g, s => `<i class='${s.replace(':', ' ')}'></i>`) }
|
||||
vertexNode = addHtmlLabel(svg, node).node()
|
||||
vertexNode.parentNode.removeChild(vertexNode)
|
||||
} else {
|
||||
var svgLabel = document.createElementNS('http://www.w3.org/2000/svg', 'text')
|
||||
const svgLabel = document.createElementNS('http://www.w3.org/2000/svg', 'text')
|
||||
|
||||
var rows = verticeText.split(/<br>/)
|
||||
const rows = vertexText.split(/<br[/]{0,1}>/)
|
||||
|
||||
var j = 0
|
||||
for (j = 0; j < rows.length; j++) {
|
||||
var tspan = document.createElementNS('http://www.w3.org/2000/svg', 'tspan')
|
||||
for (let j = 0; j < rows.length; j++) {
|
||||
const tspan = document.createElementNS('http://www.w3.org/2000/svg', 'tspan')
|
||||
tspan.setAttributeNS('http://www.w3.org/XML/1998/namespace', 'xml:space', 'preserve')
|
||||
tspan.setAttribute('dy', '1em')
|
||||
tspan.setAttribute('x', '1')
|
||||
tspan.textContent = rows[j]
|
||||
svgLabel.appendChild(tspan)
|
||||
}
|
||||
|
||||
labelTypeStr = 'svg'
|
||||
verticeText = svgLabel
|
||||
vertexNode = svgLabel
|
||||
}
|
||||
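With htmlLabels on, the label text is expanded for Font Awesome shorthand and handed to dagre-d3's addHtmlLabel; with it off, an SVG text element is built by hand with one tspan per <br> row. A quick hedged illustration of what the icon regex produces (the label text is made up):

```
// Hedged illustration of the fa shorthand expansion used above.
const label = 'fa:fa-check Done, then far:fa-clock wait'
const html = label.replace(/fa[lrsb]?:fa-[\w-]+/g, s => `<i class='${s.replace(':', ' ')}'></i>`)
// html === "<i class='fa fa-check'></i> Done, then <i class='far fa-clock'></i> wait"
```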
|
||||
var radious = 0
|
||||
var _shape = ''
|
||||
// If the node has a link, we wrap it in a SVG link
|
||||
if (vertex.link) {
|
||||
const link = document.createElementNS('http://www.w3.org/2000/svg', 'a')
|
||||
link.setAttributeNS('http://www.w3.org/2000/svg', 'href', vertex.link)
|
||||
link.setAttributeNS('http://www.w3.org/2000/svg', 'rel', 'noopener')
|
||||
link.appendChild(vertexNode)
|
||||
vertexNode = link
|
||||
}
|
||||
|
||||
let radious = 0
|
||||
let _shape = ''
|
||||
// Set the shape based parameters
|
||||
switch (vertice.type) {
|
||||
switch (vertex.type) {
|
||||
case 'round':
|
||||
radious = 5
|
||||
_shape = 'rect'
|
||||
@@ -123,14 +121,12 @@ exports.addVertices = function (vert, g) {
|
||||
break
|
||||
case 'group':
|
||||
_shape = 'rect'
|
||||
// Need to create a text node if using svg labels, see #367
|
||||
verticeText = conf.htmlLabels ? '' : document.createElementNS('http://www.w3.org/2000/svg', 'text')
|
||||
break
|
||||
default:
|
||||
_shape = 'rect'
|
||||
}
|
||||
// Add the node
|
||||
g.setNode(vertice.id, { labelType: labelTypeStr, shape: _shape, label: verticeText, rx: radious, ry: radious, 'class': classStr, style: style, id: vertice.id })
|
||||
g.setNode(vertex.id, { labelType: 'svg', shape: _shape, label: vertexNode, rx: radious, ry: radious, 'class': classStr, style: style, id: vertex.id })
|
||||
})
|
||||
}
|
||||
|
||||
@@ -139,17 +135,17 @@ exports.addVertices = function (vert, g) {
|
||||
* @param {Object} edges The edges to add to the graph
|
||||
* @param {Object} g The graph object
|
||||
*/
|
||||
exports.addEdges = function (edges, g) {
|
||||
var cnt = 0
|
||||
export const addEdges = function (edges, g) {
|
||||
let cnt = 0
|
||||
|
||||
var defaultStyle
|
||||
let defaultStyle
|
||||
if (typeof edges.defaultStyle !== 'undefined') {
|
||||
defaultStyle = edges.defaultStyle.toString().replace(/,/g, ';')
|
||||
}
|
||||
|
||||
edges.forEach(function (edge) {
|
||||
cnt++
|
||||
var edgeData = {}
|
||||
const edgeData = {}
|
||||
|
||||
// Set link type for rendering
|
||||
if (edge.type === 'arrow_open') {
|
||||
@@ -158,8 +154,7 @@ exports.addEdges = function (edges, g) {
|
||||
edgeData.arrowhead = 'normal'
|
||||
}
|
||||
|
||||
var style = ''
|
||||
|
||||
let style = ''
|
||||
if (typeof edge.style !== 'undefined') {
|
||||
edge.style.forEach(function (s) {
|
||||
style = style + s + ';'
|
||||
@@ -183,11 +178,11 @@ exports.addEdges = function (edges, g) {
|
||||
edgeData.style = style
|
||||
|
||||
if (typeof edge.interpolate !== 'undefined') {
|
||||
edgeData.lineInterpolate = edge.interpolate
|
||||
edgeData.curve = interpolateToCurve(edge.interpolate, d3.curveLinear)
|
||||
} else if (typeof edges.defaultInterpolate !== 'undefined') {
|
||||
edgeData.curve = interpolateToCurve(edges.defaultInterpolate, d3.curveLinear)
|
||||
} else {
|
||||
if (typeof edges.defaultInterpolate !== 'undefined') {
|
||||
edgeData.lineInterpolate = edges.defaultInterpolate
|
||||
}
|
||||
edgeData.curve = interpolateToCurve(conf.curve, d3.curveLinear)
|
||||
}
|
||||
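The old d3 v3 lineInterpolate strings are now mapped to d3 v4 curve factories through interpolateToCurve from ../../utils. That helper is not part of this hunk, so the following is only a sketch of what such a mapping presumably looks like; the real implementation may differ.

```
// Hedged sketch only - the real interpolateToCurve lives in ../../utils and may differ.
import * as d3 from 'd3'

const interpolateToCurveSketch = (interpolate, defaultCurve) => {
  if (!interpolate) {
    return defaultCurve
  }
  // 'basis' -> d3.curveBasis, 'cardinal' -> d3.curveCardinal, 'linear' -> d3.curveLinear, ...
  const curveName = 'curve' + interpolate.charAt(0).toUpperCase() + interpolate.slice(1)
  return d3[curveName] || defaultCurve
}

// So a `linkStyle 1 interpolate basis ...` statement ends up roughly as:
//   edgeData.curve = interpolateToCurve('basis', d3.curveLinear)   // -> d3.curveBasis
```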
|
||||
if (typeof edge.text === 'undefined') {
|
||||
@@ -203,7 +198,7 @@ exports.addEdges = function (edges, g) {
|
||||
edgeData.label = '<span class="edgeLabel">' + edge.text + '</span>'
|
||||
} else {
|
||||
edgeData.labelType = 'text'
|
||||
edgeData.style = 'stroke: #333; stroke-width: 1.5px;fill:none'
|
||||
edgeData.style = edgeData.style || 'stroke: #333; stroke-width: 1.5px;fill:none'
|
||||
edgeData.label = edge.text.replace(/<br>/g, '\n')
|
||||
}
|
||||
} else {
|
||||
@@ -219,30 +214,14 @@ exports.addEdges = function (edges, g) {
|
||||
* Returns all the styles from classDef statements in the graph definition.
|
||||
* @returns {object} classDef styles
|
||||
*/
|
||||
exports.getClasses = function (text, isDot) {
|
||||
var parser
|
||||
graph.clear()
|
||||
if (isDot) {
|
||||
parser = dot.parser
|
||||
} else {
|
||||
parser = flow.parser
|
||||
}
|
||||
parser.yy = graph
|
||||
export const getClasses = function (text) {
|
||||
flowDb.clear()
|
||||
const parser = flow.parser
|
||||
parser.yy = flowDb
|
||||
|
||||
// Parse the graph definition
|
||||
parser.parse(text)
|
||||
|
||||
var classes = graph.getClasses()
|
||||
|
||||
// Add default class if undefined
|
||||
if (typeof (classes.default) === 'undefined') {
|
||||
classes.default = { id: 'default' }
|
||||
classes.default.styles = []
|
||||
classes.default.clusterStyles = ['rx:4px', 'fill: rgb(255, 255, 222)', 'rx: 4px', 'stroke: rgb(170, 170, 51)', 'stroke-width: 1px']
|
||||
classes.default.nodeLabelStyles = ['fill:#000', 'stroke:none', 'font-weight:300', 'font-family:"Helvetica Neue",Helvetica,Arial,sans-serf', 'font-size:14px']
|
||||
classes.default.edgeLabelStyles = ['fill:#000', 'stroke:none', 'font-weight:300', 'font-family:"Helvetica Neue",Helvetica,Arial,sans-serf', 'font-size:14px']
|
||||
}
|
||||
return classes
|
||||
return flowDb.getClasses()
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -250,33 +229,27 @@ exports.getClasses = function (text, isDot) {
|
||||
* @param text
|
||||
* @param id
|
||||
*/
|
||||
exports.draw = function (text, id, isDot) {
|
||||
log.debug('Drawing flowchart')
|
||||
var parser
|
||||
graph.clear()
|
||||
if (isDot) {
|
||||
parser = dot.parser
|
||||
} else {
|
||||
parser = flow.parser
|
||||
}
|
||||
parser.yy = graph
|
||||
export const draw = function (text, id) {
|
||||
logger.debug('Drawing flowchart')
|
||||
flowDb.clear()
|
||||
const parser = flow.parser
|
||||
parser.yy = flowDb
|
||||
|
||||
// Parse the graph definition
|
||||
try {
|
||||
parser.parse(text)
|
||||
} catch (err) {
|
||||
log.debug('Parsing failed')
|
||||
logger.debug('Parsing failed')
|
||||
}
|
||||
|
||||
// Fetch the default direction, use TD if none was found
|
||||
var dir
|
||||
dir = graph.getDirection()
|
||||
let dir = flowDb.getDirection()
|
||||
if (typeof dir === 'undefined') {
|
||||
dir = 'TD'
|
||||
}
|
||||
|
||||
// Create the input mermaid.graph
|
||||
var g = new dagreD3.graphlib.Graph({
|
||||
const g = new graphlib.Graph({
|
||||
multigraph: true,
|
||||
compound: true
|
||||
})
|
||||
@@ -290,49 +263,47 @@ exports.draw = function (text, id, isDot) {
|
||||
return {}
|
||||
})
|
||||
|
||||
var subG
|
||||
var subGraphs = graph.getSubGraphs()
|
||||
var i = 0
|
||||
for (i = subGraphs.length - 1; i >= 0; i--) {
|
||||
let subG
|
||||
const subGraphs = flowDb.getSubGraphs()
|
||||
for (let i = subGraphs.length - 1; i >= 0; i--) {
|
||||
subG = subGraphs[i]
|
||||
graph.addVertex(subG.id, subG.title, 'group', undefined)
|
||||
flowDb.addVertex(subG.id, subG.title, 'group', undefined, subG.classes)
|
||||
}
|
||||
|
||||
// Fetch the vertices/nodes and edges/links from the parsed graph definition
|
||||
var vert = graph.getVertices()
|
||||
const vert = flowDb.getVertices()
|
||||
|
||||
var edges = graph.getEdges()
|
||||
const edges = flowDb.getEdges()
|
||||
|
||||
i = 0
|
||||
var j
|
||||
let i = 0
|
||||
for (i = subGraphs.length - 1; i >= 0; i--) {
|
||||
subG = subGraphs[i]
|
||||
|
||||
d3.selectAll('cluster').append('text')
|
||||
|
||||
for (j = 0; j < subG.nodes.length; j++) {
|
||||
for (let j = 0; j < subG.nodes.length; j++) {
|
||||
g.setParent(subG.nodes[j], subG.id)
|
||||
}
|
||||
}
|
||||
exports.addVertices(vert, g)
|
||||
exports.addEdges(edges, g)
|
||||
addVertices(vert, g, id)
|
||||
addEdges(edges, g)
|
||||
|
||||
// Create the renderer
|
||||
var Render = dagreD3.render
|
||||
var render = new Render()
|
||||
const Render = dagreD3.render
|
||||
const render = new Render()
|
||||
|
||||
// Add custom shape for rhombus type of box (decision)
|
||||
render.shapes().question = function (parent, bbox, node) {
|
||||
var w = bbox.width
|
||||
var h = bbox.height
|
||||
var s = (w + h) * 0.8
|
||||
var points = [
|
||||
const w = bbox.width
|
||||
const h = bbox.height
|
||||
const s = (w + h) * 0.9
|
||||
const points = [
|
||||
{ x: s / 2, y: 0 },
|
||||
{ x: s, y: -s / 2 },
|
||||
{ x: s / 2, y: -s },
|
||||
{ x: 0, y: -s / 2 }
|
||||
]
|
||||
var shapeSvg = parent.insert('polygon', ':first-child')
|
||||
const shapeSvg = parent.insert('polygon', ':first-child')
|
||||
.attr('points', points.map(function (d) {
|
||||
return d.x + ',' + d.y
|
||||
}).join(' '))
|
||||
@@ -347,16 +318,16 @@ exports.draw = function (text, id, isDot) {
|
||||
|
||||
// Add custom shape for box with inverted arrow on left side
|
||||
render.shapes().rect_left_inv_arrow = function (parent, bbox, node) {
|
||||
var w = bbox.width
|
||||
var h = bbox.height
|
||||
var points = [
|
||||
const w = bbox.width
|
||||
const h = bbox.height
|
||||
const points = [
|
||||
{ x: -h / 2, y: 0 },
|
||||
{ x: w, y: 0 },
|
||||
{ x: w, y: -h },
|
||||
{ x: -h / 2, y: -h },
|
||||
{ x: 0, y: -h / 2 }
|
||||
]
|
||||
var shapeSvg = parent.insert('polygon', ':first-child')
|
||||
const shapeSvg = parent.insert('polygon', ':first-child')
|
||||
.attr('points', points.map(function (d) {
|
||||
return d.x + ',' + d.y
|
||||
}).join(' '))
|
||||
@@ -369,16 +340,16 @@ exports.draw = function (text, id, isDot) {
|
||||
|
||||
// Add custom shape for box with inverted arrow on right side
|
||||
render.shapes().rect_right_inv_arrow = function (parent, bbox, node) {
|
||||
var w = bbox.width
|
||||
var h = bbox.height
|
||||
var points = [
|
||||
const w = bbox.width
|
||||
const h = bbox.height
|
||||
const points = [
|
||||
{ x: 0, y: 0 },
|
||||
{ x: w + h / 2, y: 0 },
|
||||
{ x: w, y: -h / 2 },
|
||||
{ x: w + h / 2, y: -h },
|
||||
{ x: 0, y: -h }
|
||||
]
|
||||
var shapeSvg = parent.insert('polygon', ':first-child')
|
||||
const shapeSvg = parent.insert('polygon', ':first-child')
|
||||
.attr('points', points.map(function (d) {
|
||||
return d.x + ',' + d.y
|
||||
}).join(' '))
|
||||
@@ -391,7 +362,7 @@ exports.draw = function (text, id, isDot) {
|
||||
|
||||
// Add our custom arrow - an empty arrowhead
|
||||
render.arrows().none = function normal (parent, id, edge, type) {
|
||||
var marker = parent.append('marker')
|
||||
const marker = parent.append('marker')
|
||||
.attr('id', id)
|
||||
.attr('viewBox', '0 0 10 10')
|
||||
.attr('refX', 9)
|
||||
@@ -401,14 +372,14 @@ exports.draw = function (text, id, isDot) {
|
||||
.attr('markerHeight', 6)
|
||||
.attr('orient', 'auto')
|
||||
|
||||
var path = marker.append('path')
|
||||
const path = marker.append('path')
|
||||
.attr('d', 'M 0 0 L 0 0 L 0 0 z')
|
||||
dagreD3.util.applyStyle(path, edge[type + 'Style'])
|
||||
}
|
||||
|
||||
// Override normal arrowhead defined in d3. Remove style & add class to allow css styling.
|
||||
render.arrows().normal = function normal (parent, id, edge, type) {
|
||||
var marker = parent.append('marker')
|
||||
const marker = parent.append('marker')
|
||||
.attr('id', id)
|
||||
.attr('viewBox', '0 0 10 10')
|
||||
.attr('refX', 9)
|
||||
@@ -426,75 +397,56 @@ exports.draw = function (text, id, isDot) {
|
||||
}
|
||||
|
||||
// Set up an SVG group so that we can translate the final graph.
|
||||
var svg = d3.select('#' + id)
|
||||
const svg = d3.select(`[id="${id}"]`)
|
||||
|
||||
// Run the renderer. This is what draws the final graph.
|
||||
var element = d3.select('#' + id + ' g')
|
||||
const element = d3.select('#' + id + ' g')
|
||||
render(element, g)
|
||||
|
||||
element.selectAll('g.node')
|
||||
.attr('title', function () {
|
||||
return graph.getTooltip(this.id)
|
||||
return flowDb.getTooltip(this.id)
|
||||
})
|
||||
|
||||
if (conf.useMaxWidth) {
|
||||
// Center the graph
|
||||
svg.attr('height', '100%')
|
||||
svg.attr('width', conf.width)
|
||||
svg.attr('viewBox', '0 0 ' + (g.graph().width + 20) + ' ' + (g.graph().height + 20))
|
||||
svg.attr('style', 'max-width:' + (g.graph().width + 20) + 'px;')
|
||||
} else {
|
||||
// Center the graph
|
||||
svg.attr('height', g.graph().height)
|
||||
if (typeof conf.width === 'undefined') {
|
||||
svg.attr('width', g.graph().width)
|
||||
} else {
|
||||
svg.attr('width', conf.width)
|
||||
}
|
||||
svg.attr('viewBox', '0 0 ' + (g.graph().width + 20) + ' ' + (g.graph().height + 20))
|
||||
}
|
||||
const padding = 8
|
||||
const width = g.maxX - g.minX + padding * 2
|
||||
const height = g.maxY - g.minY + padding * 2
|
||||
svg.attr('width', '100%')
|
||||
svg.attr('style', `max-width: ${width}px;`)
|
||||
svg.attr('viewBox', `0 0 ${width} ${height}`)
|
||||
svg.select('g').attr('transform', `translate(${padding - g.minX}, ${padding - g.minY})`)
|
||||
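The svg is now sized from the rendered graph's bounding box instead of conf.width/useMaxWidth. A small worked example of the arithmetic, with made-up extents:

```
// Hedged worked example with made-up extents.
const g = { minX: -10, maxX: 390, minY: 0, maxY: 240 }
const padding = 8
const width = g.maxX - g.minX + padding * 2    // 416
const height = g.maxY - g.minY + padding * 2   // 256
// viewBox becomes '0 0 416 256' and the inner <g> is shifted by
// translate(18, 8), so the leftmost node (at x = -10) ends up at x = 8.
```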
|
||||
// Index nodes
|
||||
graph.indexNodes('subGraph' + i)
|
||||
flowDb.indexNodes('subGraph' + i)
|
||||
|
||||
// reposition labels
|
||||
for (i = 0; i < subGraphs.length; i++) {
|
||||
subG = subGraphs[i]
|
||||
|
||||
if (subG.title !== 'undefined') {
|
||||
var clusterRects = document.querySelectorAll('#' + id + ' #' + subG.id + ' rect')
|
||||
var clusterEl = document.querySelectorAll('#' + id + ' #' + subG.id)
|
||||
const clusterRects = document.querySelectorAll('#' + id + ' #' + subG.id + ' rect')
|
||||
const clusterEl = document.querySelectorAll('#' + id + ' #' + subG.id)
|
||||
|
||||
var xPos = clusterRects[0].x.baseVal.value
|
||||
var yPos = clusterRects[0].y.baseVal.value
|
||||
var width = clusterRects[0].width.baseVal.value
|
||||
var cluster = d3.select(clusterEl[0])
|
||||
var te = cluster.append('text')
|
||||
te.attr('x', xPos + width / 2)
|
||||
te.attr('y', yPos + 14)
|
||||
te.attr('fill', 'black')
|
||||
te.attr('stroke', 'none')
|
||||
const xPos = clusterRects[0].x.baseVal.value
|
||||
const yPos = clusterRects[0].y.baseVal.value
|
||||
const width = clusterRects[0].width.baseVal.value
|
||||
const cluster = d3.select(clusterEl[0])
|
||||
const te = cluster.select('.label')
|
||||
te.attr('transform', `translate(${xPos + width / 2}, ${yPos + 14})`)
|
||||
te.attr('id', id + 'Text')
|
||||
te.style('text-anchor', 'middle')
|
||||
|
||||
if (typeof subG.title === 'undefined') {
|
||||
te.text('Undef')
|
||||
} else {
|
||||
te.text(subG.title)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add label rects for non html labels
|
||||
if (!conf.htmlLabels) {
|
||||
var labels = document.querySelectorAll('#' + id + ' .edgeLabel .label')
|
||||
var k
|
||||
for (k = 0; k < labels.length; k++) {
|
||||
var label = labels[i]
|
||||
const labels = document.querySelectorAll('#' + id + ' .edgeLabel .label')
|
||||
for (let k = 0; k < labels.length; k++) {
|
||||
const label = labels[k]
|
||||
|
||||
// Get dimensions of label
|
||||
var dim = label.getBBox()
|
||||
const dim = label.getBBox()
|
||||
|
||||
var rect = document.createElementNS('http://www.w3.org/2000/svg', 'rect')
|
||||
const rect = document.createElementNS('http://www.w3.org/2000/svg', 'rect')
|
||||
rect.setAttribute('rx', 0)
|
||||
rect.setAttribute('ry', 0)
|
||||
rect.setAttribute('width', dim.width)
|
||||
@@ -505,3 +457,11 @@ exports.draw = function (text, id, isDot) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
setConf,
|
||||
addVertices,
|
||||
addEdges,
|
||||
getClasses,
|
||||
draw
|
||||
}
|
||||
|
||||
@@ -1,266 +0,0 @@
|
||||
/* description: Parses and executes mathematical expressions. */
|
||||
|
||||
/* lexical grammar */
|
||||
%lex
|
||||
|
||||
%%
|
||||
"style" return 'STYLE';
|
||||
"linkStyle" return 'LINKSTYLE';
|
||||
"classDef" return 'CLASSDEF';
|
||||
"class" return 'CLASS';
|
||||
"click" return 'CLICK';
|
||||
"graph" return 'GRAPH';
|
||||
"digraph" return 'DIGRAPH';
|
||||
"subgraph" return 'SUBGRAPH';
|
||||
"node" return 'NODE';
|
||||
"edge" return 'EDGE';
|
||||
"LR" return 'DIR';
|
||||
"RL" return 'DIR';
|
||||
"TB" return 'DIR';
|
||||
"BT" return 'DIR';
|
||||
"TD" return 'DIR';
|
||||
"BR" return 'DIR';
|
||||
[0-9] return 'NUM';
|
||||
\# return 'BRKT';
|
||||
":" return 'COLON';
|
||||
";" return ';';
|
||||
"," return ',';
|
||||
"=" return '=';
|
||||
"*" return 'MULT';
|
||||
"." return 'DOT';
|
||||
\-\-[x] return 'ARROW_CROSS';
|
||||
\-\> return 'ARROW_POINT';
|
||||
\-\-[o] return 'ARROW_CIRCLE';
|
||||
\-\- return 'ARROW_OPEN';
|
||||
\- return 'MINUS';
|
||||
\+ return 'PLUS';
|
||||
\= return 'EQUALS';
|
||||
[\u0021-\u0027\u002A-\u002E\u003F\u0041-\u005A\u0061-\u007A\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|
|
||||
[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|
|
||||
[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|
|
||||
[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|
|
||||
[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|
|
||||
[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|
|
||||
[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|
|
||||
[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|
|
||||
[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|
|
||||
[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|
|
||||
[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|
|
||||
[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|
|
||||
[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|
|
||||
[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|
|
||||
[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|
|
||||
[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|
|
||||
[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|
|
||||
[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|
|
||||
[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|
|
||||
[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|
|
||||
[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|
|
||||
[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|
|
||||
[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|
|
||||
[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|
|
||||
[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|
|
||||
[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|
|
||||
[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|
|
||||
[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|
|
||||
[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|
|
||||
[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|
|
||||
[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|
|
||||
[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|
|
||||
[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|
|
||||
[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|
|
||||
[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|
|
||||
[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|
|
||||
[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|
|
||||
[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|
|
||||
[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|
|
||||
[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|
|
||||
[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|
|
||||
[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|
|
||||
[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|
|
||||
[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|
|
||||
[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|
|
||||
[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|
|
||||
[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|
|
||||
[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|
|
||||
[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|
|
||||
[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|
|
||||
[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|
|
||||
[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|
|
||||
[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|
|
||||
[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|
|
||||
[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|
|
||||
[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|
|
||||
[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|
|
||||
[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|
|
||||
[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]]|
|
||||
[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|
|
||||
[\uFFD2-\uFFD7\uFFDA-\uFFDC_]
|
||||
return 'ALPHA';
|
||||
"|" return 'PIPE';
|
||||
"(" return 'PS';
|
||||
")" return 'PE';
|
||||
"[" return '[';
|
||||
"]" return ']';
|
||||
"{" return '{'
|
||||
"}" return '}'
|
||||
"\"" return 'QUOTE';
|
||||
\s return 'SPACE';
|
||||
\n return 'NEWLINE';
|
||||
|
||||
<<EOF>> return 'EOF';
|
||||
|
||||
/lex
|
||||
|
||||
/* operator associations and precedence */
|
||||
|
||||
%left '^'
|
||||
|
||||
%start expressions
|
||||
|
||||
%% /* language grammar */
|
||||
|
||||
expressions
|
||||
: graph EOF
|
||||
{$$=$1;}
|
||||
;
|
||||
|
||||
|
||||
graph : graphStatement idStatement '{' stmt_list '}'
|
||||
{$$=$1;}
|
||||
| strict graphStatement idStatement '{' stmt_list '}'
|
||||
{$$=$1;}
|
||||
|graphStatement '{' stmt_list '}'
|
||||
{$$=$1;}
|
||||
;
|
||||
|
||||
graphStatement: GRAPH
|
||||
| DIGRAPH
|
||||
;
|
||||
|
||||
idStatement:
|
||||
textNoTags
|
||||
;
|
||||
textNoTags: textNoTagsToken
|
||||
{$$=$1;}
|
||||
| textNoTags textNoTagsToken
|
||||
{$$=$1+''+$2;}
|
||||
;
|
||||
|
||||
textNoTagsToken: ALPHA
|
||||
{$$=$1;}
|
||||
| NUM
|
||||
{$$=$1;}
|
||||
| COLON
|
||||
{$$ = $1;}
|
||||
| PLUS
|
||||
{$$ = $1;}
|
||||
| EQUALS
|
||||
{$$ = $1;}
|
||||
| MULT
|
||||
{$$ = $1;}
|
||||
| DOT
|
||||
{$$ = $1;}
|
||||
| BRKT
|
||||
{$$ = '<br>';}
|
||||
| SPACE
|
||||
{$$ = $1;}
|
||||
| MINUS
|
||||
{$$ = $1;}
|
||||
| keywords
|
||||
{$$ = $1;}
|
||||
;
|
||||
|
||||
stmt_list : stmt
|
||||
| stmt ';' stmt_list
|
||||
;
|
||||
stmt : node_stmt
|
||||
| edge_stmt
|
||||
| attr_stmt
|
||||
| idStatement '=' idStatement
|
||||
| subgraph
|
||||
;
|
||||
|
||||
attr_stmt : GRAPH attr_list
|
||||
| NODE attr_list
|
||||
| EDGE attr_list
|
||||
;
|
||||
|
||||
attr_list
|
||||
: '[' a_list ']' attr_list
|
||||
| '[' ']' attr_list
|
||||
| '[' a_list ']'
|
||||
| '[' ']'
|
||||
;
|
||||
|
||||
a_list
|
||||
: idStatement '=' idStatement ';' a_list
|
||||
| idStatement '=' idStatement ',' a_list
|
||||
| idStatement '=' idStatement
|
||||
;
|
||||
|
||||
edge_stmt
|
||||
: subgraph edgeRHS attr_list
|
||||
| node_id edgeRHS attr_list
|
||||
{$$='oy';}
|
||||
| node_id edgeRHS
|
||||
{
|
||||
yy.addLink($1,$2.id,$2.op);
|
||||
$$='oy';}
|
||||
| subgraph edgeRHS
|
||||
;
|
||||
|
||||
edgeRHS
|
||||
: edgeop node_id edgeRHS
|
||||
{
|
||||
yy.addLink($2,$3.id,$3.op);
|
||||
$$={op:$1,id:$2};
|
||||
}
|
||||
| edgeop subgraph edgeRHS
|
||||
| edgeop node_id
|
||||
{
|
||||
$$={op:$1,id:$2};
|
||||
}
|
||||
| edgeop subgraph
|
||||
;
|
||||
|
||||
node_stmt
|
||||
: node_id attr_list
|
||||
| node_id
|
||||
;
|
||||
node_id
|
||||
: idStatement port
|
||||
{yy.addVertex($1);$$=$1;}
|
||||
| idStatement
|
||||
{yy.addVertex($1);$$=$1;}
|
||||
;
|
||||
|
||||
port
|
||||
: ':' idStatement ':' compass_pt
|
||||
| ':' idStatement
|
||||
| ':' compass_pt
|
||||
;
|
||||
subgraph
|
||||
: SUBGRAPH idStatement '{' stmt_list '}'
|
||||
| SUBGRAPH '{' stmt_list '}'
|
||||
| '{' stmt_list '}'
|
||||
;
|
||||
compass_pt
|
||||
: 'n'
|
||||
| ne
|
||||
| e
|
||||
| se
|
||||
| s
|
||||
| sw
|
||||
| w
|
||||
| nw
|
||||
| c
|
||||
| _
|
||||
;
|
||||
edgeop
|
||||
: ARROW_POINT
|
||||
{$$='arrow';}
|
||||
| ARROW_OPEN
|
||||
{$$='arrow_open';}
|
||||
;
|
||||
%%
|
||||
@@ -225,10 +225,14 @@ statement
|
||||
{$$=[];}
|
||||
| clickStatement separator
|
||||
{$$=[];}
|
||||
| subgraph text separator document end
|
||||
{$$=yy.addSubGraph($4,$2);}
|
||||
| subgraph SPACE alphaNum SQS text SQE separator document end
|
||||
{$$=yy.addSubGraph($3,$8,$5);}
|
||||
| subgraph SPACE STR separator document end
|
||||
{$$=yy.addSubGraph(undefined,$5,$3);}
|
||||
| subgraph SPACE alphaNum separator document end
|
||||
{$$=yy.addSubGraph($3,$5,$3);}
|
||||
| subgraph separator document end
|
||||
{$$=yy.addSubGraph($3,undefined);}
|
||||
{$$=yy.addSubGraph(undefined,$3,undefined);}
|
||||
;
|
||||
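The reworked subgraph alternatives accept an explicit id with a bracketed title, a quoted title, or a bare title, all funnelled into addSubGraph(id, nodes, title). A hedged sketch of the corresponding flowchart text, parsed through the same parser/db pairing the renderer uses; ids and titles are illustrative.

```
// Hedged sketch - ids and titles are illustrative; import paths follow the flow renderer.
import flow from './parser/flow'
import flowDb from './flowDb'

flow.parser.yy = flowDb
flowDb.clear()
flow.parser.parse(
  'graph TD\n' +
  'subgraph one[A bracketed title]\n' +   // explicit id 'one', title taken from the brackets
  '  a1-->a2\n' +
  'end\n' +
  'subgraph "Only a quoted title"\n' +    // no id given, so one is generated (subGraphN)
  '  b1-->b2\n' +
  'end\n'
)
flowDb.getSubGraphs().length   // -> 2
```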
|
||||
separator: NEWLINE | SEMI | EOF ;
|
||||
@@ -396,10 +400,10 @@ classStatement:CLASS SPACE alphaNum SPACE alphaNum
|
||||
;
|
||||
|
||||
clickStatement
|
||||
: CLICK SPACE alphaNum SPACE alphaNum {$$ = $1;yy.setClickEvent($3, $5, undefined, undefined);}
|
||||
| CLICK SPACE alphaNum SPACE alphaNum SPACE STR {$$ = $1;yy.setClickEvent($3, $5, undefined, $7) ;}
|
||||
| CLICK SPACE alphaNum SPACE STR {$$ = $1;yy.setClickEvent($3, undefined, $5, undefined);}
|
||||
| CLICK SPACE alphaNum SPACE STR SPACE STR {$$ = $1;yy.setClickEvent($3, undefined, $5, $7 );}
|
||||
: CLICK SPACE alphaNum SPACE alphaNum {$$ = $1;yy.setClickEvent($3, $5, undefined);}
|
||||
| CLICK SPACE alphaNum SPACE alphaNum SPACE STR {$$ = $1;yy.setClickEvent($3, $5, $7) ;}
|
||||
| CLICK SPACE alphaNum SPACE STR {$$ = $1;yy.setLink($3, $5, undefined);}
|
||||
| CLICK SPACE alphaNum SPACE STR SPACE STR {$$ = $1;yy.setLink($3, $5, $7 );}
|
||||
;
|
||||
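In the reworked clickStatement, a quoted string after the node id is treated as a link (setLink) while a bare identifier is treated as a callback (setClickEvent); either form can carry a trailing tooltip string. A hedged sketch of the click syntax this accepts; ids, callback name and URL are illustrative.

```
// Hedged sketch - node ids, callback name and URL are illustrative.
import flow from './parser/flow'
import flowDb from './flowDb'

flow.parser.yy = flowDb
flowDb.clear()
flow.parser.parse(
  'graph TD\n' +
  'A-->B\n' +
  'click A myCallback "Shows details for A"\n' +      // -> setClickEvent('A', 'myCallback', tooltip)
  'click B "http://example.com" "Opens the site"\n'   // -> setLink('B', 'http://example.com', tooltip)
)
```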
|
||||
styleStatement:STYLE SPACE alphaNum SPACE stylesOpt
|
||||
@@ -410,21 +414,27 @@ styleStatement:STYLE SPACE alphaNum SPACE stylesOpt
|
||||
|
||||
linkStyleStatement
|
||||
: LINKSTYLE SPACE DEFAULT SPACE stylesOpt
|
||||
{$$ = $1;yy.updateLink($3,$5);}
|
||||
| LINKSTYLE SPACE NUM SPACE stylesOpt
|
||||
{$$ = $1;yy.updateLink([$3],$5);}
|
||||
| LINKSTYLE SPACE numList SPACE stylesOpt
|
||||
{$$ = $1;yy.updateLink($3,$5);}
|
||||
| LINKSTYLE SPACE DEFAULT SPACE INTERPOLATE SPACE alphaNum SPACE stylesOpt
|
||||
{$$ = $1;yy.updateLinkInterpolate($3,$7);yy.updateLink($3,$9);}
|
||||
| LINKSTYLE SPACE NUM SPACE INTERPOLATE SPACE alphaNum SPACE stylesOpt
|
||||
{$$ = $1;yy.updateLinkInterpolate([$3],$7);yy.updateLink([$3],$9);}
|
||||
| LINKSTYLE SPACE numList SPACE INTERPOLATE SPACE alphaNum SPACE stylesOpt
|
||||
{$$ = $1;yy.updateLinkInterpolate($3,$7);yy.updateLink($3,$9);}
|
||||
| LINKSTYLE SPACE DEFAULT SPACE INTERPOLATE SPACE alphaNum
|
||||
{$$ = $1;yy.updateLinkInterpolate($3,$7);}
|
||||
| LINKSTYLE SPACE NUM SPACE INTERPOLATE SPACE alphaNum
|
||||
{$$ = $1;yy.updateLinkInterpolate([$3],$7);}
|
||||
| LINKSTYLE SPACE numList SPACE INTERPOLATE SPACE alphaNum
|
||||
{$$ = $1;yy.updateLinkInterpolate($3,$7);}
|
||||
;
|
||||
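linkStyle now also accepts a comma separated list of edge indices (numList) plus an optional interpolate clause, which is why updateLink and updateLinkInterpolate receive arrays of positions further up. A hedged sketch of the numList form; indices and styles are illustrative.

```
// Hedged sketch - edge indices and styles are illustrative.
import flow from './parser/flow'
import flowDb from './flowDb'

flow.parser.yy = flowDb
flowDb.clear()
flow.parser.parse(
  'graph TD\n' +
  'A-->B\n' +
  'B-->C\n' +
  'linkStyle 0,1 interpolate basis stroke:#f66,stroke-width:2px\n'
  // -> updateLinkInterpolate(['0', '1'], 'basis') and updateLink(['0', '1'], [...styles])
)
flowDb.getEdges()[0].interpolate   // -> 'basis'
```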
|
||||
commentStatement: PCT PCT commentText;
|
||||
|
||||
numList: NUM
|
||||
{$$ = [$1]}
|
||||
| numList COMMA NUM
|
||||
{$1.push($3);$$ = $1;}
|
||||
;
|
||||
|
||||
stylesOpt: style
|
||||
{$$ = [$1]}
|
||||
| stylesOpt COMMA style
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
```
|
||||
gantt
|
||||
dateFormat yyyy-mm-dd
|
||||
title Adding gantt diagram functionality to mermaid
|
||||
|
||||
section Design
|
||||
Design jison grammar :des1, 2014-01-01, 2014-01-04
|
||||
Create example text :des2, 2014-01-01, 3d
|
||||
Bounce gantt example with users :des3, after des2, 5d
|
||||
|
||||
section Implementation
|
||||
update build script :2014-01-02,1h
|
||||
Implement parser and jison :after des1, 2d
|
||||
Create tests for parser :3d
|
||||
Create renderer :5d
|
||||
Create tests for renderer :2d
|
||||
Add to mermaid core :1d
|
||||
|
||||
section Documentation
|
||||
Describe gantt syntax :a1, 2014-01-01, 3d
|
||||
Add gantt diagram to demo page :after a1 , 2h
|
||||
Add gantt to diagram to demo page :after a1 , 2h
|
||||
```
|
||||
@@ -1,62 +0,0 @@
|
||||
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
body {
|
||||
background: #fff;
|
||||
font-family: 'Open-Sans',sans-serif;
|
||||
|
||||
}
|
||||
|
||||
#container{
|
||||
margin: 0 auto;
|
||||
position: relative;
|
||||
width:800px;
|
||||
overflow: visible;
|
||||
}
|
||||
|
||||
|
||||
.svg {
|
||||
width:800px;
|
||||
height:400px;
|
||||
overflow: visible;
|
||||
position:absolute;
|
||||
}
|
||||
|
||||
.grid .tick {
|
||||
stroke: lightgrey;
|
||||
opacity: 0.3;
|
||||
shape-rendering: crispEdges;
|
||||
}
|
||||
.grid path {
|
||||
stroke-width: 0;
|
||||
}
|
||||
|
||||
|
||||
#tag {
|
||||
color: white;
|
||||
background: #FA283D;
|
||||
width: 150px;
|
||||
position: absolute;
|
||||
display: none;
|
||||
padding:3px 6px;
|
||||
margin-left: -80px;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#tag:before {
|
||||
border: solid transparent;
|
||||
content: ' ';
|
||||
height: 0;
|
||||
left: 50%;
|
||||
margin-left: -5px;
|
||||
position: absolute;
|
||||
width: 0;
|
||||
border-width: 10px;
|
||||
border-bottom-color: #FA283D;
|
||||
top: -20px;
|
||||
}
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
/* eslint-env jasmine */
|
||||
/**
|
||||
* Created by knut on 14-11-18.
|
||||
*/
|
||||
describe('when parsing a gantt diagram it', function () {
|
||||
var gantt
|
||||
beforeEach(function () {
|
||||
gantt = require('./parser/gantt').parser
|
||||
gantt.yy = require('./ganttDb')
|
||||
})
|
||||
|
||||
it('should handle a dateFormat definition', function () {
|
||||
var str = 'gantt\ndateFormat yyyy-mm-dd'
|
||||
|
||||
gantt.parse(str)
|
||||
})
|
||||
it('should handle a dateFormat definition', function () {
|
||||
var str = 'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid'
|
||||
|
||||
gantt.parse(str)
|
||||
})
|
||||
it('should handle a dateFormat definition', function () {
|
||||
var str = 'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid'
|
||||
|
||||
gantt.parse(str)
|
||||
})
|
||||
it('should handle a section definition', function () {
|
||||
var str = 'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid'
|
||||
|
||||
gantt.parse(str)
|
||||
})
|
||||
/**
|
||||
* Decision flow as described below. Note bla bla bla
|
||||
* ```
|
||||
* graph TD
|
||||
* A[Hard pledge] -- text on link -->B(Round edge)
|
||||
* B --> C{to do or not to do}
|
||||
* C -->|Too| D[Result one]
|
||||
* C -->|Doo| E[Result two]
|
||||
```
|
||||
* params bapa - a unique bapap
|
||||
*/
|
||||
it('should handle a task definition', function () {
|
||||
var str = 'gantt\n' +
|
||||
'dateFormat yyyy-mm-dd\n' +
|
||||
'title Adding gantt diagram functionality to mermaid\n' +
|
||||
'section Documentation\n' +
|
||||
'Design jison grammar:des1, 2014-01-01, 2014-01-04'
|
||||
|
||||
gantt.parse(str)
|
||||
})
|
||||
})
|
||||
@@ -1,20 +1,22 @@
|
||||
/**
|
||||
* Created by knut on 15-01-14.
|
||||
*/
|
||||
var moment = require('moment')
|
||||
var Logger = require('../../logger')
|
||||
var log = Logger.Log
|
||||
import moment from 'moment-mini'
|
||||
import { logger } from '../../logger'
|
||||
import * as d3 from 'd3'
|
||||
|
||||
var dateFormat = ''
|
||||
var title = ''
|
||||
var sections = []
|
||||
var tasks = []
|
||||
var currentSection = ''
|
||||
let dateFormat = ''
|
||||
let axisFormat = ''
|
||||
let excludes = []
|
||||
let title = ''
|
||||
let sections = []
|
||||
let tasks = []
|
||||
let currentSection = ''
|
||||
const tags = ['active', 'done', 'crit', 'milestone']
|
||||
let funs = []
|
||||
|
||||
exports.clear = function () {
|
||||
export const clear = function () {
|
||||
sections = []
|
||||
tasks = []
|
||||
currentSection = ''
|
||||
funs = []
|
||||
title = ''
|
||||
taskCnt = 0
|
||||
lastTask = undefined
|
||||
@@ -22,30 +24,39 @@ exports.clear = function () {
|
||||
rawTasks = []
|
||||
}
|
||||
|
||||
exports.setDateFormat = function (txt) {
|
||||
export const setAxisFormat = function (txt) {
|
||||
axisFormat = txt
|
||||
}
|
||||
|
||||
export const getAxisFormat = function () {
|
||||
return axisFormat
|
||||
}
|
||||
|
||||
export const setDateFormat = function (txt) {
|
||||
dateFormat = txt
|
||||
}
|
||||
|
||||
exports.getDateFormat = function () {
|
||||
return dateFormat
|
||||
export const setExcludes = function (txt) {
|
||||
excludes = txt.toLowerCase().split(/[\s,]+/)
|
||||
}
|
||||
exports.setTitle = function (txt) {
|
||||
|
||||
export const setTitle = function (txt) {
|
||||
title = txt
|
||||
}
|
||||
|
||||
exports.getTitle = function () {
|
||||
export const getTitle = function () {
|
||||
return title
|
||||
}
|
||||
|
||||
exports.addSection = function (txt) {
|
||||
export const addSection = function (txt) {
|
||||
currentSection = txt
|
||||
sections.push(txt)
|
||||
}
|
||||
|
||||
exports.getTasks = function () {
|
||||
var allItemsPricessed = compileTasks()
|
||||
var maxDepth = 10
|
||||
var iterationCount = 0
|
||||
export const getTasks = function () {
|
||||
let allItemsPricessed = compileTasks()
|
||||
const maxDepth = 10
|
||||
let iterationCount = 0
|
||||
while (!allItemsPricessed && (iterationCount < maxDepth)) {
|
||||
allItemsPricessed = compileTasks()
|
||||
iterationCount++
|
||||
@@ -56,18 +67,54 @@ exports.getTasks = function () {
|
||||
return tasks
|
||||
}
|
||||
|
||||
var getStartDate = function (prevTime, dateFormat, str) {
|
||||
const isInvalidDate = function (date, dateFormat, excludes) {
|
||||
if (date.isoWeekday() >= 6 && excludes.indexOf('weekends') >= 0) {
|
||||
return true
|
||||
}
|
||||
if (excludes.indexOf(date.format('dddd').toLowerCase()) >= 0) {
|
||||
return true
|
||||
}
|
||||
return excludes.indexOf(date.format(dateFormat.trim())) >= 0
|
||||
}
|
||||
|
||||
const checkTaskDates = function (task, dateFormat, excludes) {
|
||||
if (!excludes.length || task.manualEndTime) return
|
||||
let startTime = moment(task.startTime, dateFormat, true)
|
||||
startTime.add(1, 'd')
|
||||
let endTime = moment(task.endTime, dateFormat, true)
|
||||
let renderEndTime = fixTaskDates(startTime, endTime, dateFormat, excludes)
|
||||
task.endTime = endTime.toDate()
|
||||
task.renderEndTime = renderEndTime
|
||||
}
|
||||
|
||||
const fixTaskDates = function (startTime, endTime, dateFormat, excludes) {
|
||||
let invalid = false
|
||||
let renderEndTime = null
|
||||
while (startTime.date() <= endTime.date()) {
|
||||
if (!invalid) {
|
||||
renderEndTime = endTime.toDate()
|
||||
}
|
||||
invalid = isInvalidDate(startTime, dateFormat, excludes)
|
||||
if (invalid) {
|
||||
endTime.add(1, 'd')
|
||||
}
|
||||
startTime.add(1, 'd')
|
||||
}
|
||||
return renderEndTime
|
||||
}
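The three helpers above implement the new excludes handling: a day is skipped when it is a weekend, a named weekday, or an explicitly listed date, and each skipped day pushes the task's end date out by one day while renderEndTime keeps the end actually drawn. A minimal standalone sketch of that skipping rule (the excludes list and dates here are illustrative, not taken from the diff):

import moment from 'moment-mini'

const excludes = ['weekends', '2019-02-06']          // illustrative values
const isExcluded = (date) =>
  (date.isoWeekday() >= 6 && excludes.indexOf('weekends') >= 0) ||
  excludes.indexOf(date.format('YYYY-MM-DD')) >= 0

// A task that would end on Saturday 2019-02-02 slides to Monday 2019-02-04.
const end = moment('2019-02-02', 'YYYY-MM-DD', true)
while (isExcluded(end)) {
  end.add(1, 'd')                                    // skip Saturday, then Sunday
}
console.log(end.format('YYYY-MM-DD'))                // 2019-02-04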
|
||||
|
||||
const getStartDate = function (prevTime, dateFormat, str) {
|
||||
str = str.trim()
|
||||
|
||||
// Test for after
|
||||
var re = /^after\s+([\d\w-]+)/
|
||||
var afterStatement = re.exec(str.trim())
|
||||
const re = /^after\s+([\d\w-]+)/
|
||||
const afterStatement = re.exec(str.trim())
|
||||
|
||||
if (afterStatement !== null) {
|
||||
var task = exports.findTaskById(afterStatement[1])
|
||||
const task = findTaskById(afterStatement[1])
|
||||
|
||||
if (typeof task === 'undefined') {
|
||||
var dt = new Date()
|
||||
const dt = new Date()
|
||||
dt.setHours(0, 0, 0, 0)
|
||||
return dt
|
||||
}
|
||||
@@ -75,29 +122,31 @@ var getStartDate = function (prevTime, dateFormat, str) {
|
||||
}
|
||||
|
||||
// Check for actual date set
|
||||
if (moment(str, dateFormat.trim(), true).isValid()) {
|
||||
return moment(str, dateFormat.trim(), true).toDate()
|
||||
let mDate = moment(str, dateFormat.trim(), true)
|
||||
if (mDate.isValid()) {
|
||||
return mDate.toDate()
|
||||
} else {
|
||||
log.debug('Invalid date:' + str)
|
||||
log.debug('With date format:' + dateFormat.trim())
|
||||
logger.debug('Invalid date:' + str)
|
||||
logger.debug('With date format:' + dateFormat.trim())
|
||||
}
|
||||
|
||||
// Default date - now
|
||||
return new Date()
|
||||
}
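getStartDate above resolves three cases: an "after <taskId>" reference (the referenced task's end time, or today at midnight when the id is unknown), an explicit date in the configured dateFormat, and otherwise a plain new Date() fallback. A small sketch of just the "after" branch; the task lookup is stubbed here, since the real one goes through findTaskById:

const afterRe = /^after\s+([\d\w-]+)/
const resolveStart = (str, lookup) => {
  const m = afterRe.exec(str.trim())
  if (m !== null) {
    const task = lookup(m[1])                 // e.g. findTaskById('des1')
    if (typeof task === 'undefined') {
      const dt = new Date()
      dt.setHours(0, 0, 0, 0)                 // unknown id: today at midnight
      return dt
    }
    return task.endTime
  }
  return null                                  // fall through to date parsing
}

// resolveStart('after des1', id => ({ endTime: new Date(2014, 0, 4) }))
//   -> Sat Jan 04 2014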
|
||||
|
||||
var getEndDate = function (prevTime, dateFormat, str) {
|
||||
const getEndDate = function (prevTime, dateFormat, str) {
|
||||
str = str.trim()
|
||||
|
||||
// Check for actual date
|
||||
if (moment(str, dateFormat.trim(), true).isValid()) {
|
||||
return moment(str, dateFormat.trim()).toDate()
|
||||
let mDate = moment(str, dateFormat.trim(), true)
|
||||
if (mDate.isValid()) {
|
||||
return mDate.toDate()
|
||||
}
|
||||
|
||||
var d = moment(prevTime)
|
||||
const d = moment(prevTime)
|
||||
// Check for length
|
||||
var re = /^([\d]+)([wdhms])/
|
||||
var durationStatement = re.exec(str.trim())
|
||||
const re = /^([\d]+)([wdhms])/
|
||||
const durationStatement = re.exec(str.trim())
|
||||
|
||||
if (durationStatement !== null) {
|
||||
switch (durationStatement[2]) {
|
||||
@@ -117,14 +166,13 @@ var getEndDate = function (prevTime, dateFormat, str) {
|
||||
d.add(durationStatement[1], 'weeks')
|
||||
break
|
||||
}
|
||||
return d.toDate()
|
||||
}
|
||||
// Default date - now
|
||||
return d.toDate()
|
||||
}
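When the string is not a date, getEndDate falls back to duration arithmetic: "<n><unit>" with units w, d, h, m, s is added to the previous time via moment. A compact sketch of that mapping:

import moment from 'moment-mini'

const addDuration = (prevTime, str) => {
  const m = /^([\d]+)([wdhms])/.exec(str.trim())
  if (m === null) return moment(prevTime).toDate()   // no duration: unchanged
  const unit = { w: 'weeks', d: 'days', h: 'hours', m: 'minutes', s: 'seconds' }[m[2]]
  return moment(prevTime).add(m[1], unit).toDate()
}

// addDuration(new Date(2013, 0, 1), '2w') -> Tue Jan 15 2013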
|
||||
|
||||
var taskCnt = 0
|
||||
var parseId = function (idStr) {
|
||||
let taskCnt = 0
|
||||
const parseId = function (idStr) {
|
||||
if (typeof idStr === 'undefined') {
|
||||
taskCnt = taskCnt + 1
|
||||
return 'task' + taskCnt
|
||||
@@ -142,8 +190,8 @@ var parseId = function (idStr) {
|
||||
// endDate
|
||||
// length
|
||||
|
||||
var compileData = function (prevTask, dataStr) {
|
||||
var ds
|
||||
const compileData = function (prevTask, dataStr) {
|
||||
let ds
|
||||
|
||||
if (dataStr.substr(0, 1) === ':') {
|
||||
ds = dataStr.substr(1, dataStr.length)
|
||||
@@ -151,111 +199,95 @@ var compileData = function (prevTask, dataStr) {
|
||||
ds = dataStr
|
||||
}
|
||||
|
||||
var data = ds.split(',')
|
||||
const data = ds.split(',')
|
||||
|
||||
var task = {}
|
||||
var df = exports.getDateFormat()
|
||||
const task = {}
|
||||
|
||||
// Get tags like active, done cand crit
|
||||
var matchFound = true
|
||||
while (matchFound) {
|
||||
matchFound = false
|
||||
if (data[0].match(/^\s*active\s*$/)) {
|
||||
task.active = true
|
||||
data.shift(1)
|
||||
matchFound = true
|
||||
}
|
||||
if (data[0].match(/^\s*done\s*$/)) {
|
||||
task.done = true
|
||||
data.shift(1)
|
||||
matchFound = true
|
||||
}
|
||||
if (data[0].match(/^\s*crit\s*$/)) {
|
||||
task.crit = true
|
||||
data.shift(1)
|
||||
matchFound = true
|
||||
}
|
||||
}
|
||||
var i
|
||||
for (i = 0; i < data.length; i++) {
|
||||
// Get tags like active, done, crit and milestone
|
||||
getTaskTags(data, task, tags)
|
||||
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
data[i] = data[i].trim()
|
||||
}
|
||||
|
||||
let endTimeData = ''
|
||||
switch (data.length) {
|
||||
case 1:
|
||||
task.id = parseId()
|
||||
task.startTime = prevTask.endTime
|
||||
task.endTime = getEndDate(task.startTime, df, data[0])
|
||||
endTimeData = data[0]
|
||||
break
|
||||
case 2:
|
||||
task.id = parseId()
|
||||
task.startTime = getStartDate(undefined, df, data[0])
|
||||
task.endTime = getEndDate(task.startTime, df, data[1])
|
||||
task.startTime = getStartDate(undefined, dateFormat, data[0])
|
||||
endTimeData = data[1]
|
||||
break
|
||||
case 3:
|
||||
task.id = parseId(data[0])
|
||||
task.startTime = getStartDate(undefined, df, data[1])
|
||||
task.endTime = getEndDate(task.startTime, df, data[2])
|
||||
task.startTime = getStartDate(undefined, dateFormat, data[1])
|
||||
endTimeData = data[2]
|
||||
break
|
||||
default:
|
||||
}
|
||||
|
||||
if (endTimeData) {
|
||||
task.endTime = getEndDate(task.startTime, dateFormat, endTimeData)
|
||||
task.manualEndTime = endTimeData === moment(task.endTime).format(dateFormat.trim())
|
||||
checkTaskDates(task, dateFormat, excludes)
|
||||
}
|
||||
|
||||
return task
|
||||
}
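compileData (and parseData below) accept the task data string in three comma-separated shapes: end only, start plus end, or id plus start plus end. A hedged usage sketch driven through the public ganttDb API, the same way the spec files exercise it:

import ganttDb from './ganttDb'

ganttDb.clear()
ganttDb.setDateFormat('YYYY-MM-DD')
ganttDb.addSection('demo')
ganttDb.addTask('three fields', 'id1,2013-01-01,2013-01-12')  // id, start, end
ganttDb.addTask('two fields', '2013-01-13,2d')                // start, end or duration
ganttDb.addTask('one field', '3d')                            // end only, starts at previous end

console.log(ganttDb.getTasks().map(t => t.id))                // ['id1', 'task1', 'task2']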
|
||||
|
||||
var parseData = function (prevTaskId, dataStr) {
|
||||
var ds
|
||||
|
||||
const parseData = function (prevTaskId, dataStr) {
|
||||
let ds
|
||||
if (dataStr.substr(0, 1) === ':') {
|
||||
ds = dataStr.substr(1, dataStr.length)
|
||||
} else {
|
||||
ds = dataStr
|
||||
}
|
||||
|
||||
var data = ds.split(',')
|
||||
const data = ds.split(',')
|
||||
|
||||
var task = {}
|
||||
const task = {}
|
||||
|
||||
// Get tags like active, done cand crit
|
||||
var matchFound = true
|
||||
while (matchFound) {
|
||||
matchFound = false
|
||||
if (data[0].match(/^\s*active\s*$/)) {
|
||||
task.active = true
|
||||
data.shift(1)
|
||||
matchFound = true
|
||||
}
|
||||
if (data[0].match(/^\s*done\s*$/)) {
|
||||
task.done = true
|
||||
data.shift(1)
|
||||
matchFound = true
|
||||
}
|
||||
if (data[0].match(/^\s*crit\s*$/)) {
|
||||
task.crit = true
|
||||
data.shift(1)
|
||||
matchFound = true
|
||||
}
|
||||
}
|
||||
var i
|
||||
for (i = 0; i < data.length; i++) {
|
||||
// Get tags like active, done, crit and milestone
|
||||
getTaskTags(data, task, tags)
|
||||
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
data[i] = data[i].trim()
|
||||
}
|
||||
|
||||
switch (data.length) {
|
||||
case 1:
|
||||
task.id = parseId()
|
||||
task.startTime = { type: 'prevTaskEnd', id: prevTaskId }
|
||||
task.endTime = { data: data[0] }
|
||||
task.startTime = {
|
||||
type: 'prevTaskEnd',
|
||||
id: prevTaskId
|
||||
}
|
||||
task.endTime = {
|
||||
data: data[0]
|
||||
}
|
||||
break
|
||||
case 2:
|
||||
task.id = parseId()
|
||||
task.startTime = { type: 'getStartDate', startData: data[0] }
|
||||
task.endTime = { data: data[1] }
|
||||
task.startTime = {
|
||||
type: 'getStartDate',
|
||||
startData: data[0]
|
||||
}
|
||||
task.endTime = {
|
||||
data: data[1]
|
||||
}
|
||||
break
|
||||
case 3:
|
||||
task.id = parseId(data[0])
|
||||
task.startTime = { type: 'getStartDate', startData: data[1] }
|
||||
task.endTime = { data: data[2] }
|
||||
task.startTime = {
|
||||
type: 'getStartDate',
|
||||
startData: data[1]
|
||||
}
|
||||
task.endTime = {
|
||||
data: data[2]
|
||||
}
|
||||
break
|
||||
default:
|
||||
}
|
||||
@@ -263,19 +295,22 @@ var parseData = function (prevTaskId, dataStr) {
|
||||
return task
|
||||
}
|
||||
|
||||
var lastTask
|
||||
var lastTaskID
|
||||
var rawTasks = []
|
||||
var taskDb = {}
|
||||
exports.addTask = function (descr, data) {
|
||||
var rawTask = {
|
||||
let lastTask
|
||||
let lastTaskID
|
||||
let rawTasks = []
|
||||
const taskDb = {}
|
||||
export const addTask = function (descr, data) {
|
||||
const rawTask = {
|
||||
section: currentSection,
|
||||
type: currentSection,
|
||||
processed: false,
|
||||
manualEndTime: false,
|
||||
renderEndTime: null,
|
||||
raw: { data: data },
|
||||
task: descr
|
||||
task: descr,
|
||||
classes: []
|
||||
}
|
||||
var taskInfo = parseData(lastTaskID, data)
|
||||
const taskInfo = parseData(lastTaskID, data)
|
||||
rawTask.raw.startTime = taskInfo.startTime
|
||||
rawTask.raw.endTime = taskInfo.endTime
|
||||
rawTask.id = taskInfo.id
|
||||
@@ -283,50 +318,51 @@ exports.addTask = function (descr, data) {
|
||||
rawTask.active = taskInfo.active
|
||||
rawTask.done = taskInfo.done
|
||||
rawTask.crit = taskInfo.crit
|
||||
rawTask.milestone = taskInfo.milestone
|
||||
|
||||
var pos = rawTasks.push(rawTask)
|
||||
const pos = rawTasks.push(rawTask)
|
||||
|
||||
lastTaskID = rawTask.id
|
||||
// Store cross ref
|
||||
taskDb[rawTask.id] = pos - 1
|
||||
}
|
||||
|
||||
exports.findTaskById = function (id) {
|
||||
var pos = taskDb[id]
|
||||
export const findTaskById = function (id) {
|
||||
const pos = taskDb[id]
|
||||
return rawTasks[pos]
|
||||
}
|
||||
|
||||
exports.addTaskOrg = function (descr, data) {
|
||||
var newTask = {
|
||||
export const addTaskOrg = function (descr, data) {
|
||||
const newTask = {
|
||||
section: currentSection,
|
||||
type: currentSection,
|
||||
description: descr,
|
||||
task: descr
|
||||
task: descr,
|
||||
classes: []
|
||||
}
|
||||
var taskInfo = compileData(lastTask, data)
|
||||
const taskInfo = compileData(lastTask, data)
|
||||
newTask.startTime = taskInfo.startTime
|
||||
newTask.endTime = taskInfo.endTime
|
||||
newTask.id = taskInfo.id
|
||||
newTask.active = taskInfo.active
|
||||
newTask.done = taskInfo.done
|
||||
newTask.crit = taskInfo.crit
|
||||
newTask.milestone = taskInfo.milestone
|
||||
lastTask = newTask
|
||||
tasks.push(newTask)
|
||||
}
|
||||
|
||||
var compileTasks = function () {
|
||||
var df = exports.getDateFormat()
|
||||
|
||||
var compileTask = function (pos) {
|
||||
var task = rawTasks[pos]
|
||||
var startTime = ''
|
||||
const compileTasks = function () {
|
||||
const compileTask = function (pos) {
|
||||
const task = rawTasks[pos]
|
||||
let startTime = ''
|
||||
switch (rawTasks[pos].raw.startTime.type) {
|
||||
case 'prevTaskEnd':
|
||||
var prevTask = exports.findTaskById(task.prevTaskId)
|
||||
const prevTask = findTaskById(task.prevTaskId)
|
||||
task.startTime = prevTask.endTime
|
||||
break
|
||||
case 'getStartDate':
|
||||
startTime = getStartDate(undefined, df, rawTasks[pos].raw.startTime.startData)
|
||||
startTime = getStartDate(undefined, dateFormat, rawTasks[pos].raw.startTime.startData)
|
||||
if (startTime) {
|
||||
rawTasks[pos].startTime = startTime
|
||||
}
|
||||
@@ -334,18 +370,19 @@ var compileTasks = function () {
|
||||
}
|
||||
|
||||
if (rawTasks[pos].startTime) {
|
||||
rawTasks[pos].endTime = getEndDate(rawTasks[pos].startTime, df, rawTasks[pos].raw.endTime.data)
|
||||
rawTasks[pos].endTime = getEndDate(rawTasks[pos].startTime, dateFormat, rawTasks[pos].raw.endTime.data)
|
||||
if (rawTasks[pos].endTime) {
|
||||
rawTasks[pos].processed = true
|
||||
rawTasks[pos].manualEndTime = rawTasks[pos].raw.endTime.data === moment(rawTasks[pos].endTime).format(dateFormat.trim())
|
||||
checkTaskDates(rawTasks[pos], dateFormat, excludes)
|
||||
}
|
||||
}
|
||||
|
||||
return rawTasks[pos].processed
|
||||
}
|
||||
|
||||
var i
|
||||
var allProcessed = true
|
||||
for (i = 0; i < rawTasks.length; i++) {
|
||||
let allProcessed = true
|
||||
for (let i = 0; i < rawTasks.length; i++) {
|
||||
compileTask(i)
|
||||
|
||||
allProcessed = allProcessed && rawTasks[i].processed
|
||||
@@ -353,6 +390,138 @@ var compileTasks = function () {
|
||||
return allProcessed
|
||||
}
|
||||
|
||||
exports.parseError = function (err, hash) {
|
||||
global.mermaidAPI.parseError(err, hash)
|
||||
/**
|
||||
* Called by parser when a link is found. Adds the URL to the vertex data.
|
||||
* @param ids Comma separated list of ids
|
||||
* @param linkStr URL to create a link for
|
||||
*/
|
||||
export const setLink = function (ids, linkStr) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
let rawTask = findTaskById(id)
|
||||
if (typeof rawTask !== 'undefined') {
|
||||
pushFun(id, () => { window.open(linkStr, '_self') })
|
||||
}
|
||||
})
|
||||
setClass(ids, 'clickable')
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by parser when a special node is found, e.g. a clickable element.
|
||||
* @param ids Comma separated list of ids
|
||||
* @param className Class to add
|
||||
*/
|
||||
export const setClass = function (ids, className) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
let rawTask = findTaskById(id)
|
||||
if (typeof rawTask !== 'undefined') {
|
||||
rawTask.classes.push(className)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const setClickFun = function (id, functionName, functionArgs) {
|
||||
if (typeof functionName === 'undefined') {
|
||||
return
|
||||
}
|
||||
|
||||
let argList = []
|
||||
if (typeof functionArgs === 'string') {
|
||||
/* Splits functionArgs by ',', ignoring all ',' in double quoted strings */
|
||||
argList = functionArgs.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/)
|
||||
for (let i = 0; i < argList.length; i++) {
|
||||
let item = argList[i].trim()
|
||||
/* Removes all double quotes at the start and end of an argument */
|
||||
/* This preserves all starting and ending whitespace inside */
|
||||
if (item.charAt(0) === '"' && item.charAt(item.length - 1) === '"') {
|
||||
item = item.substr(1, item.length - 2)
|
||||
}
|
||||
argList[i] = item
|
||||
}
|
||||
}
|
||||
|
||||
let rawTask = findTaskById(id)
|
||||
if (typeof rawTask !== 'undefined') {
|
||||
pushFun(id, () => { window[functionName](...argList) })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The callbackFunction is executed in a click event bound to the task with the specified id or the task's assigned text
|
||||
* @param id The task's id
|
||||
* @param callbackFunction A function to be executed when clicked on the task or the task's text
|
||||
*/
|
||||
const pushFun = function (id, callbackFunction) {
|
||||
funs.push(function (element) {
|
||||
const elem = d3.select(element).select(`[id="${id}"]`)
|
||||
if (elem !== null) {
|
||||
elem.on('click', function () {
|
||||
callbackFunction()
|
||||
})
|
||||
}
|
||||
})
|
||||
funs.push(function (element) {
|
||||
const elem = d3.select(element).select(`[id="${id}-text"]`)
|
||||
if (elem !== null) {
|
||||
elem.on('click', function () {
|
||||
callbackFunction()
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by parser when a click definition is found. Registers an event handler.
|
||||
* @param ids Comma separated list of ids
|
||||
* @param functionName Function to be called on click
|
||||
* @param functionArgs Function args the function should be called with
|
||||
*/
|
||||
export const setClickEvent = function (ids, functionName, functionArgs) {
|
||||
ids.split(',').forEach(function (id) {
|
||||
setClickFun(id, functionName, functionArgs)
|
||||
})
|
||||
setClass(ids, 'clickable')
|
||||
}
|
||||
|
||||
/**
|
||||
* Binds all functions previously added to fun (specified through click) to the element
|
||||
* @param element
|
||||
*/
|
||||
export const bindFunctions = function (element) {
|
||||
funs.forEach(function (fun) {
|
||||
fun(element)
|
||||
})
|
||||
}
|
||||
|
||||
export default {
|
||||
clear,
|
||||
setDateFormat,
|
||||
setAxisFormat,
|
||||
getAxisFormat,
|
||||
setTitle,
|
||||
getTitle,
|
||||
addSection,
|
||||
getTasks,
|
||||
addTask,
|
||||
findTaskById,
|
||||
addTaskOrg,
|
||||
setExcludes,
|
||||
setClickEvent,
|
||||
setLink,
|
||||
bindFunctions
|
||||
}
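The interactivity helpers above queue click handlers in funs instead of binding them immediately; the renderer calls bindFunctions once the SVG exists. A hedged sketch of that flow (the callback name and element selector are hypothetical):

import ganttDb from './ganttDb'

window.myCallback = (...args) => console.log('task clicked', args)  // hypothetical callback

ganttDb.addSection('Documentation')
ganttDb.addTask('Design jison grammar', 'des1, 2014-01-01, 2014-01-04')
ganttDb.setClickEvent('des1', 'myCallback', '"arg1", "arg2"')
ganttDb.setLink('des1', 'https://mermaidjs.github.io/')

// After the gantt has been drawn, the renderer would hand over the element:
// ganttDb.bindFunctions(document.querySelector('[id="theGanttSvg"]'))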
|
||||
|
||||
function getTaskTags (data, task, tags) {
|
||||
let matchFound = true
|
||||
while (matchFound) {
|
||||
matchFound = false
|
||||
tags.forEach(function (t) {
|
||||
const pattern = '^\\s*' + t + '\\s*$'
|
||||
const regex = new RegExp(pattern)
|
||||
if (data[0].match(regex)) {
|
||||
task[t] = true
|
||||
data.shift(1)
|
||||
matchFound = true
|
||||
}
|
||||
})
|
||||
}
|
||||
}
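getTaskTags replaces the three copy-pasted tag checks removed above: leading tag tokens are popped off the data array and turned into boolean flags on the task. For example, inside this module:

const data = ['crit', 'milestone', '2014-01-01', '2014-01-04']
const task = {}
getTaskTags(data, task, ['active', 'done', 'crit', 'milestone'])
console.log(task)   // { crit: true, milestone: true }
console.log(data)   // ['2014-01-01', '2014-01-04'], only the date fields remain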
|
||||
|
||||
@@ -1,179 +1,128 @@
|
||||
/* eslint-env jasmine */
|
||||
/**
|
||||
* Created by knut on 14-11-18.
|
||||
*/
|
||||
import moment from 'moment-mini'
|
||||
import ganttDb from './ganttDb'
|
||||
|
||||
describe('when using the ganttDb', function () {
|
||||
var gDb
|
||||
var moment = require('moment')
|
||||
|
||||
beforeEach(function () {
|
||||
gDb = require('./ganttDb')
|
||||
gDb.clear()
|
||||
ganttDb.clear()
|
||||
})
|
||||
|
||||
it('should handle an fixed dates', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2013-01-12')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-12', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].id).toEqual('id1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
it('should handle duration (days) instead of fixed date to determine end date', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2d')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-03', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].id).toEqual('id1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
it('should handle duration (hours) instead of fixed date to determine end date', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2h')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-01 2:00', 'YYYY-MM-DD hh:mm').toDate())
|
||||
expect(tasks[0].id).toEqual('id1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
it('should handle duration (minutes) instead of fixed date to determine end date', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2m')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-01 00:02', 'YYYY-MM-DD hh:mm').toDate())
|
||||
expect(tasks[0].id).toEqual('id1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
it('should handle duration (seconds) instead of fixed date to determine end date', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2s')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-01 00:00:02', 'YYYY-MM-DD hh:mm:ss').toDate())
|
||||
expect(tasks[0].id).toEqual('id1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
it('should handle duration (weeks) instead of fixed date to determine end date', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-15', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].id).toEqual('id1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
it.each`
|
||||
testName | section | taskName | taskData | expStartDate | expEndDate | expId | expTask
|
||||
${'should handle fixed dates'} | ${'testa1'} | ${'test1'} | ${'id1,2013-01-01,2013-01-12'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 12)} | ${'id1'} | ${'test1'}
|
||||
${'should handle duration (days) instead of fixed date to determine end date'} | ${'testa1'} | ${'test1'} | ${'id1,2013-01-01,2d'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 3)} | ${'id1'} | ${'test1'}
|
||||
${'should handle duration (hours) instead of fixed date to determine end date'} | ${'testa1'} | ${'test1'} | ${'id1,2013-01-01,2h'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 1, 2)} | ${'id1'} | ${'test1'}
|
||||
${'should handle duration (minutes) instead of fixed date to determine end date'} | ${'testa1'} | ${'test1'} | ${'id1,2013-01-01,2m'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 1, 0, 2)} | ${'id1'} | ${'test1'}
|
||||
${'should handle duration (seconds) instead of fixed date to determine end date'} | ${'testa1'} | ${'test1'} | ${'id1,2013-01-01,2s'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 1, 0, 0, 2)} | ${'id1'} | ${'test1'}
|
||||
${'should handle duration (weeks) instead of fixed date to determine end date'} | ${'testa1'} | ${'test1'} | ${'id1,2013-01-01,2w'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 15)} | ${'id1'} | ${'test1'}
|
||||
${'should handle fixed dates without id'} | ${'testa1'} | ${'test1'} | ${'2013-01-01,2013-01-12'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 12)} | ${'task1'} | ${'test1'}
|
||||
${'should handle duration instead of a fixed date to determine end date without id'} | ${'testa1'} | ${'test1'} | ${'2013-01-01,4d'} | ${new Date(2013, 0, 1)} | ${new Date(2013, 0, 5)} | ${'task1'} | ${'test1'}
|
||||
`('$testName', ({ section, taskName, taskData, expStartDate, expEndDate, expId, expTask }) => {
|
||||
ganttDb.setDateFormat('YYYY-MM-DD')
|
||||
ganttDb.addSection(section)
|
||||
ganttDb.addTask(taskName, taskData)
|
||||
const tasks = ganttDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(expStartDate)
|
||||
expect(tasks[0].endTime).toEqual(expEndDate)
|
||||
expect(tasks[0].id).toEqual(expId)
|
||||
expect(tasks[0].task).toEqual(expTask)
|
||||
})
|
||||
|
||||
it('should handle relative start date based on id', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
gDb.addTask('test2', 'id2,after id1,1d')
|
||||
it.each`
|
||||
section | taskName1 | taskName2 | taskData1 | taskData2 | expStartDate2 | expEndDate2 | expId2 | expTask2
|
||||
${'testa1'} | ${'test1'} | ${'test2'} | ${'id1,2013-01-01,2w'} | ${'id2,after id1,1d'} | ${new Date(2013, 0, 15)} | ${undefined} | ${'id2'} | ${'test2'}
|
||||
${'testa1'} | ${'test1'} | ${'test2'} | ${'id1,2013-01-01,2w'} | ${'id2,after id3,1d'} | ${new Date((new Date()).setHours(0, 0, 0, 0))} | ${undefined} | ${'id2'} | ${'test2'}
|
||||
${'testa1'} | ${'test1'} | ${'test2'} | ${'id1,2013-01-01,2w'} | ${'after id1,1d'} | ${new Date(2013, 0, 15)} | ${undefined} | ${'task1'} | ${'test2'}
|
||||
${'testa1'} | ${'test1'} | ${'test2'} | ${'id1,2013-01-01,2w'} | ${'2013-01-26'} | ${new Date(2013, 0, 15)} | ${new Date(2013, 0, 26)} | ${'task1'} | ${'test2'}
|
||||
${'testa1'} | ${'test1'} | ${'test2'} | ${'id1,2013-01-01,2w'} | ${'2d'} | ${new Date(2013, 0, 15)} | ${new Date(2013, 0, 17)} | ${'task1'} | ${'test2'}
|
||||
`('$testName', ({ section, taskName1, taskName2, taskData1, taskData2, expStartDate2, expEndDate2, expId2, expTask2 }) => {
|
||||
ganttDb.setDateFormat('YYYY-MM-DD')
|
||||
ganttDb.addSection(section)
|
||||
ganttDb.addTask(taskName1, taskData1)
|
||||
ganttDb.addTask(taskName2, taskData2)
|
||||
const tasks = ganttDb.getTasks()
|
||||
expect(tasks[1].startTime).toEqual(expStartDate2)
|
||||
if (expEndDate2 !== undefined) {
|
||||
expect(tasks[1].endTime).toEqual(expEndDate2)
|
||||
}
|
||||
expect(tasks[1].id).toEqual(expId2)
|
||||
expect(tasks[1].task).toEqual(expTask2)
|
||||
})
|
||||
|
||||
var tasks = gDb.getTasks()
|
||||
|
||||
expect(tasks[1].startTime).toEqual(moment('2013-01-15', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].id).toEqual('id2')
|
||||
expect(tasks[1].task).toEqual('test2')
|
||||
})
|
||||
|
||||
it('should handle relative start date based on id when id is invalid', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
gDb.addTask('test2', 'id2,after id3,1d')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[1].startTime).toEqual(new Date((new Date()).setHours(0, 0, 0, 0)))
|
||||
expect(tasks[1].id).toEqual('id2')
|
||||
expect(tasks[1].task).toEqual('test2')
|
||||
})
|
||||
|
||||
it('should handle fixed dates without id', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', '2013-01-01,2013-01-12')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-12', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].id).toEqual('task1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
|
||||
it('should handle duration instead of a fixed date to determine end date without id', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', '2013-01-01,4d')
|
||||
var tasks = gDb.getTasks()
|
||||
expect(tasks[0].startTime).toEqual(moment('2013-01-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2013-01-05', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].id).toEqual('task1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
})
|
||||
|
||||
it('should handle relative start date of a fixed date to determine end date without id', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
gDb.addTask('test2', 'after id1,1d')
|
||||
|
||||
var tasks = gDb.getTasks()
|
||||
|
||||
expect(tasks[1].startTime).toEqual(moment('2013-01-15', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].id).toEqual('task1')
|
||||
expect(tasks[1].task).toEqual('test2')
|
||||
})
|
||||
it('should handle a new task with only an end date as definition', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
gDb.addTask('test2', '2013-01-26')
|
||||
|
||||
var tasks = gDb.getTasks()
|
||||
|
||||
expect(tasks[1].startTime).toEqual(moment('2013-01-15', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].endTime).toEqual(moment('2013-01-26', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].id).toEqual('task1')
|
||||
expect(tasks[1].task).toEqual('test2')
|
||||
})
|
||||
it('should handle a new task with only an end date as definition', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
gDb.addTask('test2', '2d')
|
||||
|
||||
var tasks = gDb.getTasks()
|
||||
|
||||
expect(tasks[1].startTime).toEqual(moment('2013-01-15', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].endTime).toEqual(moment('2013-01-17', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].id).toEqual('task1')
|
||||
expect(tasks[1].task).toEqual('test2')
|
||||
})
|
||||
it('should handle relative start date based on id regardless of sections', function () {
|
||||
gDb.setDateFormat('YYYY-MM-DD')
|
||||
gDb.addSection('testa1')
|
||||
gDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
gDb.addTask('test2', 'id2,after id3,1d')
|
||||
gDb.addSection('testa2')
|
||||
gDb.addTask('test3', 'id3,after id1,2d')
|
||||
ganttDb.setDateFormat('YYYY-MM-DD')
|
||||
ganttDb.addSection('testa1')
|
||||
ganttDb.addTask('test1', 'id1,2013-01-01,2w')
|
||||
ganttDb.addTask('test2', 'id2,after id3,1d')
|
||||
ganttDb.addSection('testa2')
|
||||
ganttDb.addTask('test3', 'id3,after id1,2d')
|
||||
|
||||
var tasks = gDb.getTasks()
|
||||
const tasks = ganttDb.getTasks()
|
||||
|
||||
expect(tasks[1].startTime).toEqual(moment('2013-01-17', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].endTime).toEqual(moment('2013-01-18', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].startTime).toEqual(new Date(2013, 0, 17))
|
||||
expect(tasks[1].endTime).toEqual(new Date(2013, 0, 18))
|
||||
expect(tasks[1].id).toEqual('id2')
|
||||
expect(tasks[1].task).toEqual('test2')
|
||||
|
||||
expect(tasks[2].id).toEqual('id3')
|
||||
expect(tasks[2].task).toEqual('test3')
|
||||
expect(tasks[2].startTime).toEqual(moment('2013-01-15', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[2].endTime).toEqual(moment('2013-01-17', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[2].startTime).toEqual(new Date(2013, 0, 15))
|
||||
expect(tasks[2].endTime).toEqual(new Date(2013, 0, 17))
|
||||
})
|
||||
it('should ignore weekends', function () {
|
||||
ganttDb.setDateFormat('YYYY-MM-DD')
|
||||
ganttDb.setExcludes('weekends 2019-02-06,friday')
|
||||
ganttDb.addSection('weekends skip test')
|
||||
ganttDb.addTask('test1', 'id1,2019-02-01,1d')
|
||||
ganttDb.addTask('test2', 'id2,after id1,2d')
|
||||
ganttDb.addTask('test3', 'id3,after id2,7d')
|
||||
ganttDb.addTask('test4', 'id4,2019-02-01,2019-02-20') // Fixed endTime
|
||||
ganttDb.addTask('test5', 'id5,after id4,1d')
|
||||
ganttDb.addSection('full ending tasks on last day')
|
||||
ganttDb.addTask('test6', 'id6,2019-02-13,2d')
|
||||
ganttDb.addTask('test7', 'id7,after id6,1d')
|
||||
|
||||
const tasks = ganttDb.getTasks()
|
||||
|
||||
expect(tasks[0].startTime).toEqual(moment('2019-02-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].endTime).toEqual(moment('2019-02-04', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].renderEndTime).toEqual(moment('2019-02-02', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[0].id).toEqual('id1')
|
||||
expect(tasks[0].task).toEqual('test1')
|
||||
|
||||
expect(tasks[1].startTime).toEqual(moment('2019-02-04', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].endTime).toEqual(moment('2019-02-07', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].renderEndTime).toEqual(moment('2019-02-06', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[1].id).toEqual('id2')
|
||||
expect(tasks[1].task).toEqual('test2')
|
||||
|
||||
expect(tasks[2].startTime).toEqual(moment('2019-02-07', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[2].endTime).toEqual(moment('2019-02-20', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[2].renderEndTime).toEqual(moment('2019-02-20', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[2].id).toEqual('id3')
|
||||
expect(tasks[2].task).toEqual('test3')
|
||||
|
||||
expect(tasks[3].startTime).toEqual(moment('2019-02-01', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[3].endTime).toEqual(moment('2019-02-20', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[3].renderEndTime).toBeNull() // Fixed end
|
||||
expect(tasks[3].id).toEqual('id4')
|
||||
expect(tasks[3].task).toEqual('test4')
|
||||
|
||||
expect(tasks[4].startTime).toEqual(moment('2019-02-20', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[4].endTime).toEqual(moment('2019-02-21', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[4].renderEndTime).toEqual(moment('2019-02-21', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[4].id).toEqual('id5')
|
||||
expect(tasks[4].task).toEqual('test5')
|
||||
|
||||
expect(tasks[5].startTime).toEqual(moment('2019-02-13', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[5].endTime).toEqual(moment('2019-02-18', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[5].renderEndTime).toEqual(moment('2019-02-15', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[5].id).toEqual('id6')
|
||||
expect(tasks[5].task).toEqual('test6')
|
||||
|
||||
expect(tasks[6].startTime).toEqual(moment('2019-02-18', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[6].endTime).toEqual(moment('2019-02-19', 'YYYY-MM-DD').toDate())
|
||||
expect(tasks[6].id).toEqual('id7')
|
||||
expect(tasks[6].task).toEqual('test7')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
var gantt = require('./parser/gantt').parser
|
||||
gantt.yy = require('./ganttDb')
|
||||
var d3 = require('../../d3')
|
||||
var moment = require('moment')
|
||||
import * as d3 from 'd3'
|
||||
|
||||
var daysInChart
|
||||
var conf = {
|
||||
import { parser } from './parser/gantt'
|
||||
import ganttDb from './ganttDb'
|
||||
|
||||
parser.yy = ganttDb
|
||||
|
||||
const conf = {
|
||||
titleTopMargin: 25,
|
||||
barHeight: 20,
|
||||
barGap: 4,
|
||||
@@ -15,19 +16,19 @@ var conf = {
|
||||
fontSize: 11,
|
||||
fontFamily: '"Open-Sans", "sans-serif"'
|
||||
}
|
||||
module.exports.setConf = function (cnf) {
|
||||
var keys = Object.keys(cnf)
|
||||
export const setConf = function (cnf) {
|
||||
const keys = Object.keys(cnf)
|
||||
|
||||
keys.forEach(function (key) {
|
||||
conf[key] = cnf[key]
|
||||
})
|
||||
}
|
||||
var w
|
||||
module.exports.draw = function (text, id) {
|
||||
gantt.yy.clear()
|
||||
gantt.parse(text)
|
||||
let w
|
||||
export const draw = function (text, id) {
|
||||
parser.yy.clear()
|
||||
parser.parse(text)
|
||||
|
||||
var elem = document.getElementById(id)
|
||||
const elem = document.getElementById(id)
|
||||
w = elem.parentElement.offsetWidth
|
||||
|
||||
if (typeof w === 'undefined') {
|
||||
@@ -38,42 +39,33 @@ module.exports.draw = function (text, id) {
|
||||
w = conf.useWidth
|
||||
}
|
||||
|
||||
var taskArray = gantt.yy.getTasks()
|
||||
const taskArray = parser.yy.getTasks()
|
||||
|
||||
// Set height based on number of tasks
|
||||
var h = taskArray.length * (conf.barHeight + conf.barGap) + 2 * conf.topPadding
|
||||
const h = taskArray.length * (conf.barHeight + conf.barGap) + 2 * conf.topPadding
|
||||
|
||||
elem.setAttribute('height', '100%')
|
||||
// Set viewBox
|
||||
elem.setAttribute('viewBox', '0 0 ' + w + ' ' + h)
|
||||
var svg = d3.select('#' + id)
|
||||
|
||||
var startDate = d3.min(taskArray, function (d) {
|
||||
return d.startTime
|
||||
})
|
||||
var endDate = d3.max(taskArray, function (d) {
|
||||
return d.endTime
|
||||
})
|
||||
const svg = d3.select(`[id="${id}"]`)
|
||||
|
||||
// Set timescale
|
||||
var timeScale = d3.time.scale()
|
||||
const timeScale = d3.scaleTime()
|
||||
.domain([d3.min(taskArray, function (d) {
|
||||
return d.startTime
|
||||
}),
|
||||
d3.max(taskArray, function (d) {
|
||||
return d.endTime
|
||||
})])
|
||||
d3.max(taskArray, function (d) {
|
||||
return d.endTime
|
||||
})])
|
||||
.rangeRound([0, w - conf.leftPadding - conf.rightPadding])
|
||||
|
||||
var categories = []
|
||||
let categories = []
|
||||
|
||||
daysInChart = moment.duration(endDate - startDate).asDays()
|
||||
|
||||
for (var i = 0; i < taskArray.length; i++) {
|
||||
for (let i = 0; i < taskArray.length; i++) {
|
||||
categories.push(taskArray[i].type)
|
||||
}
|
||||
|
||||
var catsUnfiltered = categories // for vert labels
|
||||
const catsUnfiltered = categories // for vert labels
|
||||
|
||||
categories = checkUnique(categories)
|
||||
|
||||
@@ -83,18 +75,18 @@ module.exports.draw = function (text, id) {
|
||||
}
|
||||
|
||||
svg.append('text')
|
||||
.text(gantt.yy.getTitle())
|
||||
.text(parser.yy.getTitle())
|
||||
.attr('x', w / 2)
|
||||
.attr('y', conf.titleTopMargin)
|
||||
.attr('class', 'titleText')
|
||||
|
||||
function makeGant (tasks, pageWidth, pageHeight) {
|
||||
var barHeight = conf.barHeight
|
||||
var gap = barHeight + conf.barGap
|
||||
var topPadding = conf.topPadding
|
||||
var leftPadding = conf.leftPadding
|
||||
const barHeight = conf.barHeight
|
||||
const gap = barHeight + conf.barGap
|
||||
const topPadding = conf.topPadding
|
||||
const leftPadding = conf.leftPadding
|
||||
|
||||
var colorScale = d3.scale.linear()
|
||||
const colorScale = d3.scaleLinear()
|
||||
.domain([0, categories.length])
|
||||
.range(['#00B9FA', '#F95002'])
|
||||
.interpolate(d3.interpolateHcl)
|
||||
@@ -106,6 +98,7 @@ module.exports.draw = function (text, id) {
|
||||
}
|
||||
|
||||
function drawRects (theArray, theGap, theTopPad, theSidePad, theBarHeight, theColorScale, w, h) {
|
||||
// Draw background rects covering the entire width of the graph, these form the section rows.
|
||||
svg.append('g')
|
||||
.selectAll('rect')
|
||||
.data(theArray)
|
||||
@@ -120,7 +113,7 @@ module.exports.draw = function (text, id) {
|
||||
})
|
||||
.attr('height', theGap)
|
||||
.attr('class', function (d) {
|
||||
for (var i = 0; i < categories.length; i++) {
|
||||
for (let i = 0; i < categories.length; i++) {
|
||||
if (d.type === categories[i]) {
|
||||
return 'section section' + (i % conf.numberSectionStyles)
|
||||
}
|
||||
@@ -128,66 +121,100 @@ module.exports.draw = function (text, id) {
|
||||
return 'section section0'
|
||||
})
|
||||
|
||||
var rectangles = svg.append('g')
|
||||
// Draw the rects representing the tasks
|
||||
const rectangles = svg.append('g')
|
||||
.selectAll('rect')
|
||||
.data(theArray)
|
||||
.enter()
|
||||
|
||||
rectangles.append('rect')
|
||||
.attr('id', function (d) { return d.id })
|
||||
.attr('rx', 3)
|
||||
.attr('ry', 3)
|
||||
.attr('x', function (d) {
|
||||
if (d.milestone) {
|
||||
return timeScale(d.startTime) + theSidePad + (0.5 * (timeScale(d.endTime) - timeScale(d.startTime))) - (0.5 * theBarHeight)
|
||||
}
|
||||
return timeScale(d.startTime) + theSidePad
|
||||
})
|
||||
.attr('y', function (d, i) {
|
||||
return i * theGap + theTopPad
|
||||
})
|
||||
.attr('width', function (d) {
|
||||
return (timeScale(d.endTime) - timeScale(d.startTime))
|
||||
if (d.milestone) {
|
||||
return theBarHeight
|
||||
}
|
||||
return (timeScale(d.renderEndTime || d.endTime) - timeScale(d.startTime))
|
||||
})
|
||||
.attr('height', theBarHeight)
|
||||
.attr('transform-origin', function (d, i) {
|
||||
return (timeScale(d.startTime) + theSidePad + 0.5 * (timeScale(d.endTime) - timeScale(d.startTime))).toString() + 'px ' + (i * theGap + theTopPad + 0.5 * theBarHeight).toString() + 'px'
|
||||
})
|
||||
.attr('class', function (d) {
|
||||
var res = 'task '
|
||||
const res = 'task'
|
||||
|
||||
var secNum = 0
|
||||
for (var i = 0; i < categories.length; i++) {
|
||||
let classStr = ''
|
||||
if (d.classes.length > 0) {
|
||||
classStr = d.classes.join(' ')
|
||||
}
|
||||
|
||||
let secNum = 0
|
||||
for (let i = 0; i < categories.length; i++) {
|
||||
if (d.type === categories[i]) {
|
||||
secNum = (i % conf.numberSectionStyles)
|
||||
}
|
||||
}
|
||||
|
||||
let taskClass = ''
|
||||
if (d.active) {
|
||||
if (d.crit) {
|
||||
return res + ' activeCrit' + secNum
|
||||
taskClass += ' activeCrit'
|
||||
} else {
|
||||
return res + ' active' + secNum
|
||||
taskClass = ' active'
|
||||
}
|
||||
}
|
||||
|
||||
if (d.done) {
|
||||
} else if (d.done) {
|
||||
if (d.crit) {
|
||||
return res + ' doneCrit' + secNum
|
||||
taskClass = ' doneCrit'
|
||||
} else {
|
||||
return res + ' done' + secNum
|
||||
taskClass = ' done'
|
||||
}
|
||||
} else {
|
||||
if (d.crit) {
|
||||
taskClass += ' crit'
|
||||
}
|
||||
}
|
||||
|
||||
if (d.crit) {
|
||||
return res + ' crit' + secNum
|
||||
if (taskClass.length === 0) {
|
||||
taskClass = ' task'
|
||||
}
|
||||
|
||||
return res + ' task' + secNum
|
||||
if (d.milestone) {
|
||||
taskClass = ' milestone ' + taskClass
|
||||
}
|
||||
|
||||
taskClass += secNum
|
||||
|
||||
taskClass += ' ' + classStr
|
||||
|
||||
return res + taskClass
|
||||
})
|
||||
|
||||
// Append task labels
|
||||
rectangles.append('text')
|
||||
.text(function (d) {
|
||||
return d.task
|
||||
})
|
||||
.attr('font-size', conf.fontSize)
|
||||
.attr('x', function (d) {
|
||||
var startX = timeScale(d.startTime)
|
||||
var endX = timeScale(d.endTime)
|
||||
var textWidth = this.getBBox().width
|
||||
let startX = timeScale(d.startTime)
|
||||
let endX = timeScale(d.renderEndTime || d.endTime)
|
||||
if (d.milestone) {
|
||||
startX += (0.5 * (timeScale(d.endTime) - timeScale(d.startTime))) - (0.5 * theBarHeight)
|
||||
}
|
||||
if (d.milestone) {
|
||||
endX = startX + theBarHeight
|
||||
}
|
||||
const textWidth = this.getBBox().width
|
||||
|
||||
// Check id text width > width of rectangle
|
||||
if (textWidth > (endX - startX)) {
|
||||
@@ -205,17 +232,26 @@ module.exports.draw = function (text, id) {
|
||||
})
|
||||
.attr('text-height', theBarHeight)
|
||||
.attr('class', function (d) {
|
||||
var startX = timeScale(d.startTime)
|
||||
var endX = timeScale(d.endTime)
|
||||
var textWidth = this.getBBox().width
|
||||
var secNum = 0
|
||||
for (var i = 0; i < categories.length; i++) {
|
||||
const startX = timeScale(d.startTime)
|
||||
let endX = timeScale(d.endTime)
|
||||
if (d.milestone) {
|
||||
endX = startX + theBarHeight
|
||||
}
|
||||
const textWidth = this.getBBox().width
|
||||
|
||||
let classStr = ''
|
||||
if (d.classes.length > 0) {
|
||||
classStr = d.classes.join(' ')
|
||||
}
|
||||
|
||||
let secNum = 0
|
||||
for (let i = 0; i < categories.length; i++) {
|
||||
if (d.type === categories[i]) {
|
||||
secNum = (i % conf.numberSectionStyles)
|
||||
}
|
||||
}
|
||||
|
||||
var taskType = ''
|
||||
let taskType = ''
|
||||
if (d.active) {
|
||||
if (d.crit) {
|
||||
taskType = 'activeCritText' + secNum
|
||||
@@ -236,75 +272,27 @@ module.exports.draw = function (text, id) {
|
||||
}
|
||||
}
|
||||
|
||||
if (d.milestone) {
|
||||
taskType += ' milestoneText'
|
||||
}
|
||||
|
||||
// Check id text width > width of rectangle
|
||||
if (textWidth > (endX - startX)) {
|
||||
if (endX + textWidth + 1.5 * conf.leftPadding > w) {
|
||||
return 'taskTextOutsideLeft taskTextOutside' + secNum + ' ' + taskType
|
||||
return classStr + ' taskTextOutsideLeft taskTextOutside' + secNum + ' ' + taskType
|
||||
} else {
|
||||
return 'taskTextOutsideRight taskTextOutside' + secNum + ' ' + taskType
|
||||
return classStr + ' taskTextOutsideRight taskTextOutside' + secNum + ' ' + taskType
|
||||
}
|
||||
} else {
|
||||
return 'taskText taskText' + secNum + ' ' + taskType
|
||||
return classStr + ' taskText taskText' + secNum + ' ' + taskType
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function makeGrid (theSidePad, theTopPad, w, h) {
|
||||
var pre = [
|
||||
['.%L', function (d) {
|
||||
return d.getMilliseconds()
|
||||
}],
|
||||
[':%S', function (d) {
|
||||
return d.getSeconds()
|
||||
}],
|
||||
// Within a hour
|
||||
['h1 %I:%M', function (d) {
|
||||
return d.getMinutes()
|
||||
}]]
|
||||
var post = [
|
||||
['%Y', function () {
|
||||
return true
|
||||
}]]
|
||||
|
||||
var mid = [
|
||||
// Within a day
|
||||
['%I:%M', function (d) {
|
||||
return d.getHours()
|
||||
}],
|
||||
// Day within a week (not monday)
|
||||
['%a %d', function (d) {
|
||||
return d.getDay() && d.getDate() !== 1
|
||||
}],
|
||||
// within a month
|
||||
['%b %d', function (d) {
|
||||
return d.getDate() !== 1
|
||||
}],
|
||||
// Month
|
||||
['%B', function (d) {
|
||||
return d.getMonth()
|
||||
}]
|
||||
]
|
||||
var formatter
|
||||
if (typeof conf.axisFormatter !== 'undefined') {
|
||||
mid = []
|
||||
conf.axisFormatter.forEach(function (item) {
|
||||
var n = []
|
||||
n[0] = item[0]
|
||||
n[1] = item[1]
|
||||
mid.push(n)
|
||||
})
|
||||
}
|
||||
formatter = pre.concat(mid).concat(post)
|
||||
|
||||
var xAxis = d3.svg.axis()
|
||||
.scale(timeScale)
|
||||
.orient('bottom')
|
||||
.tickSize(-h + theTopPad + conf.gridLineStartPadding, 0, 0)
|
||||
.tickFormat(d3.time.format.multi(formatter))
|
||||
|
||||
if (daysInChart > 7 && daysInChart < 230) {
|
||||
xAxis = xAxis.ticks(d3.time.monday.range)
|
||||
}
|
||||
let xAxis = d3.axisBottom(timeScale)
|
||||
.tickSize(-h + theTopPad + conf.gridLineStartPadding)
|
||||
.tickFormat(d3.timeFormat(parser.yy.getAxisFormat() || conf.axisFormat || '%Y-%m-%d'))
|
||||
|
||||
svg.append('g')
|
||||
.attr('class', 'grid')
|
||||
@@ -319,10 +307,10 @@ module.exports.draw = function (text, id) {
|
||||
}
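makeGrid is where the renderer moved from the d3 v3 API (d3.time.scale, d3.svg.axis and the multi-formatter) to the v4 style shown above (d3.scaleTime, d3.axisBottom and a single timeFormat string driven by axisFormat). A reduced sketch of that construction with illustrative domain, range, padding and format values:

import * as d3 from 'd3'

const timeScale = d3.scaleTime()
  .domain([new Date(2014, 0, 1), new Date(2014, 0, 31)])   // illustrative
  .rangeRound([0, 600])

const xAxis = d3.axisBottom(timeScale)
  .tickSize(-150)                                          // negative size draws grid lines
  .tickFormat(d3.timeFormat('%Y-%m-%d'))

d3.select('svg')
  .append('g')
  .attr('class', 'grid')
  .attr('transform', 'translate(75, 200)')                 // illustrative padding and height
  .call(xAxis)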
|
||||
|
||||
function vertLabels (theGap, theTopPad) {
|
||||
var numOccurances = []
|
||||
var prevGap = 0
|
||||
const numOccurances = []
|
||||
let prevGap = 0
|
||||
|
||||
for (var i = 0; i < categories.length; i++) {
|
||||
for (let i = 0; i < categories.length; i++) {
|
||||
numOccurances[i] = [categories[i], getCount(categories[i], catsUnfiltered)]
|
||||
}
|
||||
|
||||
@@ -337,7 +325,7 @@ module.exports.draw = function (text, id) {
|
||||
.attr('x', 10)
|
||||
.attr('y', function (d, i) {
|
||||
if (i > 0) {
|
||||
for (var j = 0; j < i; j++) {
|
||||
for (let j = 0; j < i; j++) {
|
||||
prevGap += numOccurances[i - 1][1]
|
||||
return d[1] * theGap / 2 + prevGap * theGap + theTopPad
|
||||
}
|
||||
@@ -346,7 +334,7 @@ module.exports.draw = function (text, id) {
|
||||
}
|
||||
})
|
||||
.attr('class', function (d) {
|
||||
for (var i = 0; i < categories.length; i++) {
|
||||
for (let i = 0; i < categories.length; i++) {
|
||||
if (d[0] === categories[i]) {
|
||||
return 'sectionTitle sectionTitle' + (i % conf.numberSectionStyles)
|
||||
}
|
||||
@@ -356,10 +344,10 @@ module.exports.draw = function (text, id) {
|
||||
}
|
||||
|
||||
function drawToday (theSidePad, theTopPad, w, h) {
|
||||
var todayG = svg.append('g')
|
||||
const todayG = svg.append('g')
|
||||
.attr('class', 'today')
|
||||
|
||||
var today = new Date()
|
||||
const today = new Date()
|
||||
|
||||
todayG.append('line')
|
||||
.attr('x1', timeScale(today) + theSidePad)
|
||||
@@ -371,9 +359,9 @@ module.exports.draw = function (text, id) {
|
||||
|
||||
// from this stackexchange question: http://stackoverflow.com/questions/1890203/unique-for-arrays-in-javascript
|
||||
function checkUnique (arr) {
|
||||
var hash = {}
|
||||
var result = []
|
||||
for (var i = 0, l = arr.length; i < l; ++i) {
|
||||
const hash = {}
|
||||
const result = []
|
||||
for (let i = 0, l = arr.length; i < l; ++i) {
|
||||
if (!hash.hasOwnProperty(arr[i])) { // it works with objects! in FF, at least
|
||||
hash[arr[i]] = true
|
||||
result.push(arr[i])
|
||||
@@ -384,8 +372,8 @@ module.exports.draw = function (text, id) {
|
||||
|
||||
// from this stackexchange question: http://stackoverflow.com/questions/14227981/count-how-many-strings-in-an-array-have-duplicates-in-the-same-array
|
||||
function getCounts (arr) {
|
||||
var i = arr.length // var to loop over
|
||||
var obj = {} // obj to store results
|
||||
let i = arr.length // index to loop over
|
||||
const obj = {} // obj to store results
|
||||
while (i) {
|
||||
obj[arr[--i]] = (obj[arr[i]] || 0) + 1 // count occurrences
|
||||
}
|
||||
@@ -397,3 +385,8 @@ module.exports.draw = function (text, id) {
|
||||
return getCounts(arr)[word] || 0
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
setConf,
|
||||
draw
|
||||
}
|
||||
|
||||
@@ -7,26 +7,64 @@
|
||||
|
||||
%options case-insensitive
|
||||
|
||||
%{
|
||||
// Pre-lexer code can go here
|
||||
%}
|
||||
|
||||
%x click
|
||||
%x href
|
||||
%x callbackname
|
||||
%x callbackargs
|
||||
%%
|
||||
|
||||
[\n]+ return 'NL';
|
||||
\s+ /* skip whitespace */
|
||||
\#[^\n]* /* skip comments */
|
||||
\%%[^\n]* /* skip comments */
|
||||
"gantt" return 'gantt';
|
||||
|
||||
/*
|
||||
---interactivity command---
|
||||
'href' adds a link to the specified task. 'href' can only be specified when the
|
||||
line was introduced with 'click'.
|
||||
'href "<link>"' attaches the specified link to the task that was specified by 'click'.
|
||||
*/
|
||||
"href"[\s]+["] this.begin("href");
|
||||
<href>["] this.popState();
|
||||
<href>[^"]* return 'href';
|
||||
|
||||
/*
|
||||
---interactivity command---
|
||||
'call' adds a callback to the specified task. 'call' can only be specified when
|
||||
the line was introduced with 'click'.
|
||||
'call <callbackname>(<args>)' attaches the function 'callbackname' with the specified
|
||||
arguments to the task that was specified by 'click'.
|
||||
Function arguments are optional: 'call <callbackname>()' simply executes 'callbackname' without any arguments.
|
||||
*/
|
||||
"call"[\s]+ this.begin("callbackname");
|
||||
<callbackname>\([\s]*\) this.popState();
|
||||
<callbackname>\( this.popState(); this.begin("callbackargs");
|
||||
<callbackname>[^(]* return 'callbackname';
|
||||
<callbackargs>\) this.popState();
|
||||
<callbackargs>[^)]* return 'callbackargs';
|
||||
|
||||
/*
|
||||
'click' is the keyword to introduce a line that contains interactivity commands.
|
||||
'click' must be followed by an existing task-id. All commands are attached to
|
||||
that id.
|
||||
'click <id>' can be followed by href or call commands in any desired order
|
||||
*/
|
||||
"click"[\s]+ this.begin("click");
|
||||
<click>[\s\n] this.popState();
|
||||
<click>[^\s\n]* return 'click';
|
||||
|
||||
"gantt" return 'gantt';
|
||||
"dateFormat"\s[^#\n;]+ return 'dateFormat';
|
||||
"axisFormat"\s[^#\n;]+ return 'axisFormat';
|
||||
"excludes"\s[^#\n;]+ return 'excludes';
|
||||
\d\d\d\d"-"\d\d"-"\d\d return 'date';
|
||||
"title"\s[^#\n;]+ return 'title';
|
||||
"section"\s[^#:\n;]+ return 'section';
|
||||
[^#:\n;]+ return 'taskTxt';
|
||||
[^#:\n;]+ return 'taskTxt';
|
||||
":"[^#\n;]+ return 'taskData';
|
||||
":" return ':';
|
||||
<<EOF>> return 'EOF';
|
||||
. return 'INVALID';
|
||||
":" return ':';
|
||||
<<EOF>> return 'EOF';
|
||||
. return 'INVALID';
|
||||
|
||||
/lex
|
||||
|
||||
@@ -53,10 +91,40 @@ line
|
||||
;
|
||||
|
||||
statement
|
||||
: 'dateFormat' {yy.setDateFormat($1.substr(11));$$=$1.substr(11);}
|
||||
| title {yy.setTitle($1.substr(6));$$=$1.substr(6);}
|
||||
| section {yy.addSection($1.substr(8));$$=$1.substr(8);}
|
||||
| taskTxt taskData {yy.addTask($1,$2);$$='task';}
|
||||
;
|
||||
: 'dateFormat' {yy.setDateFormat($1.substr(11));$$=$1.substr(11);}
|
||||
| 'axisFormat' {yy.setAxisFormat($1.substr(11));$$=$1.substr(11);}
|
||||
| 'excludes' {yy.setExcludes($1.substr(9));$$=$1.substr(9);}
|
||||
| title {yy.setTitle($1.substr(6));$$=$1.substr(6);}
|
||||
| section {yy.addSection($1.substr(8));$$=$1.substr(8);}
|
||||
| clickStatement
|
||||
| taskTxt taskData {yy.addTask($1,$2);$$='task';}
|
||||
;
|
||||
|
||||
%%
|
||||
/*
|
||||
click allows any combination of href and call.
|
||||
*/
|
||||
clickStatement
|
||||
: click callbackname {$$ = $1;yy.setClickEvent($1, $2, null);}
|
||||
| click callbackname callbackargs {$$ = $1;yy.setClickEvent($1, $2, $3);}
|
||||
|
||||
| click callbackname href {$$ = $1;yy.setClickEvent($1, $2, null);yy.setLink($1,$3);}
|
||||
| click callbackname callbackargs href {$$ = $1;yy.setClickEvent($1, $2, $3);yy.setLink($1,$4);}
|
||||
|
||||
| click href callbackname {$$ = $1;yy.setClickEvent($1, $3, null);yy.setLink($1,$2);}
|
||||
| click href callbackname callbackargs {$$ = $1;yy.setClickEvent($1, $3, $4);yy.setLink($1,$2);}
|
||||
|
||||
| click href {$$ = $1;yy.setLink($1, $2);}
|
||||
;
|
||||
|
||||
clickStatementDebug
|
||||
: click callbackname {$$=$1 + ' ' + $2;}
|
||||
| click callbackname href {$$=$1 + ' ' + $2 + ' ' + $3;}
|
||||
|
||||
| click callbackname callbackargs {$$=$1 + ' ' + $2 + ' ' + $3;}
|
||||
| click callbackname callbackargs href {$$=$1 + ' ' + $2 + ' ' + $3 + ' ' + $4;}
|
||||
|
||||
| click href callbackname {$$=$1 + ' ' + $2 + ' ' + $3;}
|
||||
| click href callbackname callbackargs {$$=$1 + ' ' + $2 + ' ' + $3 + ' ' + $4;}
|
||||
|
||||
| click href {$$=$1 + ' ' + $2;}
|
||||
;%%
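Putting the new lexer states and rules together, a gantt definition can now carry axisFormat, excludes and click lines. A hedged end-to-end sketch, driven the same way the spec file below drives the parser:

import { parser } from './gantt'
import ganttDb from '../ganttDb'

parser.yy = ganttDb
parser.yy.clear()

parser.parse(
  'gantt\n' +
  'dateFormat YYYY-MM-DD\n' +
  'axisFormat %d/%m\n' +
  'excludes weekends\n' +
  'section Documentation\n' +
  'Design jison grammar:des1, 2014-01-01, 2014-01-04\n' +
  'click des1 href "https://mermaidjs.github.io/"\n'
)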
|
||||
|
||||
91 lines: src/diagrams/gantt/parser/gantt.spec.js (new file)
@@ -0,0 +1,91 @@
|
||||
/* eslint-env jasmine */
|
||||
/* eslint-disable no-eval */
|
||||
import { parser } from './gantt'
|
||||
import ganttDb from '../ganttDb'
|
||||
|
||||
describe('when parsing a gantt diagram it', function () {
|
||||
beforeEach(function () {
|
||||
parser.yy = ganttDb
|
||||
parser.yy.clear()
|
||||
})
|
||||
|
||||
it('should handle a dateFormat definition', function () {
|
||||
const str = 'gantt\ndateFormat yyyy-mm-dd'
|
||||
|
||||
parser.parse(str)
|
||||
})
|
||||
it('should handle a title definition', function () {
|
||||
const str = 'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid'
|
||||
|
||||
parser.parse(str)
|
||||
})
|
||||
it('should handle an excludes definition', function () {
|
||||
const str = 'gantt\ndateFormat yyyy-mm-dd\ntitle Adding gantt diagram functionality to mermaid\nexcludes weekdays 2019-02-01'
|
||||
|
||||
parser.parse(str)
|
||||
})
|
||||
it('should handle a section definition', function () {
|
||||
const str = 'gantt\n' +
|
||||
'dateFormat yyyy-mm-dd\n' +
|
||||
'title Adding gantt diagram functionality to mermaid\n' +
|
||||
'excludes weekdays 2019-02-01\n' +
|
||||
'section Documentation'
|
||||
|
||||
parser.parse(str)
|
||||
})
|
||||
/**
|
||||
* Decision flow as outlined below. Note: bla bla bla
|
||||
* ```
|
||||
* graph TD
|
||||
* A[Hard pledge] -- text on link -->B(Round edge)
|
||||
* B --> C{to do or not to do}
|
||||
* C -->|Too| D[Result one]
|
||||
* C -->|Doo| E[Result two]
|
||||
```
|
||||
* params bapa - a unique bapap
|
||||
*/
|
||||
it('should handle a task definition', function () {
|
||||
const str = 'gantt\n' +
|
||||
'dateFormat YYYY-MM-DD\n' +
|
||||
'title Adding gantt diagram functionality to mermaid\n' +
|
||||
'section Documentation\n' +
|
||||
'Design jison grammar:des1, 2014-01-01, 2014-01-04'
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
const tasks = parser.yy.getTasks()
|
||||
|
||||
expect(tasks[0].startTime).toEqual(new Date(2014, 0, 1))
|
||||
expect(tasks[0].endTime).toEqual(new Date(2014, 0, 4))
|
||||
expect(tasks[0].id).toEqual('des1')
|
||||
expect(tasks[0].task).toEqual('Design jison grammar')
|
||||
})
|
||||
it.each`
|
||||
tags | milestone | done | crit | active
|
||||
${'milestone'} | ${true} | ${false} | ${false} | ${false}
|
||||
${'done'} | ${false} | ${true} | ${false} | ${false}
|
||||
${'crit'} | ${false} | ${false} | ${true} | ${false}
|
||||
${'active'} | ${false} | ${false} | ${false} | ${true}
|
||||
${'crit,milestone,done'} | ${true} | ${true} | ${true} | ${false}
|
||||
`('should handle a task with tags $tags', ({ tags, milestone, done, crit, active }) => {
|
||||
const str = 'gantt\n' +
|
||||
'dateFormat YYYY-MM-DD\n' +
|
||||
'title Adding gantt diagram functionality to mermaid\n' +
|
||||
'section Documentation\n' +
|
||||
'test task:' + tags + ', 2014-01-01, 2014-01-04'
|
||||
|
||||
const allowedTags = ['active', 'done', 'crit', 'milestone']
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
const tasks = parser.yy.getTasks()
|
||||
|
||||
allowedTags.forEach(function (t) {
|
||||
if (eval(t)) {
|
||||
expect(tasks[0][t]).toBeTruthy()
|
||||
} else {
|
||||
expect(tasks[0][t]).toBeFalsy()
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
229 lines: src/diagrams/git/gitGraphAst.js (new file)
@@ -0,0 +1,229 @@
|
||||
import _ from 'lodash'
|
||||
|
||||
import { logger } from '../../logger'
|
||||
|
||||
let commits = {}
|
||||
let head = null
|
||||
let branches = { 'master': head }
|
||||
let curBranch = 'master'
|
||||
let direction = 'LR'
|
||||
let seq = 0
|
||||
|
||||
function getRandomInt (min, max) {
|
||||
return Math.floor(Math.random() * (max - min)) + min
|
||||
}
|
||||
|
||||
function getId () {
|
||||
const pool = '0123456789abcdef'
|
||||
let id = ''
|
||||
for (let i = 0; i < 7; i++) {
|
||||
id += pool[getRandomInt(0, 16)]
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
function isfastforwardable (currentCommit, otherCommit) {
|
||||
logger.debug('Entering isfastforwardable:', currentCommit.id, otherCommit.id)
|
||||
while (currentCommit.seq <= otherCommit.seq && currentCommit !== otherCommit) {
|
||||
// only if other branch has more commits
|
||||
if (otherCommit.parent == null) break
|
||||
if (Array.isArray(otherCommit.parent)) {
|
||||
logger.debug('In merge commit:', otherCommit.parent)
|
||||
return isfastforwardable(currentCommit, commits[otherCommit.parent[0]]) ||
|
||||
isfastforwardable(currentCommit, commits[otherCommit.parent[1]])
|
||||
} else {
|
||||
otherCommit = commits[otherCommit.parent]
|
||||
}
|
||||
}
|
||||
logger.debug(currentCommit.id, otherCommit.id)
|
||||
return currentCommit.id === otherCommit.id
|
||||
}
|
||||
|
||||
function isReachableFrom (currentCommit, otherCommit) {
|
||||
const currentSeq = currentCommit.seq
|
||||
const otherSeq = otherCommit.seq
|
||||
if (currentSeq > otherSeq) return isfastforwardable(otherCommit, currentCommit)
|
||||
return false
|
||||
}
|
||||
|
||||
export const setDirection = function (dir) {
|
||||
direction = dir
|
||||
}
|
||||
let options = {}
|
||||
export const setOptions = function (rawOptString) {
|
||||
logger.debug('options str', rawOptString)
|
||||
rawOptString = rawOptString && rawOptString.trim()
|
||||
rawOptString = rawOptString || '{}'
|
||||
try {
|
||||
options = JSON.parse(rawOptString)
|
||||
} catch (e) {
|
||||
logger.error('error while parsing gitGraph options', e.message)
|
||||
}
|
||||
}
|
||||
|
||||
export const getOptions = function () {
|
||||
return options
|
||||
}
|
||||
|
||||
export const commit = function (msg) {
|
||||
const commit = {
|
||||
id: getId(),
|
||||
message: msg,
|
||||
seq: seq++,
|
||||
parent: head == null ? null : head.id
|
||||
}
|
||||
head = commit
|
||||
commits[commit.id] = commit
|
||||
branches[curBranch] = commit.id
|
||||
logger.debug('in pushCommit ' + commit.id)
|
||||
}
|
||||
|
||||
export const branch = function (name) {
|
||||
branches[name] = head != null ? head.id : null
|
||||
logger.debug('in createBranch')
|
||||
}
|
||||
|
||||
export const merge = function (otherBranch) {
|
||||
const currentCommit = commits[branches[curBranch]]
|
||||
const otherCommit = commits[branches[otherBranch]]
|
||||
if (isReachableFrom(currentCommit, otherCommit)) {
|
||||
logger.debug('Already merged')
|
||||
return
|
||||
}
|
||||
if (isfastforwardable(currentCommit, otherCommit)) {
|
||||
branches[curBranch] = branches[otherBranch]
|
||||
head = commits[branches[curBranch]]
|
||||
} else {
|
||||
// create merge commit
|
||||
const commit = {
|
||||
id: getId(),
|
||||
message: 'merged branch ' + otherBranch + ' into ' + curBranch,
|
||||
seq: seq++,
|
||||
parent: [head == null ? null : head.id, branches[otherBranch]]
|
||||
}
|
||||
head = commit
|
||||
commits[commit.id] = commit
|
||||
branches[curBranch] = commit.id
|
||||
}
|
||||
logger.debug(branches)
|
||||
logger.debug('in mergeBranch')
|
||||
}
|
||||
|
||||
export const checkout = function (branch) {
|
||||
logger.debug('in checkout')
|
||||
curBranch = branch
|
||||
const id = branches[curBranch]
|
||||
head = commits[id]
|
||||
}
|
||||
|
||||
export const reset = function (commitRef) {
|
||||
logger.debug('in reset', commitRef)
|
||||
const ref = commitRef.split(':')[0]
|
||||
let parentCount = parseInt(commitRef.split(':')[1])
|
||||
let commit = ref === 'HEAD' ? head : commits[branches[ref]]
|
||||
logger.debug(commit, parentCount)
|
||||
while (parentCount > 0) {
|
||||
commit = commits[commit.parent]
|
||||
parentCount--
|
||||
if (!commit) {
|
||||
const err = 'Critical error - unique parent commit not found during reset'
|
||||
logger.error(err)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
head = commit
|
||||
branches[curBranch] = commit.id
|
||||
}
|
||||
|
||||
function upsert (arr, key, newval) {
|
||||
const index = arr.indexOf(key)
|
||||
if (index === -1) {
|
||||
arr.push(newval)
|
||||
} else {
|
||||
arr.splice(index, 1, newval)
|
||||
}
|
||||
}
|
||||
|
||||
function prettyPrintCommitHistory (commitArr) {
|
||||
const commit = _.maxBy(commitArr, 'seq')
|
||||
let line = ''
|
||||
commitArr.forEach(function (c) {
|
||||
if (c === commit) {
|
||||
line += '\t*'
|
||||
} else {
|
||||
line += '\t|'
|
||||
}
|
||||
})
|
||||
const label = [line, commit.id, commit.seq]
|
||||
for (let branch in branches) {
|
||||
if (branches[branch] === commit.id) label.push(branch)
|
||||
}
|
||||
logger.debug(label.join(' '))
|
||||
if (Array.isArray(commit.parent)) {
|
||||
const newCommit = commits[commit.parent[0]]
|
||||
upsert(commitArr, commit, newCommit)
|
||||
commitArr.push(commits[commit.parent[1]])
|
||||
} else if (commit.parent == null) {
|
||||
return
|
||||
} else {
|
||||
const nextCommit = commits[commit.parent]
|
||||
upsert(commitArr, commit, nextCommit)
|
||||
}
|
||||
commitArr = _.uniqBy(commitArr, 'id')
|
||||
prettyPrintCommitHistory(commitArr)
|
||||
}
|
||||
|
||||
export const prettyPrint = function () {
|
||||
logger.debug(commits)
|
||||
const node = getCommitsArray()[0]
|
||||
prettyPrintCommitHistory([node])
|
||||
}
|
||||
|
||||
export const clear = function () {
|
||||
commits = {}
|
||||
head = null
|
||||
branches = { 'master': head }
|
||||
curBranch = 'master'
|
||||
seq = 0
|
||||
}
|
||||
|
||||
export const getBranchesAsObjArray = function () {
|
||||
const branchArr = []
|
||||
for (let branch in branches) {
|
||||
branchArr.push({ name: branch, commit: commits[branches[branch]] })
|
||||
}
|
||||
return branchArr
|
||||
}
|
||||
|
||||
export const getBranches = function () { return branches }
|
||||
export const getCommits = function () { return commits }
|
||||
export const getCommitsArray = function () {
|
||||
const commitArr = Object.keys(commits).map(function (key) {
|
||||
return commits[key]
|
||||
})
|
||||
commitArr.forEach(function (o) { logger.debug(o.id) })
|
||||
return _.orderBy(commitArr, ['seq'], ['desc'])
|
||||
}
|
||||
export const getCurrentBranch = function () { return curBranch }
|
||||
export const getDirection = function () { return direction }
|
||||
export const getHead = function () { return head }
|
||||
|
||||
export default {
|
||||
setDirection,
|
||||
setOptions,
|
||||
getOptions,
|
||||
commit,
|
||||
branch,
|
||||
merge,
|
||||
checkout,
|
||||
reset,
|
||||
prettyPrint,
|
||||
clear,
|
||||
getBranchesAsObjArray,
|
||||
getBranches,
|
||||
getCommits,
|
||||
getCommitsArray,
|
||||
getCurrentBranch,
|
||||
getDirection,
|
||||
getHead
|
||||
}
|
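For orientation, a minimal sketch of driving the new gitGraphAst module directly, i.e. the same calls the gitGraph grammar issues from its semantic actions; the commit messages are illustrative:
import gitGraphAst from './gitGraphAst'

gitGraphAst.clear()
gitGraphAst.commit('initial commit')   // first commit lands on master
gitGraphAst.branch('develop')          // new branch points at the current head
gitGraphAst.checkout('develop')
gitGraphAst.commit('work on develop')
gitGraphAst.checkout('master')
gitGraphAst.merge('develop')           // master is strictly behind, so this fast-forwards it
gitGraphAst.prettyPrint()              // walks the history and logs it via logger.debug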
||||
@@ -1,20 +1,18 @@
|
||||
/* eslint-env jasmine */
|
||||
var parser = require('./parser/gitGraph').parser
|
||||
var ast = require('./gitGraphAst.js')
|
||||
import gitGraphAst from './gitGraphAst'
|
||||
import { parser } from './parser/gitGraph'
|
||||
|
||||
describe('when parsing a gitGraph', function () {
|
||||
'use strict'
|
||||
beforeEach(function () {
|
||||
console.log('ast', ast)
|
||||
console.log('parser', parser)
|
||||
parser.yy = ast
|
||||
parser.yy = gitGraphAst
|
||||
parser.yy.clear()
|
||||
})
|
||||
it('should handle a gitGraph definition', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('master')
|
||||
@@ -23,13 +21,13 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should handle a gitGraph definition with empty options', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'options\n' +
|
||||
'end\n' +
|
||||
'commit\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
|
||||
expect(parser.yy.getOptions()).toEqual({})
|
||||
expect(Object.keys(commits).length).toBe(1)
|
||||
@@ -39,14 +37,14 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should handle a gitGraph definition with valid options', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'options\n' +
|
||||
'{"key": "value"}\n' +
|
||||
'end\n' +
|
||||
'commit\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(parser.yy.getOptions()['key']).toBe('value')
|
||||
expect(Object.keys(commits).length).toBe(1)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('master')
|
||||
@@ -55,14 +53,14 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should not fail on a gitGraph with malformed json', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'options\n' +
|
||||
'{"key": "value"\n' +
|
||||
'end\n' +
|
||||
'commit\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(Object.keys(commits).length).toBe(1)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('master')
|
||||
expect(parser.yy.getDirection()).toBe('LR')
|
||||
@@ -70,11 +68,11 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should handle set direction', function () {
|
||||
var str = 'gitGraph BT:\n' +
|
||||
const str = 'gitGraph BT:\n' +
|
||||
'commit\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('master')
|
||||
@@ -83,48 +81,48 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should checkout a branch', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'branch new\n' +
|
||||
'checkout new\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
|
||||
expect(Object.keys(commits).length).toBe(0)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new')
|
||||
})
|
||||
|
||||
it('should add commits to checked out branch', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'branch new\n' +
|
||||
'checkout new\n' +
|
||||
'commit\n' +
|
||||
'commit\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
|
||||
expect(Object.keys(commits).length).toBe(2)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('new')
|
||||
var branchCommit = parser.yy.getBranches()['new']
|
||||
const branchCommit = parser.yy.getBranches()['new']
|
||||
expect(branchCommit).not.toBeNull()
|
||||
expect(commits[branchCommit].parent).not.toBeNull()
|
||||
})
|
||||
it('should handle commit with args', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit "a commit"\n'
|
||||
|
||||
parser.parse(str)
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
|
||||
expect(Object.keys(commits).length).toBe(1)
|
||||
var key = Object.keys(commits)[0]
|
||||
const key = Object.keys(commits)[0]
|
||||
expect(commits[key].message).toBe('a commit')
|
||||
expect(parser.yy.getCurrentBranch()).toBe('master')
|
||||
})
|
||||
|
||||
it('should reset a branch', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit\n' +
|
||||
'commit\n' +
|
||||
'branch newbranch\n' +
|
||||
@@ -134,7 +132,7 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(Object.keys(commits).length).toBe(3)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch')
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['master'])
|
||||
@@ -142,7 +140,7 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('reset can take an argument', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit\n' +
|
||||
'commit\n' +
|
||||
'branch newbranch\n' +
|
||||
@@ -152,15 +150,15 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(Object.keys(commits).length).toBe(3)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch')
|
||||
var master = commits[parser.yy.getBranches()['master']]
|
||||
const master = commits[parser.yy.getBranches()['master']]
|
||||
expect(parser.yy.getHead().id).toEqual(master.parent)
|
||||
})
|
||||
|
||||
it('should handle fast forwardable merges', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit\n' +
|
||||
'branch newbranch\n' +
|
||||
'checkout newbranch\n' +
|
||||
@@ -171,7 +169,7 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(Object.keys(commits).length).toBe(3)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('master')
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['master'])
|
||||
@@ -179,7 +177,7 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should handle cases when merge is a noop', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit\n' +
|
||||
'branch newbranch\n' +
|
||||
'checkout newbranch\n' +
|
||||
@@ -189,7 +187,7 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(Object.keys(commits).length).toBe(3)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch')
|
||||
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['master'])
|
||||
@@ -197,7 +195,7 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should handle merge with 2 parents', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit\n' +
|
||||
'branch newbranch\n' +
|
||||
'checkout newbranch\n' +
|
||||
@@ -209,7 +207,7 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(Object.keys(commits).length).toBe(5)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('master')
|
||||
expect(parser.yy.getBranches()['newbranch']).not.toEqual(parser.yy.getBranches()['master'])
|
||||
@@ -217,7 +215,7 @@ describe('when parsing a gitGraph', function () {
|
||||
})
|
||||
|
||||
it('should handle ff merge when history walk has two parents (merge commit)', function () {
|
||||
var str = 'gitGraph:\n' +
|
||||
const str = 'gitGraph:\n' +
|
||||
'commit\n' +
|
||||
'branch newbranch\n' +
|
||||
'checkout newbranch\n' +
|
||||
@@ -232,7 +230,7 @@ describe('when parsing a gitGraph', function () {
|
||||
|
||||
parser.parse(str)
|
||||
|
||||
var commits = parser.yy.getCommits()
|
||||
const commits = parser.yy.getCommits()
|
||||
expect(Object.keys(commits).length).toBe(6)
|
||||
expect(parser.yy.getCurrentBranch()).toBe('newbranch')
|
||||
expect(parser.yy.getBranches()['newbranch']).toEqual(parser.yy.getBranches()['master'])
|
||||
@@ -1,13 +1,14 @@
|
||||
const db = require('./gitGraphAst')
|
||||
const _ = require('lodash')
|
||||
const gitGraphParser = require('./parser/gitGraph')
|
||||
const d3 = require('../../d3')
|
||||
const Logger = require('../../logger')
|
||||
const log = Logger.Log
|
||||
import * as d3 from 'd3'
|
||||
import _ from 'lodash'
|
||||
|
||||
var allCommitsDict = {}
|
||||
var branchNum
|
||||
var config = {
|
||||
import db from './gitGraphAst'
|
||||
import gitGraphParser from './parser/gitGraph'
|
||||
import { logger } from '../../logger'
|
||||
import { interpolateToCurve } from '../../utils'
|
||||
|
||||
let allCommitsDict = {}
|
||||
let branchNum
|
||||
let config = {
|
||||
nodeSpacing: 150,
|
||||
nodeFillColor: 'yellow',
|
||||
nodeStrokeWidth: 2,
|
||||
@@ -25,8 +26,8 @@ var config = {
|
||||
y: 0
|
||||
}
|
||||
}
|
||||
var apiConfig = {}
|
||||
exports.setConf = function (c) {
|
||||
let apiConfig = {}
|
||||
export const setConf = function (c) {
|
||||
apiConfig = c
|
||||
}
|
||||
|
||||
@@ -47,21 +48,21 @@ function svgCreateDefs (svg) {
|
||||
.attr('y', config.nodeLabel.y)
|
||||
.attr('class', 'node-label')
|
||||
.attr('requiredFeatures', 'http://www.w3.org/TR/SVG11/feature#Extensibility')
|
||||
.append('xhtml:p')
|
||||
.append('p')
|
||||
.html('')
|
||||
}
|
||||
|
||||
function svgDrawLine (svg, points, colorIdx, interpolate) {
|
||||
interpolate = interpolate || 'basis'
|
||||
var color = config.branchColors[colorIdx % config.branchColors.length]
|
||||
var lineGen = d3.svg.line()
|
||||
const curve = interpolateToCurve(interpolate, d3.curveBasis)
|
||||
const color = config.branchColors[colorIdx % config.branchColors.length]
|
||||
const lineGen = d3.line()
|
||||
.x(function (d) {
|
||||
return Math.round(d.x)
|
||||
})
|
||||
.y(function (d) {
|
||||
return Math.round(d.y)
|
||||
})
|
||||
.interpolate(interpolate)
|
||||
.curve(curve)
|
||||
|
||||
svg
|
||||
.append('svg:path')
|
||||
@@ -70,12 +71,13 @@ function svgDrawLine (svg, points, colorIdx, interpolate) {
|
||||
.style('stroke-width', config.lineStrokeWidth)
|
||||
.style('fill', 'none')
|
||||
}
|
||||
|
||||
// Pass in the element and its pre-transform coords
|
||||
function getElementCoords (element, coords) {
|
||||
coords = coords || element.node().getBBox()
|
||||
var ctm = element.node().getCTM()
|
||||
var xn = ctm.e + coords.x * ctm.a
|
||||
var yn = ctm.f + coords.y * ctm.d
|
||||
const ctm = element.node().getCTM()
|
||||
const xn = ctm.e + coords.x * ctm.a
|
||||
const yn = ctm.f + coords.y * ctm.d
|
||||
return {
|
||||
left: xn,
|
||||
top: yn,
|
||||
@@ -85,17 +87,17 @@ function getElementCoords (element, coords) {
|
||||
}
|
||||
|
||||
function svgDrawLineForCommits (svg, fromId, toId, direction, color) {
|
||||
log.debug('svgDrawLineForCommits: ', fromId, toId)
|
||||
var fromBbox = getElementCoords(svg.select('#node-' + fromId + ' circle'))
|
||||
var toBbox = getElementCoords(svg.select('#node-' + toId + ' circle'))
|
||||
logger.debug('svgDrawLineForCommits: ', fromId, toId)
|
||||
const fromBbox = getElementCoords(svg.select('#node-' + fromId + ' circle'))
|
||||
const toBbox = getElementCoords(svg.select('#node-' + toId + ' circle'))
|
||||
switch (direction) {
|
||||
case 'LR':
|
||||
// (toBbox)
|
||||
// +--------
|
||||
// + (fromBbox)
|
||||
if (fromBbox.left - toBbox.left > config.nodeSpacing) {
|
||||
var lineStart = { x: fromBbox.left - config.nodeSpacing, y: toBbox.top + toBbox.height / 2 }
|
||||
var lineEnd = { x: toBbox.left + toBbox.width, y: toBbox.top + toBbox.height / 2 }
|
||||
const lineStart = { x: fromBbox.left - config.nodeSpacing, y: toBbox.top + toBbox.height / 2 }
|
||||
const lineEnd = { x: toBbox.left + toBbox.width, y: toBbox.top + toBbox.height / 2 }
|
||||
svgDrawLine(svg, [lineStart, lineEnd], color, 'linear')
|
||||
svgDrawLine(svg, [
|
||||
{ x: fromBbox.left, y: fromBbox.top + fromBbox.height / 2 },
|
||||
@@ -124,8 +126,8 @@ function svgDrawLineForCommits (svg, fromId, toId, direction, color) {
|
||||
// |
|
||||
// + (toBbox)
|
||||
if (toBbox.top - fromBbox.top > config.nodeSpacing) {
|
||||
lineStart = { x: toBbox.left + toBbox.width / 2, y: fromBbox.top + fromBbox.height + config.nodeSpacing }
|
||||
lineEnd = { x: toBbox.left + toBbox.width / 2, y: toBbox.top }
|
||||
const lineStart = { x: toBbox.left + toBbox.width / 2, y: fromBbox.top + fromBbox.height + config.nodeSpacing }
|
||||
const lineEnd = { x: toBbox.left + toBbox.width / 2, y: toBbox.top }
|
||||
svgDrawLine(svg, [lineStart, lineEnd], color, 'linear')
|
||||
svgDrawLine(svg, [
|
||||
{ x: fromBbox.left + fromBbox.width / 2, y: fromBbox.top + fromBbox.height },
|
||||
@@ -156,12 +158,12 @@ function cloneNode (svg, selector) {
|
||||
}
|
||||
|
||||
function renderCommitHistory (svg, commitid, branches, direction) {
|
||||
var commit
|
||||
var numCommits = Object.keys(allCommitsDict).length
|
||||
if (_.isString(commitid)) {
|
||||
let commit
|
||||
const numCommits = Object.keys(allCommitsDict).length
|
||||
if (typeof commitid === 'string') {
|
||||
do {
|
||||
commit = allCommitsDict[commitid]
|
||||
log.debug('in renderCommitHistory', commit.id, commit.seq)
|
||||
logger.debug('in renderCommitHistory', commit.id, commit.seq)
|
||||
if (svg.select('#node-' + commitid).size() > 0) {
|
||||
return
|
||||
}
|
||||
@@ -187,9 +189,15 @@ function renderCommitHistory (svg, commitid, branches, direction) {
|
||||
.attr('stroke', config.nodeStrokeColor)
|
||||
.attr('stroke-width', config.nodeStrokeWidth)
|
||||
|
||||
var branch = _.find(branches, ['commit', commit])
|
||||
let branch
|
||||
for (let branchName in branches) {
|
||||
if (branches[branchName].commit === commit) {
|
||||
branch = branches[branchName]
|
||||
break
|
||||
}
|
||||
}
|
||||
if (branch) {
|
||||
log.debug('found branch ', branch.name)
|
||||
logger.debug('found branch ', branch.name)
|
||||
svg.select('#node-' + commit.id + ' p')
|
||||
.append('xhtml:span')
|
||||
.attr('class', 'branch-label')
|
||||
@@ -209,8 +217,8 @@ function renderCommitHistory (svg, commitid, branches, direction) {
|
||||
} while (commitid && allCommitsDict[commitid])
|
||||
}
|
||||
|
||||
if (_.isArray(commitid)) {
|
||||
log.debug('found merge commit', commitid)
|
||||
if (Array.isArray(commitid)) {
|
||||
logger.debug('found merge commit', commitid)
|
||||
renderCommitHistory(svg, commitid[0], branches, direction)
|
||||
branchNum++
|
||||
renderCommitHistory(svg, commitid[1], branches, direction)
|
||||
@@ -221,11 +229,11 @@ function renderCommitHistory (svg, commitid, branches, direction) {
|
||||
function renderLines (svg, commit, direction, branchColor) {
|
||||
branchColor = branchColor || 0
|
||||
while (commit.seq > 0 && !commit.lineDrawn) {
|
||||
if (_.isString(commit.parent)) {
|
||||
if (typeof commit.parent === 'string') {
|
||||
svgDrawLineForCommits(svg, commit.id, commit.parent, direction, branchColor)
|
||||
commit.lineDrawn = true
|
||||
commit = allCommitsDict[commit.parent]
|
||||
} else if (_.isArray(commit.parent)) {
|
||||
} else if (Array.isArray(commit.parent)) {
|
||||
svgDrawLineForCommits(svg, commit.id, commit.parent[0], direction, branchColor)
|
||||
svgDrawLineForCommits(svg, commit.id, commit.parent[1], direction, branchColor + 1)
|
||||
renderLines(svg, allCommitsDict[commit.parent[1]], direction, branchColor + 1)
|
||||
@@ -235,40 +243,45 @@ function renderLines (svg, commit, direction, branchColor) {
|
||||
}
|
||||
}
|
||||
|
||||
exports.draw = function (txt, id, ver) {
|
||||
export const draw = function (txt, id, ver) {
|
||||
try {
|
||||
var parser
|
||||
parser = gitGraphParser.parser
|
||||
const parser = gitGraphParser.parser
|
||||
parser.yy = db
|
||||
|
||||
log.debug('in gitgraph renderer', txt, id, ver)
|
||||
logger.debug('in gitgraph renderer', txt, id, ver)
|
||||
// Parse the graph definition
|
||||
parser.parse(txt + '\n')
|
||||
|
||||
config = _.extend(config, apiConfig, db.getOptions())
|
||||
log.debug('effective options', config)
|
||||
var direction = db.getDirection()
|
||||
config = _.assign(config, apiConfig, db.getOptions())
|
||||
logger.debug('effective options', config)
|
||||
const direction = db.getDirection()
|
||||
allCommitsDict = db.getCommits()
|
||||
var branches = db.getBranchesAsObjArray()
|
||||
const branches = db.getBranchesAsObjArray()
|
||||
if (direction === 'BT') {
|
||||
config.nodeLabel.x = branches.length * config.branchOffset
|
||||
config.nodeLabel.width = '100%'
|
||||
config.nodeLabel.y = -1 * 2 * config.nodeRadius
|
||||
}
|
||||
var svg = d3.select('#' + id)
|
||||
const svg = d3.select(`[id="${id}"]`)
|
||||
svgCreateDefs(svg)
|
||||
branchNum = 1
|
||||
_.each(branches, function (v) {
|
||||
for (let branch in branches) {
|
||||
const v = branches[branch]
|
||||
renderCommitHistory(svg, v.commit.id, branches, direction)
|
||||
renderLines(svg, v.commit, direction)
|
||||
branchNum++
|
||||
})
|
||||
}
|
||||
svg.attr('height', function () {
|
||||
if (direction === 'BT') return Object.keys(allCommitsDict).length * config.nodeSpacing
|
||||
return (branches.length + 1) * config.branchOffset
|
||||
})
|
||||
} catch (e) {
|
||||
log.error('Error while rendering gitgraph')
|
||||
log.error(e.message)
|
||||
logger.error('Error while rendering gitgraph')
|
||||
logger.error(e.message)
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
setConf,
|
||||
draw
|
||||
}
|
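A usage sketch for the reworked renderer; the module path, svg element id and config override below are assumptions, not part of the diff. setConf stores overrides that draw later merges into the defaults with _.assign before parsing the text and rendering into the element whose id is passed in:
import gitGraphRenderer from './gitGraphRenderer'   // module path assumed

// Assumes an <svg id="gitGraph1"> element already exists in the document.
gitGraphRenderer.setConf({ nodeSpacing: 120 })       // merged into the defaults inside draw()
gitGraphRenderer.draw('gitGraph BT:\ncommit\ncommit\n', 'gitGraph1')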
||||
696
src/diagrams/git/parser/gitGraph.js
Normal file
@@ -0,0 +1,696 @@
|
||||
/* parser generated by jison 0.4.18 */
|
||||
/*
|
||||
Returns a Parser object of the following structure:
|
||||
|
||||
Parser: {
|
||||
yy: {}
|
||||
}
|
||||
|
||||
Parser.prototype: {
|
||||
yy: {},
|
||||
trace: function(),
|
||||
symbols_: {associative list: name ==> number},
|
||||
terminals_: {associative list: number ==> name},
|
||||
productions_: [...],
|
||||
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
|
||||
table: [...],
|
||||
defaultActions: {...},
|
||||
parseError: function(str, hash),
|
||||
parse: function(input),
|
||||
|
||||
lexer: {
|
||||
EOF: 1,
|
||||
parseError: function(str, hash),
|
||||
setInput: function(input),
|
||||
input: function(),
|
||||
unput: function(str),
|
||||
more: function(),
|
||||
less: function(n),
|
||||
pastInput: function(),
|
||||
upcomingInput: function(),
|
||||
showPosition: function(),
|
||||
test_match: function(regex_match_array, rule_index),
|
||||
next: function(),
|
||||
lex: function(),
|
||||
begin: function(condition),
|
||||
popState: function(),
|
||||
_currentRules: function(),
|
||||
topState: function(),
|
||||
pushState: function(condition),
|
||||
|
||||
options: {
|
||||
ranges: boolean (optional: true ==> token location info will include a .range[] member)
|
||||
flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
|
||||
backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
|
||||
},
|
||||
|
||||
performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
|
||||
rules: [...],
|
||||
conditions: {associative list: name ==> set},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
token location info (@$, _$, etc.): {
|
||||
first_line: n,
|
||||
last_line: n,
|
||||
first_column: n,
|
||||
last_column: n,
|
||||
range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
|
||||
}
|
||||
|
||||
|
||||
the parseError function receives a 'hash' object with these members for lexer and parser errors: {
|
||||
text: (matched text)
|
||||
token: (the produced terminal token, if any)
|
||||
line: (yylineno)
|
||||
}
|
||||
while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
|
||||
loc: (yylloc)
|
||||
expected: (string describing the set of expected tokens)
|
||||
recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
|
||||
}
|
||||
*/
|
||||
var parser = (function(){
|
||||
var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[2,3],$V1=[1,7],$V2=[7,12,15,17,19,20,21],$V3=[7,11,12,15,17,19,20,21],$V4=[2,20],$V5=[1,32];
|
||||
var parser = {trace: function trace () { },
|
||||
yy: {},
|
||||
symbols_: {"error":2,"start":3,"GG":4,":":5,"document":6,"EOF":7,"DIR":8,"options":9,"body":10,"OPT":11,"NL":12,"line":13,"statement":14,"COMMIT":15,"commit_arg":16,"BRANCH":17,"ID":18,"CHECKOUT":19,"MERGE":20,"RESET":21,"reset_arg":22,"STR":23,"HEAD":24,"reset_parents":25,"CARET":26,"$accept":0,"$end":1},
|
||||
terminals_: {2:"error",4:"GG",5:":",7:"EOF",8:"DIR",11:"OPT",12:"NL",15:"COMMIT",17:"BRANCH",18:"ID",19:"CHECKOUT",20:"MERGE",21:"RESET",23:"STR",24:"HEAD",26:"CARET"},
|
||||
productions_: [0,[3,4],[3,5],[6,0],[6,2],[9,2],[9,1],[10,0],[10,2],[13,2],[13,1],[14,2],[14,2],[14,2],[14,2],[14,2],[16,0],[16,1],[22,2],[22,2],[25,0],[25,2]],
|
||||
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
|
||||
/* this == yyval */
|
||||
|
||||
var $0 = $$.length - 1;
|
||||
switch (yystate) {
|
||||
case 1:
|
||||
return $$[$0-1];
|
||||
break;
|
||||
case 2:
|
||||
yy.setDirection($$[$0-3]); return $$[$0-1];
|
||||
break;
|
||||
case 4:
|
||||
yy.setOptions($$[$0-1]); this.$ = $$[$0]
|
||||
break;
|
||||
case 5:
|
||||
$$[$0-1] +=$$[$0]; this.$=$$[$0-1]
|
||||
break;
|
||||
case 7:
|
||||
this.$ = []
|
||||
break;
|
||||
case 8:
|
||||
$$[$0-1].push($$[$0]); this.$=$$[$0-1];
|
||||
break;
|
||||
case 9:
|
||||
this.$ =$$[$0-1]
|
||||
break;
|
||||
case 11:
|
||||
yy.commit($$[$0])
|
||||
break;
|
||||
case 12:
|
||||
yy.branch($$[$0])
|
||||
break;
|
||||
case 13:
|
||||
yy.checkout($$[$0])
|
||||
break;
|
||||
case 14:
|
||||
yy.merge($$[$0])
|
||||
break;
|
||||
case 15:
|
||||
yy.reset($$[$0])
|
||||
break;
|
||||
case 16:
|
||||
this.$ = ""
|
||||
break;
|
||||
case 17:
|
||||
this.$=$$[$0]
|
||||
break;
|
||||
case 18:
|
||||
this.$ = $$[$0-1]+ ":" + $$[$0]
|
||||
break;
|
||||
case 19:
|
||||
this.$ = $$[$0-1]+ ":" + yy.count; yy.count = 0
|
||||
break;
|
||||
case 20:
|
||||
yy.count = 0
|
||||
break;
|
||||
case 21:
|
||||
yy.count += 1
|
||||
break;
|
||||
}
|
||||
},
|
||||
table: [{3:1,4:[1,2]},{1:[3]},{5:[1,3],8:[1,4]},{6:5,7:$V0,9:6,12:$V1},{5:[1,8]},{7:[1,9]},o($V2,[2,7],{10:10,11:[1,11]}),o($V3,[2,6]),{6:12,7:$V0,9:6,12:$V1},{1:[2,1]},{7:[2,4],12:[1,15],13:13,14:14,15:[1,16],17:[1,17],19:[1,18],20:[1,19],21:[1,20]},o($V3,[2,5]),{7:[1,21]},o($V2,[2,8]),{12:[1,22]},o($V2,[2,10]),{12:[2,16],16:23,23:[1,24]},{18:[1,25]},{18:[1,26]},{18:[1,27]},{18:[1,30],22:28,24:[1,29]},{1:[2,2]},o($V2,[2,9]),{12:[2,11]},{12:[2,17]},{12:[2,12]},{12:[2,13]},{12:[2,14]},{12:[2,15]},{12:$V4,25:31,26:$V5},{12:$V4,25:33,26:$V5},{12:[2,18]},{12:$V4,25:34,26:$V5},{12:[2,19]},{12:[2,21]}],
|
||||
defaultActions: {9:[2,1],21:[2,2],23:[2,11],24:[2,17],25:[2,12],26:[2,13],27:[2,14],28:[2,15],31:[2,18],33:[2,19],34:[2,21]},
|
||||
parseError: function parseError (str, hash) {
|
||||
if (hash.recoverable) {
|
||||
this.trace(str);
|
||||
} else {
|
||||
var error = new Error(str);
|
||||
error.hash = hash;
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
parse: function parse(input) {
|
||||
var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
|
||||
var args = lstack.slice.call(arguments, 1);
|
||||
var lexer = Object.create(this.lexer);
|
||||
var sharedState = { yy: {} };
|
||||
for (var k in this.yy) {
|
||||
if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
|
||||
sharedState.yy[k] = this.yy[k];
|
||||
}
|
||||
}
|
||||
lexer.setInput(input, sharedState.yy);
|
||||
sharedState.yy.lexer = lexer;
|
||||
sharedState.yy.parser = this;
|
||||
if (typeof lexer.yylloc == 'undefined') {
|
||||
lexer.yylloc = {};
|
||||
}
|
||||
var yyloc = lexer.yylloc;
|
||||
lstack.push(yyloc);
|
||||
var ranges = lexer.options && lexer.options.ranges;
|
||||
if (typeof sharedState.yy.parseError === 'function') {
|
||||
this.parseError = sharedState.yy.parseError;
|
||||
} else {
|
||||
this.parseError = Object.getPrototypeOf(this).parseError;
|
||||
}
|
||||
function popStack(n) {
|
||||
stack.length = stack.length - 2 * n;
|
||||
vstack.length = vstack.length - n;
|
||||
lstack.length = lstack.length - n;
|
||||
}
|
||||
function lex() {
|
||||
var token;
|
||||
token = tstack.pop() || lexer.lex() || EOF;
|
||||
if (typeof token !== 'number') {
|
||||
if (token instanceof Array) {
|
||||
tstack = token;
|
||||
token = tstack.pop();
|
||||
}
|
||||
token = self.symbols_[token] || token;
|
||||
}
|
||||
return token;
|
||||
}
|
||||
var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
|
||||
while (true) {
|
||||
state = stack[stack.length - 1];
|
||||
if (this.defaultActions[state]) {
|
||||
action = this.defaultActions[state];
|
||||
} else {
|
||||
if (symbol === null || typeof symbol == 'undefined') {
|
||||
symbol = lex();
|
||||
}
|
||||
action = table[state] && table[state][symbol];
|
||||
}
|
||||
if (typeof action === 'undefined' || !action.length || !action[0]) {
|
||||
var errStr = '';
|
||||
expected = [];
|
||||
for (p in table[state]) {
|
||||
if (this.terminals_[p] && p > TERROR) {
|
||||
expected.push('\'' + this.terminals_[p] + '\'');
|
||||
}
|
||||
}
|
||||
if (lexer.showPosition) {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\'';
|
||||
} else {
|
||||
errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'');
|
||||
}
|
||||
this.parseError(errStr, {
|
||||
text: lexer.match,
|
||||
token: this.terminals_[symbol] || symbol,
|
||||
line: lexer.yylineno,
|
||||
loc: yyloc,
|
||||
expected: expected
|
||||
});
|
||||
}
|
||||
if (action[0] instanceof Array && action.length > 1) {
|
||||
throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol);
|
||||
}
|
||||
switch (action[0]) {
|
||||
case 1:
|
||||
stack.push(symbol);
|
||||
vstack.push(lexer.yytext);
|
||||
lstack.push(lexer.yylloc);
|
||||
stack.push(action[1]);
|
||||
symbol = null;
|
||||
if (!preErrorSymbol) {
|
||||
yyleng = lexer.yyleng;
|
||||
yytext = lexer.yytext;
|
||||
yylineno = lexer.yylineno;
|
||||
yyloc = lexer.yylloc;
|
||||
if (recovering > 0) {
|
||||
recovering--;
|
||||
}
|
||||
} else {
|
||||
symbol = preErrorSymbol;
|
||||
preErrorSymbol = null;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
len = this.productions_[action[1]][1];
|
||||
yyval.$ = vstack[vstack.length - len];
|
||||
yyval._$ = {
|
||||
first_line: lstack[lstack.length - (len || 1)].first_line,
|
||||
last_line: lstack[lstack.length - 1].last_line,
|
||||
first_column: lstack[lstack.length - (len || 1)].first_column,
|
||||
last_column: lstack[lstack.length - 1].last_column
|
||||
};
|
||||
if (ranges) {
|
||||
yyval._$.range = [
|
||||
lstack[lstack.length - (len || 1)].range[0],
|
||||
lstack[lstack.length - 1].range[1]
|
||||
];
|
||||
}
|
||||
r = this.performAction.apply(yyval, [
|
||||
yytext,
|
||||
yyleng,
|
||||
yylineno,
|
||||
sharedState.yy,
|
||||
action[1],
|
||||
vstack,
|
||||
lstack
|
||||
].concat(args));
|
||||
if (typeof r !== 'undefined') {
|
||||
return r;
|
||||
}
|
||||
if (len) {
|
||||
stack = stack.slice(0, -1 * len * 2);
|
||||
vstack = vstack.slice(0, -1 * len);
|
||||
lstack = lstack.slice(0, -1 * len);
|
||||
}
|
||||
stack.push(this.productions_[action[1]][0]);
|
||||
vstack.push(yyval.$);
|
||||
lstack.push(yyval._$);
|
||||
newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
|
||||
stack.push(newState);
|
||||
break;
|
||||
case 3:
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}};
|
||||
/* generated by jison-lex 0.3.4 */
|
||||
var lexer = (function(){
|
||||
var lexer = ({
|
||||
|
||||
EOF:1,
|
||||
|
||||
parseError:function parseError(str, hash) {
|
||||
if (this.yy.parser) {
|
||||
this.yy.parser.parseError(str, hash);
|
||||
} else {
|
||||
throw new Error(str);
|
||||
}
|
||||
},
|
||||
|
||||
// resets the lexer, sets new input
|
||||
setInput:function (input, yy) {
|
||||
this.yy = yy || this.yy || {};
|
||||
this._input = input;
|
||||
this._more = this._backtrack = this.done = false;
|
||||
this.yylineno = this.yyleng = 0;
|
||||
this.yytext = this.matched = this.match = '';
|
||||
this.conditionStack = ['INITIAL'];
|
||||
this.yylloc = {
|
||||
first_line: 1,
|
||||
first_column: 0,
|
||||
last_line: 1,
|
||||
last_column: 0
|
||||
};
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [0,0];
|
||||
}
|
||||
this.offset = 0;
|
||||
return this;
|
||||
},
|
||||
|
||||
// consumes and returns one char from the input
|
||||
input:function () {
|
||||
var ch = this._input[0];
|
||||
this.yytext += ch;
|
||||
this.yyleng++;
|
||||
this.offset++;
|
||||
this.match += ch;
|
||||
this.matched += ch;
|
||||
var lines = ch.match(/(?:\r\n?|\n).*/g);
|
||||
if (lines) {
|
||||
this.yylineno++;
|
||||
this.yylloc.last_line++;
|
||||
} else {
|
||||
this.yylloc.last_column++;
|
||||
}
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range[1]++;
|
||||
}
|
||||
|
||||
this._input = this._input.slice(1);
|
||||
return ch;
|
||||
},
|
||||
|
||||
// unshifts one char (or a string) into the input
|
||||
unput:function (ch) {
|
||||
var len = ch.length;
|
||||
var lines = ch.split(/(?:\r\n?|\n)/g);
|
||||
|
||||
this._input = ch + this._input;
|
||||
this.yytext = this.yytext.substr(0, this.yytext.length - len);
|
||||
//this.yyleng -= len;
|
||||
this.offset -= len;
|
||||
var oldLines = this.match.split(/(?:\r\n?|\n)/g);
|
||||
this.match = this.match.substr(0, this.match.length - 1);
|
||||
this.matched = this.matched.substr(0, this.matched.length - 1);
|
||||
|
||||
if (lines.length - 1) {
|
||||
this.yylineno -= lines.length - 1;
|
||||
}
|
||||
var r = this.yylloc.range;
|
||||
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: lines ?
|
||||
(lines.length === oldLines.length ? this.yylloc.first_column : 0)
|
||||
+ oldLines[oldLines.length - lines.length].length - lines[0].length :
|
||||
this.yylloc.first_column - len
|
||||
};
|
||||
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [r[0], r[0] + this.yyleng - len];
|
||||
}
|
||||
this.yyleng = this.yytext.length;
|
||||
return this;
|
||||
},
|
||||
|
||||
// When called from action, caches matched text and appends it on next action
|
||||
more:function () {
|
||||
this._more = true;
|
||||
return this;
|
||||
},
|
||||
|
||||
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
|
||||
reject:function () {
|
||||
if (this.options.backtrack_lexer) {
|
||||
this._backtrack = true;
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
|
||||
text: "",
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
});
|
||||
|
||||
}
|
||||
return this;
|
||||
},
|
||||
|
||||
// retain first n characters of the match
|
||||
less:function (n) {
|
||||
this.unput(this.match.slice(n));
|
||||
},
|
||||
|
||||
// displays already matched input, i.e. for error messages
|
||||
pastInput:function () {
|
||||
var past = this.matched.substr(0, this.matched.length - this.match.length);
|
||||
return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
|
||||
},
|
||||
|
||||
// displays upcoming input, i.e. for error messages
|
||||
upcomingInput:function () {
|
||||
var next = this.match;
|
||||
if (next.length < 20) {
|
||||
next += this._input.substr(0, 20-next.length);
|
||||
}
|
||||
return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, "");
|
||||
},
|
||||
|
||||
// displays the character position where the lexing error occurred, i.e. for error messages
|
||||
showPosition:function () {
|
||||
var pre = this.pastInput();
|
||||
var c = new Array(pre.length + 1).join("-");
|
||||
return pre + this.upcomingInput() + "\n" + c + "^";
|
||||
},
|
||||
|
||||
// test the lexed token: return FALSE when not a match, otherwise return token
|
||||
test_match:function(match, indexed_rule) {
|
||||
var token,
|
||||
lines,
|
||||
backup;
|
||||
|
||||
if (this.options.backtrack_lexer) {
|
||||
// save context
|
||||
backup = {
|
||||
yylineno: this.yylineno,
|
||||
yylloc: {
|
||||
first_line: this.yylloc.first_line,
|
||||
last_line: this.last_line,
|
||||
first_column: this.yylloc.first_column,
|
||||
last_column: this.yylloc.last_column
|
||||
},
|
||||
yytext: this.yytext,
|
||||
match: this.match,
|
||||
matches: this.matches,
|
||||
matched: this.matched,
|
||||
yyleng: this.yyleng,
|
||||
offset: this.offset,
|
||||
_more: this._more,
|
||||
_input: this._input,
|
||||
yy: this.yy,
|
||||
conditionStack: this.conditionStack.slice(0),
|
||||
done: this.done
|
||||
};
|
||||
if (this.options.ranges) {
|
||||
backup.yylloc.range = this.yylloc.range.slice(0);
|
||||
}
|
||||
}
|
||||
|
||||
lines = match[0].match(/(?:\r\n?|\n).*/g);
|
||||
if (lines) {
|
||||
this.yylineno += lines.length;
|
||||
}
|
||||
this.yylloc = {
|
||||
first_line: this.yylloc.last_line,
|
||||
last_line: this.yylineno + 1,
|
||||
first_column: this.yylloc.last_column,
|
||||
last_column: lines ?
|
||||
lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
|
||||
this.yylloc.last_column + match[0].length
|
||||
};
|
||||
this.yytext += match[0];
|
||||
this.match += match[0];
|
||||
this.matches = match;
|
||||
this.yyleng = this.yytext.length;
|
||||
if (this.options.ranges) {
|
||||
this.yylloc.range = [this.offset, this.offset += this.yyleng];
|
||||
}
|
||||
this._more = false;
|
||||
this._backtrack = false;
|
||||
this._input = this._input.slice(match[0].length);
|
||||
this.matched += match[0];
|
||||
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
|
||||
if (this.done && this._input) {
|
||||
this.done = false;
|
||||
}
|
||||
if (token) {
|
||||
return token;
|
||||
} else if (this._backtrack) {
|
||||
// recover context
|
||||
for (var k in backup) {
|
||||
this[k] = backup[k];
|
||||
}
|
||||
return false; // rule action called reject() implying the next rule should be tested instead.
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
// return next match in input
|
||||
next:function () {
|
||||
if (this.done) {
|
||||
return this.EOF;
|
||||
}
|
||||
if (!this._input) {
|
||||
this.done = true;
|
||||
}
|
||||
|
||||
var token,
|
||||
match,
|
||||
tempMatch,
|
||||
index;
|
||||
if (!this._more) {
|
||||
this.yytext = '';
|
||||
this.match = '';
|
||||
}
|
||||
var rules = this._currentRules();
|
||||
for (var i = 0; i < rules.length; i++) {
|
||||
tempMatch = this._input.match(this.rules[rules[i]]);
|
||||
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
|
||||
match = tempMatch;
|
||||
index = i;
|
||||
if (this.options.backtrack_lexer) {
|
||||
token = this.test_match(tempMatch, rules[i]);
|
||||
if (token !== false) {
|
||||
return token;
|
||||
} else if (this._backtrack) {
|
||||
match = false;
|
||||
continue; // rule action called reject() implying a rule MISmatch.
|
||||
} else {
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false;
|
||||
}
|
||||
} else if (!this.options.flex) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (match) {
|
||||
token = this.test_match(match, rules[index]);
|
||||
if (token !== false) {
|
||||
return token;
|
||||
}
|
||||
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||
return false;
|
||||
}
|
||||
if (this._input === "") {
|
||||
return this.EOF;
|
||||
} else {
|
||||
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
|
||||
text: "",
|
||||
token: null,
|
||||
line: this.yylineno
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
// return next match that has a token
|
||||
lex:function lex () {
|
||||
var r = this.next();
|
||||
if (r) {
|
||||
return r;
|
||||
} else {
|
||||
return this.lex();
|
||||
}
|
||||
},
|
||||
|
||||
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
|
||||
begin:function begin (condition) {
|
||||
this.conditionStack.push(condition);
|
||||
},
|
||||
|
||||
// pop the previously active lexer condition state off the condition stack
|
||||
popState:function popState () {
|
||||
var n = this.conditionStack.length - 1;
|
||||
if (n > 0) {
|
||||
return this.conditionStack.pop();
|
||||
} else {
|
||||
return this.conditionStack[0];
|
||||
}
|
||||
},
|
||||
|
||||
// produce the lexer rule set which is active for the currently active lexer condition state
|
||||
_currentRules:function _currentRules () {
|
||||
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
|
||||
return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
|
||||
} else {
|
||||
return this.conditions["INITIAL"].rules;
|
||||
}
|
||||
},
|
||||
|
||||
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
|
||||
topState:function topState (n) {
|
||||
n = this.conditionStack.length - 1 - Math.abs(n || 0);
|
||||
if (n >= 0) {
|
||||
return this.conditionStack[n];
|
||||
} else {
|
||||
return "INITIAL";
|
||||
}
|
||||
},
|
||||
|
||||
// alias for begin(condition)
|
||||
pushState:function pushState (condition) {
|
||||
this.begin(condition);
|
||||
},
|
||||
|
||||
// return the number of states currently on the stack
|
||||
stateStackSize:function stateStackSize() {
|
||||
return this.conditionStack.length;
|
||||
},
|
||||
options: {"case-insensitive":true},
|
||||
performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
|
||||
var YYSTATE=YY_START;
|
||||
switch($avoiding_name_collisions) {
|
||||
case 0:return 12;
|
||||
break;
|
||||
case 1:/* skip all whitespace */
|
||||
break;
|
||||
case 2:/* skip comments */
|
||||
break;
|
||||
case 3:/* skip comments */
|
||||
break;
|
||||
case 4:return 4;
|
||||
break;
|
||||
case 5:return 15;
|
||||
break;
|
||||
case 6:return 17;
|
||||
break;
|
||||
case 7:return 20;
|
||||
break;
|
||||
case 8:return 21;
|
||||
break;
|
||||
case 9:return 19;
|
||||
break;
|
||||
case 10:return 8;
|
||||
break;
|
||||
case 11:return 8;
|
||||
break;
|
||||
case 12:return 5;
|
||||
break;
|
||||
case 13:return 26
|
||||
break;
|
||||
case 14:this.begin("options");
|
||||
break;
|
||||
case 15:this.popState();
|
||||
break;
|
||||
case 16:return 11;
|
||||
break;
|
||||
case 17:this.begin("string");
|
||||
break;
|
||||
case 18:this.popState();
|
||||
break;
|
||||
case 19:return 23;
|
||||
break;
|
||||
case 20:return 18;
|
||||
break;
|
||||
case 21:return 7;
|
||||
break;
|
||||
}
|
||||
},
|
||||
rules: [/^(?:(\r?\n)+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:gitGraph\b)/i,/^(?:commit\b)/i,/^(?:branch\b)/i,/^(?:merge\b)/i,/^(?:reset\b)/i,/^(?:checkout\b)/i,/^(?:LR\b)/i,/^(?:BT\b)/i,/^(?::)/i,/^(?:\^)/i,/^(?:options\r?\n)/i,/^(?:end\r?\n)/i,/^(?:[^\n]+\r?\n)/i,/^(?:["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[a-zA-Z][a-zA-Z0-9_]+)/i,/^(?:$)/i],
|
||||
conditions: {"options":{"rules":[15,16],"inclusive":false},"string":{"rules":[18,19],"inclusive":false},"INITIAL":{"rules":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,17,20,21],"inclusive":true}}
|
||||
});
|
||||
return lexer;
|
||||
})();
|
||||
parser.lexer = lexer;
|
||||
function Parser () {
|
||||
this.yy = {};
|
||||
}
|
||||
Parser.prototype = parser;parser.Parser = Parser;
|
||||
return new Parser;
|
||||
})();
|
||||
|
||||
|
||||
if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
|
||||
exports.parser = parser;
|
||||
exports.Parser = parser.Parser;
|
||||
exports.parse = function () { return parser.parse.apply(parser, arguments); };
|
||||
exports.main = function commonjsMain (args) {
|
||||
if (!args[1]) {
|
||||
console.log('Usage: '+args[0]+' FILE');
|
||||
process.exit(1);
|
||||
}
|
||||
var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
|
||||
return exports.parser.parse(source);
|
||||
};
|
||||
if (typeof module !== 'undefined' && require.main === module) {
|
||||
exports.main(process.argv.slice(1));
|
||||
}
|
||||
}
|
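The generated parser can also be driven on its own, mirroring the spec setup above (relative paths as used from the spec's directory; the options string is just an example):
import { parser } from './parser/gitGraph'
import gitGraphAst from './gitGraphAst'

parser.yy = gitGraphAst   // grammar actions call yy.commit / yy.branch / yy.checkout / yy.merge / yy.reset
parser.yy.clear()
parser.parse('gitGraph:\noptions\n{"key": "value"}\nend\ncommit\n')
console.log(parser.yy.getOptions())                        // { key: 'value' }
console.log(Object.keys(parser.yy.getCommits()).length)    // 1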
||||
@@ -1,208 +0,0 @@
|
||||
const Logger = require('../../logger')
|
||||
const log = Logger.Log
|
||||
const _ = require('lodash')
|
||||
|
||||
var commits = {}
|
||||
var head = null
|
||||
var branches = { 'master': head }
|
||||
var curBranch = 'master'
|
||||
var direction = 'LR'
|
||||
var seq = 0
|
||||
|
||||
function getRandomInt (min, max) {
|
||||
return Math.floor(Math.random() * (max - min)) + min
|
||||
}
|
||||
|
||||
function getId () {
|
||||
var pool = '0123456789abcdef'
|
||||
var id = ''
|
||||
for (var i = 0; i < 7; i++) {
|
||||
id += pool[getRandomInt(0, 16)]
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
function isfastforwardable (currentCommit, otherCommit) {
|
||||
log.debug('Entering isfastforwardable:', currentCommit.id, otherCommit.id)
|
||||
while (currentCommit.seq <= otherCommit.seq && currentCommit !== otherCommit) {
|
||||
// only if other branch has more commits
|
||||
if (otherCommit.parent == null) break
|
||||
if (Array.isArray(otherCommit.parent)) {
|
||||
log.debug('In merge commit:', otherCommit.parent)
|
||||
return isfastforwardable(currentCommit, commits[otherCommit.parent[0]]) ||
|
||||
isfastforwardable(currentCommit, commits[otherCommit.parent[1]])
|
||||
} else {
|
||||
otherCommit = commits[otherCommit.parent]
|
||||
}
|
||||
}
|
||||
log.debug(currentCommit.id, otherCommit.id)
|
||||
return currentCommit.id === otherCommit.id
|
||||
}
|
||||
|
||||
function isReachableFrom (currentCommit, otherCommit) {
|
||||
var currentSeq = currentCommit.seq
|
||||
var otherSeq = otherCommit.seq
|
||||
if (currentSeq > otherSeq) return isfastforwardable(otherCommit, currentCommit)
|
||||
return false
|
||||
}
|
||||
|
||||
exports.setDirection = function (dir) {
|
||||
direction = dir
|
||||
}
|
||||
var options = {}
|
||||
exports.setOptions = function (rawOptString) {
|
||||
log.debug('options str', rawOptString)
|
||||
rawOptString = rawOptString && rawOptString.trim()
|
||||
rawOptString = rawOptString || '{}'
|
||||
try {
|
||||
options = JSON.parse(rawOptString)
|
||||
} catch (e) {
|
||||
log.error('error while parsing gitGraph options', e.message)
|
||||
}
|
||||
}
|
||||
|
||||
exports.getOptions = function () {
|
||||
return options
|
||||
}
|
||||
|
||||
exports.commit = function (msg) {
|
||||
var commit = {
|
||||
id: getId(),
|
||||
message: msg,
|
||||
seq: seq++,
|
||||
parent: head == null ? null : head.id
|
||||
}
|
||||
head = commit
|
||||
commits[commit.id] = commit
|
||||
branches[curBranch] = commit.id
|
||||
log.debug('in pushCommit ' + commit.id)
|
||||
}
|
||||
|
||||
exports.branch = function (name) {
|
||||
branches[name] = head != null ? head.id : null
|
||||
log.debug('in createBranch')
|
||||
}
|
||||
|
||||
exports.merge = function (otherBranch) {
|
||||
var currentCommit = commits[branches[curBranch]]
|
||||
var otherCommit = commits[branches[otherBranch]]
|
||||
if (isReachableFrom(currentCommit, otherCommit)) {
|
||||
log.debug('Already merged')
|
||||
return
|
||||
}
|
||||
if (isfastforwardable(currentCommit, otherCommit)) {
|
||||
branches[curBranch] = branches[otherBranch]
|
||||
head = commits[branches[curBranch]]
|
||||
} else {
|
||||
// create merge commit
|
||||
var commit = {
|
||||
id: getId(),
|
||||
message: 'merged branch ' + otherBranch + ' into ' + curBranch,
|
||||
seq: seq++,
|
||||
parent: [head == null ? null : head.id, branches[otherBranch]]
|
||||
}
|
||||
head = commit
|
||||
commits[commit.id] = commit
|
||||
branches[curBranch] = commit.id
|
||||
}
|
||||
log.debug(branches)
|
||||
log.debug('in mergeBranch')
|
||||
}
|
||||
|
||||
exports.checkout = function (branch) {
|
||||
log.debug('in checkout')
|
||||
curBranch = branch
|
||||
var id = branches[curBranch]
|
||||
head = commits[id]
|
||||
}
|
||||
|
||||
exports.reset = function (commitRef) {
|
||||
log.debug('in reset', commitRef)
|
||||
var ref = commitRef.split(':')[0]
|
||||
var parentCount = parseInt(commitRef.split(':')[1])
|
||||
var commit = ref === 'HEAD' ? head : commits[branches[ref]]
|
||||
log.debug(commit, parentCount)
|
||||
while (parentCount > 0) {
|
||||
commit = commits[commit.parent]
|
||||
parentCount--
|
||||
if (!commit) {
|
||||
var err = 'Critical error - unique parent commit not found during reset'
|
||||
log.error(err)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
head = commit
|
||||
branches[curBranch] = commit.id
|
||||
}
|
||||
|
||||
function upsert (arr, key, newval) {
|
||||
const index = arr.indexOf(key)
|
||||
if (index === -1) {
|
||||
arr.push(newval)
|
||||
} else {
|
||||
arr.splice(index, 1, newval)
|
||||
}
|
||||
}
|
||||
|
||||
function prettyPrintCommitHistory (commitArr) {
|
||||
var commit = _.maxBy(commitArr, 'seq')
|
||||
var line = ''
|
||||
commitArr.forEach(function (c) {
|
||||
if (c === commit) {
|
||||
line += '\t*'
|
||||
} else {
|
||||
line += '\t|'
|
||||
}
|
||||
})
|
||||
var label = [line, commit.id, commit.seq]
|
||||
_.each(branches, function (value, key) {
|
||||
if (value === commit.id) label.push(key)
|
||||
})
|
||||
log.debug(label.join(' '))
|
||||
if (Array.isArray(commit.parent)) {
|
||||
var newCommit = commits[commit.parent[0]]
|
||||
upsert(commitArr, commit, newCommit)
|
||||
commitArr.push(commits[commit.parent[1]])
|
||||
} else if (commit.parent == null) {
|
||||
return
|
||||
} else {
|
||||
var nextCommit = commits[commit.parent]
|
||||
upsert(commitArr, commit, nextCommit)
|
||||
}
|
||||
commitArr = _.uniqBy(commitArr, 'id')
|
||||
prettyPrintCommitHistory(commitArr)
|
||||
}
|
||||
|
||||
exports.prettyPrint = function () {
|
||||
log.debug(commits)
|
||||
var node = exports.getCommitsArray()[0]
|
||||
prettyPrintCommitHistory([node])
|
||||
}
|
||||
|
||||
exports.clear = function () {
|
||||
commits = {}
|
||||
head = null
|
||||
branches = { 'master': head }
|
||||
curBranch = 'master'
|
||||
seq = 0
|
||||
}
|
||||
|
||||
exports.getBranchesAsObjArray = function () {
|
||||
const branchArr = _.map(branches, function (value, key) {
|
||||
return { 'name': key, 'commit': commits[value] }
|
||||
})
|
||||
return branchArr
|
||||
}
|
||||
|
||||
exports.getBranches = function () { return branches }
|
||||
exports.getCommits = function () { return commits }
|
||||
exports.getCommitsArray = function () {
|
||||
var commitArr = Object.keys(commits).map(function (key) {
|
||||
return commits[key]
|
||||
})
|
||||
commitArr.forEach(function (o) { log.debug(o.id) })
|
||||
return _.orderBy(commitArr, ['seq'], ['desc'])
|
||||
}
|
||||
exports.getCurrentBranch = function () { return curBranch }
|
||||
exports.getDirection = function () { return direction }
|
||||
exports.getHead = function () { return head }
|
||||
@@ -1,692 +0,0 @@
/* parser generated by jison 0.4.17 */
/*
  Returns a Parser object of the following structure:

  Parser: {
    yy: {}
  }

  Parser.prototype: {
    yy: {},
    trace: function(),
    symbols_: {associative list: name ==> number},
    terminals_: {associative list: number ==> name},
    productions_: [...],
    performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
    table: [...],
    defaultActions: {...},
    parseError: function(str, hash),
    parse: function(input),

    lexer: {
      EOF: 1,
      parseError: function(str, hash),
      setInput: function(input),
      input: function(),
      unput: function(str),
      more: function(),
      less: function(n),
      pastInput: function(),
      upcomingInput: function(),
      showPosition: function(),
      test_match: function(regex_match_array, rule_index),
      next: function(),
      lex: function(),
      begin: function(condition),
      popState: function(),
      _currentRules: function(),
      topState: function(),
      pushState: function(condition),

      options: {
        ranges: boolean (optional: true ==> token location info will include a .range[] member)
        flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
        backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
      },

      performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
      rules: [...],
      conditions: {associative list: name ==> set},
    }
  }

  token location info (@$, _$, etc.): {
    first_line: n,
    last_line: n,
    first_column: n,
    last_column: n,
    range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
  }

  the parseError function receives a 'hash' object with these members for lexer and parser errors: {
    text: (matched text)
    token: (the produced terminal token, if any)
    line: (yylineno)
  }
  while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
    loc: (yylloc)
    expected: (string describing the set of expected tokens)
    recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
  }
*/
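The block comment above is effectively the whole API of the generated module: you hand it a yy object carrying the callbacks the grammar actions below expect (setDirection, setOptions, commit, branch, checkout, merge, reset for this gitGraph grammar) and call parse(). A hedged sketch of driving such a module, with an illustrative require path and stub callbacks:

// Hedged sketch: driving a jison-generated parser. The path is illustrative;
// the callback names follow the semantic actions visible in performAction below.
const parser = require('./parser/gitGraph').parser

parser.yy = {
  setDirection: dir => console.log('direction', dir),
  setOptions: opts => console.log('options', opts),
  commit: msg => console.log('commit', msg),
  branch: name => console.log('branch', name),
  checkout: name => console.log('checkout', name),
  merge: name => console.log('merge', name),
  reset: ref => console.log('reset', ref)
}

parser.parse('gitGraph:\ncommit\nbranch newbranch\ncheckout newbranch\ncommit\n')
// Logs: commit '', branch 'newbranch', checkout 'newbranch', commit ''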
var parser = (function () {
  var o = function (k, v, o, l) { for (o = o || {}, l = k.length; l--; o[k[l]] = v);return o }, $V0 = [2, 3], $V1 = [1, 7], $V2 = [7, 12, 15, 17, 19, 20, 21], $V3 = [7, 11, 12, 15, 17, 19, 20, 21], $V4 = [2, 20], $V5 = [1, 32]
  var parser = {trace: function trace () { },
    yy: {},
    symbols_: {'error': 2, 'start': 3, 'GG': 4, ':': 5, 'document': 6, 'EOF': 7, 'DIR': 8, 'options': 9, 'body': 10, 'OPT': 11, 'NL': 12, 'line': 13, 'statement': 14, 'COMMIT': 15, 'commit_arg': 16, 'BRANCH': 17, 'ID': 18, 'CHECKOUT': 19, 'MERGE': 20, 'RESET': 21, 'reset_arg': 22, 'STR': 23, 'HEAD': 24, 'reset_parents': 25, 'CARET': 26, '$accept': 0, '$end': 1},
    terminals_: {2: 'error', 4: 'GG', 5: ':', 7: 'EOF', 8: 'DIR', 11: 'OPT', 12: 'NL', 15: 'COMMIT', 17: 'BRANCH', 18: 'ID', 19: 'CHECKOUT', 20: 'MERGE', 21: 'RESET', 23: 'STR', 24: 'HEAD', 26: 'CARET'},
    productions_: [0, [3, 4], [3, 5], [6, 0], [6, 2], [9, 2], [9, 1], [10, 0], [10, 2], [13, 2], [13, 1], [14, 2], [14, 2], [14, 2], [14, 2], [14, 2], [16, 0], [16, 1], [22, 2], [22, 2], [25, 0], [25, 2]],
    performAction: function anonymous (yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
      /* this == yyval */

      var $0 = $$.length - 1
      switch (yystate) {
        case 1:
          return $$[$0 - 1]
          break
        case 2:
          yy.setDirection($$[$0 - 3]); return $$[$0 - 1]
          break
        case 4:
          yy.setOptions($$[$0 - 1]); this.$ = $$[$0]
          break
        case 5:
          $$[$0 - 1] += $$[$0]; this.$ = $$[$0 - 1]
          break
        case 7:
          this.$ = []
          break
        case 8:
          $$[$0 - 1].push($$[$0]); this.$ = $$[$0 - 1]
          break
        case 9:
          this.$ = $$[$0 - 1]
          break
        case 11:
          yy.commit($$[$0])
          break
        case 12:
          yy.branch($$[$0])
          break
        case 13:
          yy.checkout($$[$0])
          break
        case 14:
          yy.merge($$[$0])
          break
        case 15:
          yy.reset($$[$0])
          break
        case 16:
          this.$ = ''
          break
        case 17:
          this.$ = $$[$0]
          break
        case 18:
          this.$ = $$[$0 - 1] + ':' + $$[$0]
          break
        case 19:
          this.$ = $$[$0 - 1] + ':' + yy.count; yy.count = 0
          break
        case 20:
          yy.count = 0
          break
        case 21:
          yy.count += 1
          break
      }
    },
    table: [{3: 1, 4: [1, 2]}, {1: [3]}, {5: [1, 3], 8: [1, 4]}, {6: 5, 7: $V0, 9: 6, 12: $V1}, {5: [1, 8]}, {7: [1, 9]}, o($V2, [2, 7], {10: 10, 11: [1, 11]}), o($V3, [2, 6]), {6: 12, 7: $V0, 9: 6, 12: $V1}, {1: [2, 1]}, {7: [2, 4], 12: [1, 15], 13: 13, 14: 14, 15: [1, 16], 17: [1, 17], 19: [1, 18], 20: [1, 19], 21: [1, 20]}, o($V3, [2, 5]), {7: [1, 21]}, o($V2, [2, 8]), {12: [1, 22]}, o($V2, [2, 10]), {12: [2, 16], 16: 23, 23: [1, 24]}, {18: [1, 25]}, {18: [1, 26]}, {18: [1, 27]}, {18: [1, 30], 22: 28, 24: [1, 29]}, {1: [2, 2]}, o($V2, [2, 9]), {12: [2, 11]}, {12: [2, 17]}, {12: [2, 12]}, {12: [2, 13]}, {12: [2, 14]}, {12: [2, 15]}, {12: $V4, 25: 31, 26: $V5}, {12: $V4, 25: 33, 26: $V5}, {12: [2, 18]}, {12: $V4, 25: 34, 26: $V5}, {12: [2, 19]}, {12: [2, 21]}],
    defaultActions: {9: [2, 1], 21: [2, 2], 23: [2, 11], 24: [2, 17], 25: [2, 12], 26: [2, 13], 27: [2, 14], 28: [2, 15], 31: [2, 18], 33: [2, 19], 34: [2, 21]},
    parseError: function parseError (str, hash) {
      if (hash.recoverable) {
        this.trace(str)
      } else {
        function _parseError (msg, hash) {
          this.message = msg
          this.hash = hash
        }
        _parseError.prototype = Error

        throw new _parseError(str, hash)
      }
    },
    parse: function parse (input) {
      var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1
      var args = lstack.slice.call(arguments, 1)
      var lexer = Object.create(this.lexer)
      var sharedState = { yy: {} }
      for (var k in this.yy) {
        if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
          sharedState.yy[k] = this.yy[k]
        }
      }
      lexer.setInput(input, sharedState.yy)
      sharedState.yy.lexer = lexer
      sharedState.yy.parser = this
      if (typeof lexer.yylloc === 'undefined') {
        lexer.yylloc = {}
      }
      var yyloc = lexer.yylloc
      lstack.push(yyloc)
      var ranges = lexer.options && lexer.options.ranges
      if (typeof sharedState.yy.parseError === 'function') {
        this.parseError = sharedState.yy.parseError
      } else {
        this.parseError = Object.getPrototypeOf(this).parseError
      }
      function popStack (n) {
        stack.length = stack.length - 2 * n
        vstack.length = vstack.length - n
        lstack.length = lstack.length - n
      }
      var lex = function () {
        var token
        token = lexer.lex() || EOF
        if (typeof token !== 'number') {
          token = self.symbols_[token] || token
        }
        return token
      }
      var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected
      while (true) {
        state = stack[stack.length - 1]
        if (this.defaultActions[state]) {
          action = this.defaultActions[state]
        } else {
          if (symbol === null || typeof symbol === 'undefined') {
            symbol = lex()
          }
          action = table[state] && table[state][symbol]
        }
        if (typeof action === 'undefined' || !action.length || !action[0]) {
          var errStr = ''
          expected = []
          for (p in table[state]) {
            if (this.terminals_[p] && p > TERROR) {
              expected.push('\'' + this.terminals_[p] + '\'')
            }
          }
          if (lexer.showPosition) {
            errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\''
          } else {
            errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'')
          }
          this.parseError(errStr, {
            text: lexer.match,
            token: this.terminals_[symbol] || symbol,
            line: lexer.yylineno,
            loc: yyloc,
            expected: expected
          })
        }
        if (action[0] instanceof Array && action.length > 1) {
          throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol)
        }
        switch (action[0]) {
          case 1:
            stack.push(symbol)
            vstack.push(lexer.yytext)
            lstack.push(lexer.yylloc)
            stack.push(action[1])
            symbol = null
            if (!preErrorSymbol) {
              yyleng = lexer.yyleng
              yytext = lexer.yytext
              yylineno = lexer.yylineno
              yyloc = lexer.yylloc
              if (recovering > 0) {
                recovering--
              }
            } else {
              symbol = preErrorSymbol
              preErrorSymbol = null
            }
            break
          case 2:
            len = this.productions_[action[1]][1]
            yyval.$ = vstack[vstack.length - len]
            yyval._$ = {
              first_line: lstack[lstack.length - (len || 1)].first_line,
              last_line: lstack[lstack.length - 1].last_line,
              first_column: lstack[lstack.length - (len || 1)].first_column,
              last_column: lstack[lstack.length - 1].last_column
            }
            if (ranges) {
              yyval._$.range = [
                lstack[lstack.length - (len || 1)].range[0],
                lstack[lstack.length - 1].range[1]
              ]
            }
            r = this.performAction.apply(yyval, [
              yytext,
              yyleng,
              yylineno,
              sharedState.yy,
              action[1],
              vstack,
              lstack
            ].concat(args))
            if (typeof r !== 'undefined') {
              return r
            }
            if (len) {
              stack = stack.slice(0, -1 * len * 2)
              vstack = vstack.slice(0, -1 * len)
              lstack = lstack.slice(0, -1 * len)
            }
            stack.push(this.productions_[action[1]][0])
            vstack.push(yyval.$)
            lstack.push(yyval._$)
            newState = table[stack[stack.length - 2]][stack[stack.length - 1]]
            stack.push(newState)
            break
          case 3:
            return true
        }
      }
      return true
    }}
  /* generated by jison-lex 0.3.4 */
  var lexer = (function () {
    var lexer = ({

      EOF: 1,

      parseError: function parseError (str, hash) {
        if (this.yy.parser) {
          this.yy.parser.parseError(str, hash)
        } else {
          throw new Error(str)
        }
      },

      // resets the lexer, sets new input
      setInput: function (input, yy) {
        this.yy = yy || this.yy || {}
        this._input = input
        this._more = this._backtrack = this.done = false
        this.yylineno = this.yyleng = 0
        this.yytext = this.matched = this.match = ''
        this.conditionStack = ['INITIAL']
        this.yylloc = {
          first_line: 1,
          first_column: 0,
          last_line: 1,
          last_column: 0
        }
        if (this.options.ranges) {
          this.yylloc.range = [0, 0]
        }
        this.offset = 0
        return this
      },

      // consumes and returns one char from the input
      input: function () {
        var ch = this._input[0]
        this.yytext += ch
        this.yyleng++
        this.offset++
        this.match += ch
        this.matched += ch
        var lines = ch.match(/(?:\r\n?|\n).*/g)
        if (lines) {
          this.yylineno++
          this.yylloc.last_line++
        } else {
          this.yylloc.last_column++
        }
        if (this.options.ranges) {
          this.yylloc.range[1]++
        }

        this._input = this._input.slice(1)
        return ch
      },

      // unshifts one char (or a string) into the input
      unput: function (ch) {
        var len = ch.length
        var lines = ch.split(/(?:\r\n?|\n)/g)

        this._input = ch + this._input
        this.yytext = this.yytext.substr(0, this.yytext.length - len)
        // this.yyleng -= len;
        this.offset -= len
        var oldLines = this.match.split(/(?:\r\n?|\n)/g)
        this.match = this.match.substr(0, this.match.length - 1)
        this.matched = this.matched.substr(0, this.matched.length - 1)

        if (lines.length - 1) {
          this.yylineno -= lines.length - 1
        }
        var r = this.yylloc.range

        this.yylloc = {
          first_line: this.yylloc.first_line,
          last_line: this.yylineno + 1,
          first_column: this.yylloc.first_column,
          last_column: lines
            ? (lines.length === oldLines.length ? this.yylloc.first_column : 0) +
              oldLines[oldLines.length - lines.length].length - lines[0].length
            : this.yylloc.first_column - len
        }

        if (this.options.ranges) {
          this.yylloc.range = [r[0], r[0] + this.yyleng - len]
        }
        this.yyleng = this.yytext.length
        return this
      },

      // When called from action, caches matched text and appends it on next action
      more: function () {
        this._more = true
        return this
      },

      // When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
      reject: function () {
        if (this.options.backtrack_lexer) {
          this._backtrack = true
        } else {
          return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
            text: '',
            token: null,
            line: this.yylineno
          })
        }
        return this
      },

      // retain first n characters of the match
      less: function (n) {
        this.unput(this.match.slice(n))
      },

      // displays already matched input, i.e. for error messages
      pastInput: function () {
        var past = this.matched.substr(0, this.matched.length - this.match.length)
        return (past.length > 20 ? '...' : '') + past.substr(-20).replace(/\n/g, '')
      },

      // displays upcoming input, i.e. for error messages
      upcomingInput: function () {
        var next = this.match
        if (next.length < 20) {
          next += this._input.substr(0, 20 - next.length)
        }
        return (next.substr(0, 20) + (next.length > 20 ? '...' : '')).replace(/\n/g, '')
      },

      // displays the character position where the lexing error occurred, i.e. for error messages
      showPosition: function () {
        var pre = this.pastInput()
        var c = new Array(pre.length + 1).join('-')
        return pre + this.upcomingInput() + '\n' + c + '^'
      },

      // test the lexed token: return FALSE when not a match, otherwise return token
      test_match: function (match, indexed_rule) {
        var token,
          lines,
          backup

        if (this.options.backtrack_lexer) {
          // save context
          backup = {
            yylineno: this.yylineno,
            yylloc: {
              first_line: this.yylloc.first_line,
              last_line: this.last_line,
              first_column: this.yylloc.first_column,
              last_column: this.yylloc.last_column
            },
            yytext: this.yytext,
            match: this.match,
            matches: this.matches,
            matched: this.matched,
            yyleng: this.yyleng,
            offset: this.offset,
            _more: this._more,
            _input: this._input,
            yy: this.yy,
            conditionStack: this.conditionStack.slice(0),
            done: this.done
          }
          if (this.options.ranges) {
            backup.yylloc.range = this.yylloc.range.slice(0)
          }
        }

        lines = match[0].match(/(?:\r\n?|\n).*/g)
        if (lines) {
          this.yylineno += lines.length
        }
        this.yylloc = {
          first_line: this.yylloc.last_line,
          last_line: this.yylineno + 1,
          first_column: this.yylloc.last_column,
          last_column: lines
            ? lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length
            : this.yylloc.last_column + match[0].length
        }
        this.yytext += match[0]
        this.match += match[0]
        this.matches = match
        this.yyleng = this.yytext.length
        if (this.options.ranges) {
          this.yylloc.range = [this.offset, this.offset += this.yyleng]
        }
        this._more = false
        this._backtrack = false
        this._input = this._input.slice(match[0].length)
        this.matched += match[0]
        token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1])
        if (this.done && this._input) {
          this.done = false
        }
        if (token) {
          return token
        } else if (this._backtrack) {
          // recover context
          for (var k in backup) {
            this[k] = backup[k]
          }
          return false // rule action called reject() implying the next rule should be tested instead.
        }
        return false
      },

      // return next match in input
      next: function () {
        if (this.done) {
          return this.EOF
        }
        if (!this._input) {
          this.done = true
        }

        var token,
          match,
          tempMatch,
          index
        if (!this._more) {
          this.yytext = ''
          this.match = ''
        }
        var rules = this._currentRules()
        for (var i = 0; i < rules.length; i++) {
          tempMatch = this._input.match(this.rules[rules[i]])
          if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
            match = tempMatch
            index = i
            if (this.options.backtrack_lexer) {
              token = this.test_match(tempMatch, rules[i])
              if (token !== false) {
                return token
              } else if (this._backtrack) {
                match = false
                continue // rule action called reject() implying a rule MISmatch.
              } else {
                // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
                return false
              }
            } else if (!this.options.flex) {
              break
            }
          }
        }
        if (match) {
          token = this.test_match(match, rules[index])
          if (token !== false) {
            return token
          }
          // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
          return false
        }
        if (this._input === '') {
          return this.EOF
        } else {
          return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
            text: '',
            token: null,
            line: this.yylineno
          })
        }
      },

      // return next match that has a token
      lex: function lex () {
        var r = this.next()
        if (r) {
          return r
        } else {
          return this.lex()
        }
      },

      // activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
      begin: function begin (condition) {
        this.conditionStack.push(condition)
      },

      // pop the previously active lexer condition state off the condition stack
      popState: function popState () {
        var n = this.conditionStack.length - 1
        if (n > 0) {
          return this.conditionStack.pop()
        } else {
          return this.conditionStack[0]
        }
      },

      // produce the lexer rule set which is active for the currently active lexer condition state
      _currentRules: function _currentRules () {
        if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
          return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules
        } else {
          return this.conditions['INITIAL'].rules
        }
      },

      // return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
      topState: function topState (n) {
        n = this.conditionStack.length - 1 - Math.abs(n || 0)
        if (n >= 0) {
          return this.conditionStack[n]
        } else {
          return 'INITIAL'
        }
      },

      // alias for begin(condition)
      pushState: function pushState (condition) {
        this.begin(condition)
      },

      // return the number of states currently on the stack
      stateStackSize: function stateStackSize () {
        return this.conditionStack.length
      },
      options: {'case-insensitive': true},
      performAction: function anonymous (yy, yy_, $avoiding_name_collisions, YY_START) {
        var YYSTATE = YY_START
        switch ($avoiding_name_collisions) {
          case 0:return 12
            break
          case 1:/* skip all whitespace */
            break
          case 2:/* skip comments */
            break
          case 3:/* skip comments */
            break
          case 4:return 4
            break
          case 5:return 15
            break
          case 6:return 17
            break
          case 7:return 20
            break
          case 8:return 21
            break
          case 9:return 19
            break
          case 10:return 8
            break
          case 11:return 8
            break
          case 12:return 5
            break
          case 13:return 26
            break
          case 14:this.begin('options')
            break
          case 15:this.popState()
            break
          case 16:return 11
            break
          case 17:this.begin('string')
            break
          case 18:this.popState()
            break
          case 19:return 23
            break
          case 20:return 18
            break
          case 21:return 7
            break
        }
      },
      rules: [/^(?:(\r?\n)+)/i, /^(?:\s+)/i, /^(?:#[^\n]*)/i, /^(?:%[^\n]*)/i, /^(?:gitGraph\b)/i, /^(?:commit\b)/i, /^(?:branch\b)/i, /^(?:merge\b)/i, /^(?:reset\b)/i, /^(?:checkout\b)/i, /^(?:LR\b)/i, /^(?:BT\b)/i, /^(?::)/i, /^(?:\^)/i, /^(?:options\r?\n)/i, /^(?:end\r?\n)/i, /^(?:[^\n]+\r?\n)/i, /^(?:["])/i, /^(?:["])/i, /^(?:[^"]*)/i, /^(?:[a-zA-Z][a-zA-Z0-9_]+)/i, /^(?:$)/i],
      conditions: {'options': {'rules': [15, 16], 'inclusive': false}, 'string': {'rules': [18, 19], 'inclusive': false}, 'INITIAL': {'rules': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 17, 20, 21], 'inclusive': true}}
    })
    return lexer
  })()
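The exclusive 'options' and 'string' conditions above gate which rules fire: after rule 14 matches an `options` line the lexer only tests rules 15 and 16 until popState(), and a `"` switches to the string condition for rules 18 and 19. A hedged sketch of inspecting the token stream directly (require path as in the earlier sketch):

// Hedged sketch: inspecting the tokens the lexer above produces.
const parser = require('./parser/gitGraph').parser // illustrative path
const lexer = Object.create(parser.lexer)

lexer.setInput('gitGraph:\ncommit\n')
let tok
while ((tok = lexer.lex()) !== lexer.EOF) {
  // map numeric token ids back to terminal names via terminals_
  console.log(parser.terminals_[tok] || tok, JSON.stringify(lexer.yytext))
}
// Expected sequence: GG "gitGraph", ":" ":", NL "\n", COMMIT "commit", NL "\n",
// then the grammar's EOF terminal before lexer.EOF ends the loop.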
  parser.lexer = lexer
  function Parser () {
    this.yy = {}
  }
  Parser.prototype = parser; parser.Parser = Parser
  return new Parser()
})()

if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
  exports.parser = parser
  exports.Parser = parser.Parser
  exports.parse = function () { return parser.parse.apply(parser, arguments) }
  exports.main = function commonjsMain (args) {
    if (!args[1]) {
      console.log('Usage: ' + args[0] + ' FILE')
      process.exit(1)
    }
    var source = require('fs').readFileSync(require('path').normalize(args[1]), 'utf8')
    return exports.parser.parse(source)
  }
  if (typeof module !== 'undefined' && require.main === module) {
    exports.main(process.argv.slice(1))
  }
}
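The CommonJS wrapper also gives the generated parser a tiny CLI: commonjsMain reads the file named on the command line and feeds it to parse(). A hedged sketch of the programmatic equivalent (all paths illustrative; the parser only does something useful once parser.yy points at the gitGraph database):

// Hedged sketch of what commonjsMain amounts to.
var fs = require('fs')
var parser = require('./parser/gitGraph').parser   // the generated module above

parser.yy = require('./gitGraphAst')                // hypothetical: the gitGraph database
var source = fs.readFileSync('diagram.mmd', 'utf8') // e.g. "gitGraph:\ncommit\n"
parser.parse(source)                                // populates the database via the yy callbacks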
15  src/diagrams/info/info.spec.js  Normal file
@@ -0,0 +1,15 @@
/* eslint-env jasmine */
describe('when parsing an info graph it', function () {
  var ex
  beforeEach(function () {
    ex = require('./parser/info').parser
    ex.yy = require('./infoDb')
  })

  it('should handle an info definition', function () {
    var str = `info
    showInfo`

    ex.parse(str)
  })
})
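The spec only checks that the happy path parses. A natural follow-up case, sketched here rather than part of this change, would assert the side effect on the infoDb added below (the showInfo statement runs yy.setInfo(true) per the grammar at the end of this diff):

// Hypothetical extra case, sketched against the infoDb accessors added below.
it('should flag that info was requested', function () {
  var str = `info
  showInfo`

  ex.parse(str)
  expect(ex.yy.getInfo()).toBe(true)
})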
36  src/diagrams/info/infoDb.js  Normal file
@@ -0,0 +1,36 @@
/**
 * Created by knut on 15-01-14.
 */
import { logger } from '../../logger'

var message = ''
var info = false

export const setMessage = txt => {
  logger.debug('Setting message to: ' + txt)
  message = txt
}

export const getMessage = () => {
  return message
}

export const setInfo = inf => {
  info = inf
}

export const getInfo = () => {
  return info
}

// export const parseError = (err, hash) => {
//   global.mermaidAPI.parseError(err, hash)
// }

export default {
  setMessage,
  getMessage,
  setInfo,
  getInfo
  // parseError
}
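infoDb is plain module-level state shared between the parser (via parser.yy) and the renderer. A usage sketch of the accessors above (import path illustrative):

// Hedged usage sketch of the info database.
import db from './infoDb'

db.setInfo(true)
db.setMessage('This is a message') // logs "Setting message to: ..." via logger.debug
db.getInfo()                        // -> true
db.getMessage()                     // -> 'This is a message'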
57  src/diagrams/info/infoRenderer.js  Normal file
@@ -0,0 +1,57 @@
/**
 * Created by knut on 14-12-11.
 */
import * as d3 from 'd3'
import db from './infoDb'
import infoParser from './parser/info.js'
import { logger } from '../../logger'

const conf = {
}
export const setConf = function (cnf) {
  const keys = Object.keys(cnf)

  keys.forEach(function (key) {
    conf[key] = cnf[key]
  })
}

/**
 * Draws an info picture in the tag with id: id based on the graph definition in text.
 * @param txt
 * @param id
 * @param ver
 */
export const draw = (txt, id, ver) => {
  try {
    const parser = infoParser.parser
    parser.yy = db
    logger.debug('Rendering info diagram\n' + txt)
    // Parse the graph definition
    parser.parse(txt)
    logger.debug('Parsed info diagram')
    // Select the svg element that will host the diagram
    const svg = d3.select('#' + id)

    const g = svg.append('g')

    g.append('text') // version label
      .attr('x', 100)
      .attr('y', 40)
      .attr('class', 'version')
      .attr('font-size', '32px')
      .style('text-anchor', 'middle')
      .text('v ' + ver)

    svg.attr('height', 100)
    svg.attr('width', 400)
    // svg.attr('viewBox', '0 0 300 150');
  } catch (e) {
    logger.error('Error while rendering info diagram')
    logger.error(e.message)
  }
}

export default {
  setConf,
  draw
}
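A hedged sketch of calling the renderer in a browser-like environment (the element id and version string are illustrative; mermaid's API normally creates the target SVG itself):

// Hedged sketch: rendering the info diagram into an existing svg element.
import infoRenderer from './infoRenderer'

document.body.innerHTML = '<svg id="info-diagram"></svg>'

infoRenderer.draw('info\n    showInfo', 'info-diagram', '8.0.0')
// Appends <g><text class="version" ...>v 8.0.0</text></g> and sizes the svg to 400x100.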
@@ -1,5 +1,5 @@
/** mermaid
 * https://mermaidjs.github.io/
 * http://knsv.github.io/mermaid/
 * (c) 2015 Knut Sveidqvist
 * MIT license.
 */
@@ -13,13 +13,12 @@

%%

[\n]+          return 'NL';
"info"         return 'info' ;
[\s\n\r]+      return 'NL' ;
[\s]+          return 'space';
"showInfo"     return 'showInfo';
"info"         return 'info';
"say"          return 'say';
":"[^#\n;]+    return 'TXT';
<<EOF>>        return 'EOF';
.              return 'INVALID';
<<EOF>>        return 'EOF' ;
.              return 'TXT' ;

/lex

@@ -28,6 +27,7 @@

%% /* language grammar */

start
// %{ : info document 'EOF' { return yy; } }
  : info document 'EOF' { return yy; }
  ;

@@ -43,11 +43,6 @@ line

statement
  : showInfo { yy.setInfo(true); }
  | message { yy.setMessage($1); }
  ;

message
  : 'say' TXT { $$ = $1.substring(1).trim().replace(/\\n/gm, "\n"); }
  ;

%%
%%
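The revised grammar accepts two statement forms: the bare showInfo request and a say message. A hedged sketch of exercising it the same way info.spec.js above does:

// Hedged sketch: driving the revised info grammar (paths as in the spec above).
const parser = require('./parser/info').parser
parser.yy = require('./infoDb')

parser.parse('info\n    showInfo') // statement: showInfo -> yy.setInfo(true)
parser.yy.getInfo()                // -> true

// The other statement form, `say`, routes its text through yy.setMessage(...)
// (see the message rule above); the renderer itself only draws the version text.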