#457 Added grammar for double directional arrows

Knut Sveidqvist
2019-06-28 12:10:16 +02:00
parent 231aaedec8
commit 40d10ea741
4 changed files with 540 additions and 142 deletions
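
In short: the flowchart grammar now accepts arrowheads at both ends of an edge, as point (<-->), cross (x--x), and circle (o--o) heads, on normal, dotted, and thick strokes, with or without a label. A quick sketch of the accepted input, written against the same flow.parser API the spec file below uses (the flow import and yy setup are assumed, as in the spec's own setup code):

    const flow = require('./flowParser')  // assumed import, mirroring the spec file
    flow.parser.parse('graph TD;\nA<-->B;')           // point heads, normal stroke
    flow.parser.parse('graph TD;\nA x-- note --x B;') // cross heads with a label
    flow.parser.parse('graph TD;\nA o==o B;')         // circle heads, thick stroke
    flow.parser.parse('graph TD;\nA<-. note .->B;')   // point heads, dotted stroke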


@@ -36,45 +36,45 @@
"," return 'COMMA';
"*" return 'MULT';
\s*\-\-[x]\s* return 'ARROW_CROSS';
\s*[x]\-\-[x]\s* return 'DOUBLE_ARROW_CROSS';
\s*\-\-\>\s* return 'ARROW_POINT';
\s*\<\-\-\>\s* return 'DOUBLE_ARROW_POINT';
\s*\-\-[o]\s* return 'ARROW_CIRCLE';
\s*[x]\-\-[x]\s* return 'DOUBLE_ARROW_CROSS';
\s*[o]\-\-[o]\s* return 'DOUBLE_ARROW_CIRCLE';
\s*[o]\.\-[o]\s* return 'DOUBLE_DOTTED_ARROW_CIRCLE';
\s*\<\=\=\>\s* return 'DOUBLE_THICK_ARROW_POINT';
\s*[o]\=\=[o]\s* return 'DOUBLE_THICK_ARROW_CIRCLE';
\s*[x]\=\=[x]\s* return 'DOUBLE_THICK_ARROW_CROSS';
\s*[x].\-[x]\s* return 'DOUBLE_DOTTED_ARROW_CROSS';
\s*[x]\-\.\-[x]\s* return 'DOUBLE_DOTTED_ARROW_CROSS';
\s*\<\.\-\>\s* return 'DOUBLE_DOTTED_ARROW_POINT';
\s*\<\-\.\-\>\s* return 'DOUBLE_DOTTED_ARROW_POINT';
\s*[o]\-\.\-[o]\s* return 'DOUBLE_DOTTED_ARROW_CIRCLE';
\s*\-\-[o]\s* return 'ARROW_CIRCLE';
\s*\-\-\-\s* return 'ARROW_OPEN';
\s*\-\.\-[x]\s* return 'DOTTED_ARROW_CROSS';
\s*[x]\-\.\-[x]\s* return 'DOUBLE_DOTTED_ARROW_CROSS';
\s*\-\.\-\>\s* return 'DOTTED_ARROW_POINT';
\s*\<\-\.\-\>\s* return 'DOUBLE_DOTTED_ARROW_POINT';
\s*\-\.\-[o]\s* return 'DOTTED_ARROW_CIRCLE';
\s*[o]\-\.\-[o]\s* return 'DOUBLE_DOTTED_ARROW_CIRCLE';
\s*\-\.\-\s* return 'DOTTED_ARROW_OPEN';
\s*.\-[x]\s* return 'DOTTED_ARROW_CROSS';
\s*[x].\-[x]\s* return 'DOUBLE_DOTTED_ARROW_CROSS';
\s*\.\-\>\s* return 'DOTTED_ARROW_POINT';
\s*\<\.\-\>\s* return 'DOUBLE_DOTTED_ARROW_POINT';
\s*\.\-[o]\s* return 'DOTTED_ARROW_CIRCLE';
\s*[o]\.\-[o]\s* return 'DOUBLE_DOTTED_ARROW_CIRCLE';
\s*\.\-\s* return 'DOTTED_ARROW_OPEN';
\s*\=\=[x]\s* return 'THICK_ARROW_CROSS';
\s*[x]\=\=[x]\s* return 'DOUBLE_THICK_ARROW_CROSS';
\s*\=\=\>\s* return 'THICK_ARROW_POINT';
\s*\<\=\=\>\s* return 'DOUBLE_THICK_ARROW_POINT';
\s*\=\=[o]\s* return 'THICK_ARROW_CIRCLE';
\s*[o]\=\=[o]\s* return 'DOUBLE_THICK_ARROW_CIRCLE';
\s*\=\=[\=]\s* return 'THICK_ARROW_OPEN';
\s*\<\-\-\s* return 'START_DOUBLE_ARROW_POINT';
\s*[x]\-\-\s* return 'START_DOUBLE_ARROW_CROSS';
\s*[o]\-\-\s* return 'START_DOUBLE_ARROW_CIRCLE';
\s*\<\-\.\s* return 'START_DOUBLE_DOTTED_ARROW_POINT';
\s*[x]\-\.\s* return 'START_DOUBLE_DOTTED_ARROW_CROSS';
\s*[o]\-\.\s* return 'START_DOUBLE_DOTTED_ARROW_CIRCLE';
\s*\<\=\=\s* return 'START_DOUBLE_THICK_ARROW_POINT';
\s*[x]\=\=\s* return 'START_DOUBLE_THICK_ARROW_CROSS';
\s*[o]\=\=\s* return 'START_DOUBLE_THICK_ARROW_CIRCLE';
\s*\-\-\s* return '--';
\s*\-\.\s* return '-.';
\s*\=\=\s* return '==';
\s*\<\-\-\s* return 'START_DOUBLE_ARROW_POINT';
\s*\[x]\-\-\s* return 'START_DOUBLE_ARROW_CROSS';
\s*\[o]\-\-\s* return 'START_DOUBLE_ARROW_CIRCLE';
\s*\<\-\.\s* return 'START_DOUBLE_DOTTED_ARROW_POINT';
\s*\[x]\-\.\s* return 'START_DOUBLE_DOTTED_ARROW_CROSS';
\s*\[o]\-\.\s* return 'START_DOUBLE_DOTTED_ARROW_CIRCLE';
\s*\<\=\=\s* return 'START_DOUBLE_THICK_ARROW_POINT';
\s*\[x]\=\=\s* return 'START_DOUBLE_THICK_ARROW_CROSS';
\s*\[o]\=\=\s* return 'START_DOUBLE_THICK_ARROW_CIRCLE';
"(-" return '(-';
"-)" return '-)';
\- return 'MINUS';
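
A note on the two token families above: when a double-ended link carries a label, as in x-- label --x, the lexer cannot match the whole arrow as one token because the text sits in the middle. The opening half therefore returns one of the new START_DOUBLE_* tokens and the closing half reuses the existing single-arrow token; the grammar hunk below pairs the two. Roughly, 'A x-- label --x B' lexes as: A, START_DOUBLE_ARROW_CROSS ('x--'), the label text, ARROW_CROSS ('--x'), B.
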
@@ -331,50 +331,84 @@ link: linkStatement arrowText
{$$ = {"type":"double_arrow_point","stroke":"normal","text":$2};}
| '--' text ARROW_CIRCLE
{$$ = {"type":"arrow_circle","stroke":"normal","text":$2};}
| 'START_DOUBLE_ARROW_CIRCLE' text ARROW_CIRCLE
{$$ = {"type":"double_arrow_circle","stroke":"normal","text":$2};}
| '--' text ARROW_CROSS
{$$ = {"type":"arrow_cross","stroke":"normal","text":$2};}
| 'START_DOUBLE_ARROW_CROSS' text ARROW_CROSS
{$$ = {"type":"double_arrow_cross","stroke":"normal","text":$2};}
| '--' text ARROW_OPEN
{$$ = {"type":"arrow_open","stroke":"normal","text":$2};}
| '-.' text DOTTED_ARROW_POINT
{$$ = {"type":"arrow","stroke":"dotted","text":$2};}
| 'START_DOUBLE_DOTTED_ARROW_POINT' text DOTTED_ARROW_POINT
{$$ = {"type":"double_arrow_point","stroke":"dotted","text":$2};}
| '-.' text DOTTED_ARROW_CIRCLE
{$$ = {"type":"arrow_circle","stroke":"dotted","text":$2};}
| 'START_DOUBLE_DOTTED_ARROW_CIRCLE' text DOTTED_ARROW_CIRCLE
{$$ = {"type":"double_arrow_circle","stroke":"dotted","text":$2};}
| '-.' text DOTTED_ARROW_CROSS
{$$ = {"type":"arrow_cross","stroke":"dotted","text":$2};}
| 'START_DOUBLE_DOTTED_ARROW_CROSS' text DOTTED_ARROW_CROSS
{$$ = {"type":"double_arrow_cross","stroke":"dotted","text":$2};}
| '-.' text DOTTED_ARROW_OPEN
{$$ = {"type":"arrow_open","stroke":"dotted","text":$2};}
| '==' text THICK_ARROW_POINT
{$$ = {"type":"arrow","stroke":"thick","text":$2};}
| 'START_DOUBLE_THICK_ARROW_POINT' text THICK_ARROW_POINT
{$$ = {"type":"double_arrow_point","stroke":"thick","text":$2};}
| '==' text THICK_ARROW_CIRCLE
{$$ = {"type":"arrow_circle","stroke":"thick","text":$2};}
| 'START_DOUBLE_THICK_ARROW_CIRCLE' text THICK_ARROW_CIRCLE
{$$ = {"type":"double_arrow_circle","stroke":"thick","text":$2};}
| '==' text THICK_ARROW_CROSS
{$$ = {"type":"arrow_cross","stroke":"thick","text":$2};}
| 'START_DOUBLE_THICK_ARROW_CROSS' text THICK_ARROW_CROSS
{$$ = {"type":"double_arrow_cross","stroke":"thick","text":$2};}
| '==' text THICK_ARROW_OPEN
{$$ = {"type":"arrow_open","stroke":"thick","text":$2};}
;
linkStatement: ARROW_POINT
{$$ = {"type":"arrow","stroke":"normal"};}
| DOUBLE_ARROW_POINT
{$$ = {"type":"double_arrow_point","stroke":"normal"};}
| ARROW_CIRCLE
{$$ = {"type":"arrow_circle","stroke":"normal"};}
| DOUBLE_ARROW_CIRCLE
{$$ = {"type":"double_arrow_circle","stroke":"normal"};}
| ARROW_CROSS
{$$ = {"type":"arrow_cross","stroke":"normal"};}
| DOUBLE_ARROW_CROSS
{$$ = {"type":"double_arrow_cross","stroke":"normal"};}
| ARROW_OPEN
{$$ = {"type":"arrow_open","stroke":"normal"};}
| DOTTED_ARROW_POINT
{$$ = {"type":"arrow","stroke":"dotted"};}
| DOUBLE_DOTTED_ARROW_POINT
{$$ = {"type":"double_arrow_point","stroke":"dotted"};}
| DOTTED_ARROW_CIRCLE
{$$ = {"type":"arrow_circle","stroke":"dotted"};}
| DOUBLE_DOTTED_ARROW_CIRCLE
{$$ = {"type":"double_arrow_circle","stroke":"dotted"};}
| DOTTED_ARROW_CROSS
{$$ = {"type":"arrow_cross","stroke":"dotted"};}
| DOUBLE_DOTTED_ARROW_CROSS
{$$ = {"type":"double_arrow_cross","stroke":"dotted"};}
| DOTTED_ARROW_OPEN
{$$ = {"type":"arrow_open","stroke":"dotted"};}
| THICK_ARROW_POINT
{$$ = {"type":"arrow","stroke":"thick"};}
| DOUBLE_THICK_ARROW_POINT
{$$ = {"type":"double_arrow_point","stroke":"thick"};}
| THICK_ARROW_CIRCLE
{$$ = {"type":"arrow_circle","stroke":"thick"};}
| DOUBLE_THICK_ARROW_CIRCLE
{$$ = {"type":"double_arrow_circle","stroke":"thick"};}
| THICK_ARROW_CROSS
{$$ = {"type":"arrow_cross","stroke":"thick"};}
| DOUBLE_THICK_ARROW_CROSS
{$$ = {"type":"double_arrow_cross","stroke":"thick"};}
| THICK_ARROW_OPEN
{$$ = {"type":"arrow_open","stroke":"thick"};}
;
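
The new productions attach type, stroke, and (for the labeled forms) text to the edge. A minimal sketch of the resulting edge object, mirroring the assertions in the spec file below:

    flow.parser.parse('graph TD;\nA<== wide ==>B;')
    const edge = flow.parser.yy.getEdges()[0]
    // edge.start === 'A', edge.end === 'B'
    // edge.type === 'double_arrow_point', edge.stroke === 'thick', edge.text === 'wide'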

File diff suppressed because one or more lines are too long


@@ -592,6 +592,282 @@ describe('when parsing ', function () {
})
})
describe('it should handle multi directional arrows', function () {
describe('point', function () {
it('should handle double edged nodes and edges', function () {
const res = flow.parser.parse('graph TD;\nA<-->B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_point')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text', function () {
const res = flow.parser.parse('graph TD;\nA<-- text -->B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_point')
expect(edges[0].stroke).toBe('normal')
expect(edges[0].text).toBe('text')
})
it('should handle double edged nodes and edges on thick arrows', function () {
const res = flow.parser.parse('graph TD;\nA<==>B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_point')
expect(edges[0].stroke).toBe('thick')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text on thick arrows', function () {
const res = flow.parser.parse('graph TD;\nA<== text ==>B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_point')
expect(edges[0].stroke).toBe('thick')
expect(edges[0].text).toBe('text')
})
it('should handle double edged nodes and edges on dotted arrows', function () {
const res = flow.parser.parse('graph TD;\nA<-.->B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_point')
expect(edges[0].stroke).toBe('dotted')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text on dotted arrows', function () {
const res = flow.parser.parse('graph TD;\nA<-. text .->B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_point')
expect(edges[0].stroke).toBe('dotted')
expect(edges[0].text).toBe('text')
})
})
describe('cross', function () {
it('should handle double edged nodes and edges', function () {
const res = flow.parser.parse('graph TD;\nA x--x B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_cross')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text', function () {
const res = flow.parser.parse('graph TD;\nA x-- text --x B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_cross')
expect(edges[0].stroke).toBe('normal')
expect(edges[0].text).toBe('text')
})
it('should handle double edged nodes and edges on thick arrows', function () {
const res = flow.parser.parse('graph TD;\nA x==x B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_cross')
expect(edges[0].stroke).toBe('thick')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text on thick arrows', function () {
const res = flow.parser.parse('graph TD;\nA x== text ==x B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_cross')
expect(edges[0].stroke).toBe('thick')
expect(edges[0].text).toBe('text')
})
it('should handle double edged nodes and edges on dotted arrows', function () {
const res = flow.parser.parse('graph TD;\nA x-.-x B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_cross')
expect(edges[0].stroke).toBe('dotted')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text on dotted arrows', function () {
const res = flow.parser.parse('graph TD;\nA x-. text .-x B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_cross')
expect(edges[0].stroke).toBe('dotted')
expect(edges[0].text).toBe('text')
})
})
describe('circle', function () {
it('should handle double edged nodes and edges', function () {
const res = flow.parser.parse('graph TD;\nA o--o B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_circle')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text', function () {
const res = flow.parser.parse('graph TD;\nA o-- text --o B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_circle')
expect(edges[0].stroke).toBe('normal')
expect(edges[0].text).toBe('text')
})
it('should handle double edged nodes and edges on thick arrows', function () {
const res = flow.parser.parse('graph TD;\nA o==o B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_circle')
expect(edges[0].stroke).toBe('thick')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text on thick arrows', function () {
const res = flow.parser.parse('graph TD;\nA o== text ==o B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_circle')
expect(edges[0].stroke).toBe('thick')
expect(edges[0].text).toBe('text')
})
it('should handle double edged nodes and edges on dotted arrows', function () {
const res = flow.parser.parse('graph TD;\nA o-.-o B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_circle')
expect(edges[0].stroke).toBe('dotted')
expect(edges[0].text).toBe('')
})
it('should handle double edged nodes with text on dotted arrows', function () {
const res = flow.parser.parse('graph TD;\nA o-. text .-o B;')
const vert = flow.parser.yy.getVertices()
const edges = flow.parser.yy.getEdges()
expect(vert['A'].id).toBe('A')
expect(vert['B'].id).toBe('B')
expect(edges.length).toBe(1)
expect(edges[0].start).toBe('A')
expect(edges[0].end).toBe('B')
expect(edges[0].type).toBe('double_arrow_circle')
expect(edges[0].stroke).toBe('dotted')
expect(edges[0].text).toBe('text')
})
})
})
describe('it should handle text on edges', function () {
it('should handle text without space', function () {
const res = flow.parser.parse('graph TD;A--x|textNoSpace|B;')


@@ -72,12 +72,12 @@
 }
 */
 var parser = (function(){
-var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[6,9,10,12];
+var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[6,9,10];
 var parser = {trace: function trace () { },
 yy: {},
-symbols_: {"error":2,"start":3,"info":4,"document":5,"EOF":6,"line":7,"statement":8,"NL":9,"showInfo":10,"message":11,"say":12,"TXT":13,"$accept":0,"$end":1},
-terminals_: {2:"error",4:"info",6:"EOF",9:"NL",10:"showInfo",12:"say",13:"TXT"},
-productions_: [0,[3,3],[5,0],[5,2],[7,1],[7,1],[8,1],[8,1],[11,2]],
+symbols_: {"error":2,"start":3,"info":4,"document":5,"EOF":6,"line":7,"statement":8,"NL":9,"showInfo":10,"$accept":0,"$end":1},
+terminals_: {2:"error",4:"info",6:"EOF",9:"NL",10:"showInfo"},
+productions_: [0,[3,3],[5,0],[5,2],[7,1],[7,1],[8,1]],
 performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
 /* this == yyval */
@@ -92,15 +92,9 @@ break;
 case 6:
 yy.setInfo(true);
 break;
-case 7:
-yy.setMessage($$[$0]);
-break;
-case 8:
-this.$ = $$[$0-1].substring(1).trim().replace(/\\n/gm, "\n");
-break;
 }
 },
-table: [{3:1,4:[1,2]},{1:[3]},o($V0,[2,2],{5:3}),{6:[1,4],7:5,8:6,9:[1,7],10:[1,8],11:9,12:[1,10]},{1:[2,1]},o($V0,[2,3]),o($V0,[2,4]),o($V0,[2,5]),o($V0,[2,6]),o($V0,[2,7]),{13:[1,11]},o($V0,[2,8])],
+table: [{3:1,4:[1,2]},{1:[3]},o($V0,[2,2],{5:3}),{6:[1,4],7:5,8:6,9:[1,7],10:[1,8]},{1:[2,1]},o($V0,[2,3]),o($V0,[2,4]),o($V0,[2,5]),o($V0,[2,6])],
 defaultActions: {4:[2,1]},
 parseError: function parseError (str, hash) {
 if (hash.recoverable) {
@@ -585,20 +579,18 @@ case 0:return 4 ;
 break;
 case 1:return 9 ;
 break;
-case 2:return 'space'
+case 2:return 'space';
 break;
 case 3:return 10;
 break;
-case 4:return 12 ;
+case 4:return 6 ;
 break;
-case 5:return 6 ;
-break;
-case 6:return 13 ;
+case 5:return 'TXT' ;
 break;
 }
 },
-rules: [/^(?:info\b)/i,/^(?:[\s\n\r]+)/i,/^(?:[\s]+)/i,/^(?:showInfo\b)/i,/^(?:say\b)/i,/^(?:$)/i,/^(?:.)/i],
-conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6],"inclusive":true}}
+rules: [/^(?:info\b)/i,/^(?:[\s\n\r]+)/i,/^(?:[\s]+)/i,/^(?:showInfo\b)/i,/^(?:$)/i,/^(?:.)/i],
+conditions: {"INITIAL":{"rules":[0,1,2,3,4,5],"inclusive":true}}
 });
 return lexer;
 })();
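
One side effect worth flagging: the regenerated info-diagram parser above drops the say/TXT message handling that the previous generated code contained (cases 7 and 8, plus the say and TXT terminals, are all removed), leaving only the info and showInfo statements. A minimal sketch of what the regenerated parser still accepts; the module name and wiring here are hypothetical:

    // hypothetical wiring, names illustrative only
    info.parser.yy = { setInfo: (on) => console.log('setInfo:', on) }
    info.parser.parse('info showInfo\n') // still fine: triggers setInfo(true)
    // info.parser.parse('info say hello\n') would now fail: 'say' is no longer a token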