Mirror of https://github.com/mermaid-js/mermaid.git (synced 2025-09-14 12:59:46 +02:00)

Commit: WIP - fixing grammar, separating SimpleNode from ComplexNode
@@ -1,127 +1,94 @@
- /** mermaid
+ /**
- * https://knsv.github.io/mermaid
+ * Mindmap grammar for Langium
- * (c) 2015 Knut Sveidqvist
+ * Converted from mermaid's jison grammar
- * MIT license.
  */
- %lex
+ grammar Mindmap

- %options case-insensitive
+ // Entry rule - equivalent to the 'start' rule in jison
+ entry MindmapDocument:
+ // The document starts with the 'mindmap' keyword
+ (spaceLines+=SPACELINE)*
+ 'mindmap' (NL)?
+ (documentContent=DocumentContent)?;

- %{
+ // Document contains multiple statements separated by newlines
- // Pre-lexer code can go here
+ DocumentContent:
- %}
+ statements+=Statement (stop+=Stop statements+=Statement)* (stop+=Stop)?;
- %x NODE
- %x NSTR
- %x NSTR2
- %x ICON
- %x CLASS

- %%
+ // A stop is a newline, EOF, or a spaceline - used to separate statements
+ Stop:
+ NL | EOF | SPACELINE;

- \s*\%\%.* {yy.getLogger().trace('Found comment',yytext); return 'SPACELINE';}
+ // Statements can be nodes, icons, classes, or empty lines
- // \%\%[^\n]*\n /* skip comments */
+ Statement:
- "mindmap" return 'MINDMAP';
+ // The whitespace prefix determines nesting level in the mindmap
- ":::" { this.begin('CLASS'); }
+ (indent=INDENT)? (
- <CLASS>.+ { this.popState();return 'CLASS'; }
+ node=Node | // A node in the mindmap
- <CLASS>\n { this.popState();}
+ icon=IconDecoration | // Icon decoration for a node
- // [\s]*"::icon(" { this.begin('ICON'); }
+ cssClass=ClassDecoration // CSS class for a node
- "::icon(" { yy.getLogger().trace('Begin icon');this.begin('ICON'); }
+ ) |
- [\s]+[\n] {yy.getLogger().trace('SPACELINE');return 'SPACELINE' /* skip all whitespace */ ;}
+ SPACELINE; // Empty or comment lines
- [\n]+ return 'NL';
- <ICON>[^\)]+ { return 'ICON'; }
- <ICON>\) {yy.getLogger().trace('end icon');this.popState();}
- "-)" { yy.getLogger().trace('Exploding node'); this.begin('NODE');return 'NODE_DSTART'; }
- "(-" { yy.getLogger().trace('Cloud'); this.begin('NODE');return 'NODE_DSTART'; }
- "))" { yy.getLogger().trace('Explosion Bang'); this.begin('NODE');return 'NODE_DSTART'; }
- ")" { yy.getLogger().trace('Cloud Bang'); this.begin('NODE');return 'NODE_DSTART'; }
- "((" { this.begin('NODE');return 'NODE_DSTART'; }
- "{{" { this.begin('NODE');return 'NODE_DSTART'; }
- "(" { this.begin('NODE');return 'NODE_DSTART'; }
- "[" { this.begin('NODE');return 'NODE_DSTART'; }
- [\s]+ return 'SPACELIST' /* skip all whitespace */ ;
- // !(-\() return 'NODE_ID';
- [^\(\[\n\)\{\}]+ return 'NODE_ID';
- <<EOF>> return 'EOF';
- <NODE>["][`] { this.begin("NSTR2");}
- <NSTR2>[^`"]+ { return "NODE_DESCR";}
- <NSTR2>[`]["] { this.popState();}
- <NODE>["] { yy.getLogger().trace('Starting NSTR');this.begin("NSTR");}
- <NSTR>[^"]+ { yy.getLogger().trace('description:', yytext); return "NODE_DESCR";}
- <NSTR>["] {this.popState();}
- <NODE>[\)]\) {this.popState();yy.getLogger().trace('node end ))');return "NODE_DEND";}
- <NODE>[\)] {this.popState();yy.getLogger().trace('node end )');return "NODE_DEND";}
- <NODE>[\]] {this.popState();yy.getLogger().trace('node end ...',yytext);return "NODE_DEND";}
- <NODE>"}}" {this.popState();yy.getLogger().trace('node end ((');return "NODE_DEND";}
- <NODE>"(-" {this.popState();yy.getLogger().trace('node end (-');return "NODE_DEND";}
- <NODE>"-)" {this.popState();yy.getLogger().trace('node end (-');return "NODE_DEND";}
- <NODE>"((" {this.popState();yy.getLogger().trace('node end ((');return "NODE_DEND";}
- <NODE>"(" {this.popState();yy.getLogger().trace('node end ((');return "NODE_DEND";}
- <NODE>[^\)\]\(\}]+ { yy.getLogger().trace('Long description:', yytext); return 'NODE_DESCR';}
- <NODE>.+(?!\(\() { yy.getLogger().trace('Long description:', yytext); return 'NODE_DESCR';}
- // [\[] return 'NODE_START';
- // .+ return 'TXT' ;

- /lex
+ // A node can be either simple (just ID) or complex (with description)
+ Node:
+ SimpleNode | ComplexNode;

- %start start
+ // Simple node is just an identifier
+ SimpleNode:
+ id=NODE_ID;

- %% /* language grammar */
+ // Complex node has a description enclosed in brackets, parentheses, etc.
+ ComplexNode:
+ // Optional ID followed by a description with delimiters
+ (id=NODE_ID)? start=NODE_DSTART description=NODE_DESCR end=NODE_DEND;

- start
+ // Icon decoration for nodes
- // %{ : info document 'EOF' { return yy; } }
+ IconDecoration:
- : mindMap
+ '::icon(' name=ICON ')';
- | spaceLines mindMap
- ;

- spaceLines
+ // CSS class decoration for nodes
- : SPACELINE
+ ClassDecoration:
- | spaceLines SPACELINE
+ ':::' name=CLASS;
- | spaceLines NL
- ;

- mindMap
+ // Hidden terminal rules (comments, whitespace that should be ignored during parsing)
- : MINDMAP document { return yy; }
+ hidden terminal WS: /[ \t]+/;
- | MINDMAP NL document { return yy; }
- ;

- stop
+ // Terminal rules (lexer rules)
- : NL {yy.getLogger().trace('Stop NL ');}
+ terminal INDENT: /[ \t]+/;
- | EOF {yy.getLogger().trace('Stop EOF ');}
+ terminal SPACELINE: /\s*\%\%.*|[ \t]+\n/;
- | SPACELINE
+ terminal NL: /\n+/;
- | stop NL {yy.getLogger().trace('Stop NL2 ');}
+ terminal EOF: /$/;
- | stop EOF {yy.getLogger().trace('Stop EOF2 ');}
- ;
- document
- : document statement stop
- | statement stop
- ;

- statement
+ // Node related terminals with refined regex patterns to match the jison lexer
- : SPACELIST node { yy.getLogger().info('Node: ',$2.id);yy.addNode($1.length, $2.id, $2.descr, $2.type); }
+ terminal NODE_ID: /[^\(\[\n\)\{\}]+/;
- | SPACELIST ICON { yy.getLogger().trace('Icon: ',$2);yy.decorateNode({icon: $2}); }
+ terminal NODE_DSTART: /\(\(|\{\{|\(|\[|\-\)|\(\-|\)\)|\)/;
- | SPACELIST CLASS { yy.decorateNode({class: $2}); }
+ terminal NODE_DEND: /\)\)|\}\}|\)|\]|\(\-|\-\)|\(\(/;
- | SPACELINE { yy.getLogger().trace('SPACELIST');}
+ terminal NODE_DESCR: /[^"\)`\]]+/;
- | node { yy.getLogger().trace('Node: ',$1.id);yy.addNode(0, $1.id, $1.descr, $1.type); }
+ terminal ICON: /[^\)]+/;
- | ICON { yy.decorateNode({icon: $1}); }
+ terminal CLASS: /[^\n]+/;
- | CLASS { yy.decorateNode({class: $1}); }
- | SPACELIST
- ;

+ // We also need to implement these semantic actions from the jison grammar:
+ // - addNode(level, id, description, type)
+ // - decorateNode({icon: iconName})
+ // - decorateNode({class: className})
+ // - getType(startDelimiter, endDelimiter)

+ /**
+ * Interface for a MindmapNode.
+ * This represents the AST node for a mindmap node.
+ */
+ interface MindmapNode {
+ id: string;
+ description?: string;
+ type: NodeType;
+ level: number; // Indentation level (derived from the INDENT token)
+ icon?: string;
+ cssClass?: string;
+ children?: MindmapNode[];
+ }

- node
+ /**
- :nodeWithId
+ * The different node types in mindmap based on delimiters.
- |nodeWithoutId
+ * This corresponds to the yy.getType() function in the jison grammar.
- ;
+ */
+ type NodeType = 'DEFAULT' | 'CIRCLE' | 'CLOUD' | 'BANG' | 'HEXAGON' | 'ROUND';
- nodeWithoutId
- : NODE_DSTART NODE_DESCR NODE_DEND
- { yy.getLogger().trace("node found ..", $1); $$ = { id: $2, descr: $2, type: yy.getType($1, $3) }; }
- ;

- nodeWithId
- : NODE_ID { $$ = { id: $1, descr: $1, type: yy.nodeType.DEFAULT }; }
- | NODE_ID NODE_DSTART NODE_DESCR NODE_DEND
- { yy.getLogger().trace("node found ..", $1); $$ = { id: $1, descr: $3, type: yy.getType($2, $4) }; }
- ;
- %%
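The converted grammar above still leaves the jison semantic actions (addNode, decorateNode, getType) to be reimplemented against the Langium AST. As a rough sketch of the last of these, here is one way getType could map a ComplexNode's start delimiter onto the NodeType union declared in the new file; the delimiter-to-shape mapping is an assumption inferred from the old lexer's trace messages ('Cloud', 'Explosion Bang', etc.), not code from this commit:

// Sketch only, not part of the commit: maps the start delimiter of a ComplexNode
// to the NodeType union declared in the new grammar; the concrete mapping is an
// assumption based on the old jison lexer's trace messages.
type NodeType = 'DEFAULT' | 'CIRCLE' | 'CLOUD' | 'BANG' | 'HEXAGON' | 'ROUND';

function getType(startDelimiter: string, _endDelimiter: string): NodeType {
  switch (startDelimiter) {
    case '((':
      return 'CIRCLE'; // e.g. root((Root))
    case '(-':
      return 'CLOUD'; // the old lexer logs 'Cloud' for '(-'
    case '))':
      return 'BANG'; // the old lexer logs 'Explosion Bang' for '))'
    case '{{':
      return 'HEXAGON';
    case '(':
      return 'ROUND';
    default:
      return 'DEFAULT'; // plain ids and other delimiters (assumption)
  }
}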
@@ -4,32 +4,58 @@
  {
  "id": "info",
  "grammar": "src/language/info/info.langium",
- "fileExtensions": [".mmd", ".mermaid"]
+ "fileExtensions": [
+ ".mmd",
+ ".mermaid"
+ ]
  },
  {
  "id": "packet",
  "grammar": "src/language/packet/packet.langium",
- "fileExtensions": [".mmd", ".mermaid"]
+ "fileExtensions": [
+ ".mmd",
+ ".mermaid"
+ ]
  },
  {
  "id": "pie",
  "grammar": "src/language/pie/pie.langium",
- "fileExtensions": [".mmd", ".mermaid"]
+ "fileExtensions": [
+ ".mmd",
+ ".mermaid"
+ ]
  },
  {
  "id": "architecture",
  "grammar": "src/language/architecture/architecture.langium",
- "fileExtensions": [".mmd", ".mermaid"]
+ "fileExtensions": [
+ ".mmd",
+ ".mermaid"
+ ]
  },
  {
  "id": "gitGraph",
  "grammar": "src/language/gitGraph/gitGraph.langium",
- "fileExtensions": [".mmd", ".mermaid"]
+ "fileExtensions": [
+ ".mmd",
+ ".mermaid"
+ ]
  },
  {
  "id": "radar",
  "grammar": "src/language/radar/radar.langium",
- "fileExtensions": [".mmd", ".mermaid"]
+ "fileExtensions": [
+ ".mmd",
+ ".mermaid"
+ ]
+ },
+ {
+ "id": "mindmap",
+ "grammar": "src/language/mindmap/mindmap.langium",
+ "fileExtensions": [
+ ".mmd",
+ ".mermaid"
+ ]
  }
  ],
  "mode": "production",
@@ -11,6 +11,7 @@ export {
  Branch,
  Commit,
  Merge,
+ MindmapDoc as Mindmap,
  Statement,
  isInfo,
  isPacket,
@@ -32,6 +33,7 @@ export {
  ArchitectureGeneratedModule,
  GitGraphGeneratedModule,
  RadarGeneratedModule,
+ MindmapGeneratedModule,
  } from './generated/module.js';

  export * from './gitGraph/index.js';
@@ -1,49 +1,64 @@
- grammar MindMap
+ /**
+ * Mindmap grammar for Langium
+ * Converted from mermaid's jison grammar
+ */
+ grammar Mindmap

- import 'Terminals'
+ entry MindmapDoc:
+ MINDMAP_KEYWORD (newline=NL)?
- entry Diagram:
+ (statements+=Statement)*;
- keyword='mindmap'
- statements+=Statement*;

  Statement:
- RootNode | Node;
+ (indent=INDENTATION)? element=Element (terminator=NL)?;

- RootNode:
+ Element:
- root=Text (child=Node)?;
+ Node | IconDecoration | ClassDecoration;

  Node:
- depth=DEPTH text=Text (child=Node)?;
+ ComplexNode | SimpleNode;

- terminal DEPTH:
+ SimpleNode:
- /\t+/ | / {2,}/;
+ id=ID
+ // Ensure it does not match the structure of a ComplexNode
+ (NL | INDENTATION)?;

- Text:
+ ComplexNode:
- content=TEXT_CONTENT shorthand=SHORTHAND?;
+ (id=ID)?
+ start=(LPAREN|LBRACKET|LCURLY|START_CLOUD|DOUBLE_PAREN)
+ desc=(ID|STRING)
+ end=(RPAREN|RBRACKET|RCURLY|END_CLOUD|DOUBLE_PAREN);

- terminal TEXT_CONTENT:
+ IconDecoration:
- /[^\n\r\[\]]+/;
+ ICON_KEYWORD content=(ID|STRING) RPAREN;

- terminal SHORTHAND:
+ ClassDecoration:
- /\[[^\]]*\]/;
+ CLASS_KEYWORD content=(ID|STRING);

- terminal ICON:
+ // Keywords with fixed text patterns
- /::icon\([^\)]+\)/;
+ terminal MINDMAP_KEYWORD: 'mindmap';
+ terminal ICON_KEYWORD: '::icon(';
+ terminal CLASS_KEYWORD: ':::';

- terminal CLASSNAME:
+ // Delimiters - using unique string literals
- /:::[^\n]*/;
+ terminal LPAREN: '(';
+ terminal RPAREN: ')';
+ terminal DOUBLE_PAREN: '((' | '))'; // Combined to avoid regex conflicts
+ terminal LBRACKET: '[';
+ terminal RBRACKET: ']';
+ terminal LCURLY: '{{';
+ terminal RCURLY: '}}';
+ terminal START_CLOUD: '(-';
+ terminal END_CLOUD: '-)';

- hidden terminal WS:
+ // Basic token types
- /\s+/;
+ terminal ID: /[a-zA-Z0-9_\-\.\/]+/;
+ terminal STRING: /"[^"]*"|'[^']*'/;
+ terminal INDENTATION: /[ \t]{2,}/; // Two or more spaces/tabs for indentation
+ terminal NL: /\r?\n/;

- hidden terminal NEWLINE:
+ // Hidden tokens
- /\r?\n/;
+ hidden terminal WS: /[ \t]/; // Single space or tab for hidden whitespace
+ hidden terminal ML_COMMENT: /\%\%[^\n]*/;

- hidden terminal ML_COMMENT:
+ // Type definition for node types
- /\/\*[\s\S]*?\*\//;
+ type NodeType = 'DEFAULT' | 'CIRCLE' | 'CLOUD' | 'BANG' | 'HEXAGON' | 'ROUND';

- hidden terminal SL_COMMENT:
- /\/\/[^\n\r]*/;

- hidden terminal DIRECTIVE:
- /%%[^\n\r]*/;
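Under the reworked rules, a line such as root((Root)) should parse as a Statement whose element is a ComplexNode, while a bare identifier falls through to SimpleNode. A rough sketch of the AST shape implied by the assignments above (the $type discriminators follow Langium's usual convention of using the rule name; the concrete values are assumptions, not verified parser output):

// Sketch of the expected AST for the input line `root((Root))` (assumption, not verified output).
interface ComplexNodeSketch {
  $type: 'ComplexNode';
  id?: string; // 'root'
  start: string; // '((' (DOUBLE_PAREN)
  desc: string; // 'Root'
  end: string; // '))' (DOUBLE_PAREN)
}

interface StatementSketch {
  $type: 'Statement';
  indent?: string; // INDENTATION, absent on the root line
  element: ComplexNodeSketch; // or a SimpleNode / IconDecoration / ClassDecoration
  terminator?: string; // NL
}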
@@ -1,56 +0,0 @@
- grammar GitGraph
- import "../common/common";
- import "reference";
-
- entry GitGraph:
- NEWLINE*
- ('gitGraph' | 'gitGraph' ':' | 'gitGraph:' | ('gitGraph' Direction ':'))
- (
- NEWLINE
- | TitleAndAccessibilities
- | statements+=Statement
- )*
- ;
-
- Statement
- : Commit
- | Branch
- | Merge
- | Checkout
- | CherryPicking
- ;
-
- Direction:
- dir=('LR' | 'TB' | 'BT');
-
- Commit:
- 'commit'
- (
- 'id:' id=STRING
- |'msg:'? message=STRING
- |'tag:' tags+=STRING
- |'type:' type=('NORMAL' | 'REVERSE' | 'HIGHLIGHT')
- )* EOL;
- Branch:
- 'branch' name=(REFERENCE|STRING)
- ('order:' order=INT)?
- EOL;
-
- Merge:
- 'merge' branch=(REFERENCE|STRING)
- (
- 'id:' id=STRING
- |'tag:' tags+=STRING
- |'type:' type=('NORMAL' | 'REVERSE' | 'HIGHLIGHT')
- )* EOL;
-
- Checkout:
- ('checkout'|'switch') branch=(REFERENCE|STRING) EOL;
-
- CherryPicking:
- 'cherry-pick'
- (
- 'id:' id=STRING
- |'tag:' tags+=STRING
- |'parent:' parent=STRING
- )* EOL;
@@ -4,242 +4,74 @@ import type {
  LangiumSharedCoreServices,
  Module,
  PartialLangiumCoreServices,
- LanguageMetaData,
- Grammar,
  } from 'langium';
  import {
- inject,
+ EmptyFileSystem,
  createDefaultCoreModule,
  createDefaultSharedCoreModule,
- EmptyFileSystem,
+ inject,
- loadGrammarFromJson,
  } from 'langium';

+ import { MermaidGeneratedSharedModule, MindmapGeneratedModule } from '../generated/module.js';
+ import { MindmapTokenBuilder } from './tokenBuilder.js';
  import { CommonValueConverter } from '../common/valueConverter.js';
- import { MermaidGeneratedSharedModule } from '../generated/module.js';
- import { MindMapTokenBuilder } from './tokenBuilder.js';

- export const MindMapLanguageMetaData: LanguageMetaData = {
+ /**
- languageId: 'mindmap',
+ * Declaration of `Mindmap` services.
- fileExtensions: ['.mmd', '.mermaid'],
+ */
- caseInsensitive: false,
+ interface MindmapAddedServices {
- mode: 'production',
- };

- // Define a minimal grammar directly in JSON format
- let loadedMindMapGrammar: Grammar | undefined;
- export const MindMapGrammar = (): Grammar =>
- loadedMindMapGrammar ??
- (loadedMindMapGrammar = loadGrammarFromJson(`{
- "$type": "Grammar",
- "isDeclared": true,
- "name": "MindMap",
- "imports": [],
- "rules": [
- {
- "$type": "ParserRule",
- "entry": true,
- "name": "Diagram",
- "definition": {
- "$type": "Group",
- "elements": [
- {
- "$type": "Assignment",
- "feature": "keyword",
- "operator": "=",
- "terminal": {
- "$type": "Keyword",
- "value": "mindmap"
- }
- },
- {
- "$type": "Assignment",
- "feature": "statements",
- "operator": "+=",
- "terminal": {
- "$type": "Alternatives",
- "elements": [
- {
- "$type": "RuleCall",
- "rule": {"$ref": "#/rules@1"},
- "arguments": []
- },
- {
- "$type": "RuleCall",
- "rule": {"$ref": "#/rules@2"},
- "arguments": []
- }
- ]
- },
- "cardinality": "*"
- }
- ]
- },
- "definesHiddenTokens": false,
- "fragment": false,
- "hiddenTokens": [],
- "parameters": [],
- "wildcard": false
- },
- {
- "$type": "ParserRule",
- "name": "RootNode",
- "definition": {
- "$type": "Group",
- "elements": [
- {
- "$type": "Assignment",
- "feature": "content",
- "operator": "=",
- "terminal": {
- "$type": "RuleCall",
- "rule": {"$ref": "#/rules@3"},
- "arguments": []
- }
- }
- ]
- },
- "definesHiddenTokens": false,
- "entry": false,
- "fragment": false,
- "hiddenTokens": [],
- "parameters": [],
- "wildcard": false
- },
- {
- "$type": "ParserRule",
- "name": "ChildNode",
- "definition": {
- "$type": "Group",
- "elements": [
- {
- "$type": "Assignment",
- "feature": "depth",
- "operator": "=",
- "terminal": {
- "$type": "RuleCall",
- "rule": {"$ref": "#/rules@4"},
- "arguments": []
- }
- },
- {
- "$type": "Assignment",
- "feature": "content",
- "operator": "=",
- "terminal": {
- "$type": "RuleCall",
- "rule": {"$ref": "#/rules@3"},
- "arguments": []
- }
- }
- ]
- },
- "definesHiddenTokens": false,
- "entry": false,
- "fragment": false,
- "hiddenTokens": [],
- "parameters": [],
- "wildcard": false
- },
- {
- "$type": "TerminalRule",
- "name": "WORD",
- "type": {"$type": "ReturnType", "name": "string"},
- "definition": {
- "$type": "RegexToken",
- "regex": "/[a-zA-Z0-9_-]+/"
- },
- "fragment": false,
- "hidden": false
- },
- {
- "$type": "TerminalRule",
- "name": "INDENT",
- "type": {"$type": "ReturnType", "name": "string"},
- "definition": {
- "$type": "RegexToken",
- "regex": "/(?:\\\\t+| {2,})/"
- },
- "fragment": false,
- "hidden": false
- },
- {
- "$type": "TerminalRule",
- "name": "WS",
- "definition": {
- "$type": "RegexToken",
- "regex": "/\\\\s+/"
- },
- "fragment": false,
- "hidden": true
- },
- {
- "$type": "TerminalRule",
- "name": "NL",
- "definition": {
- "$type": "RegexToken",
- "regex": "/\\\\r?\\\\n/"
- },
- "fragment": false,
- "hidden": false
- },
- {
- "$type": "TerminalRule",
- "name": "ML_COMMENT",
- "definition": {
- "$type": "RegexToken",
- "regex": "/\\\\/\\\\*[\\\\s\\\\S]*?\\\\*\\\\//"
- },
- "fragment": false,
- "hidden": true
- },
- {
- "$type": "TerminalRule",
- "name": "SL_COMMENT",
- "definition": {
- "$type": "RegexToken",
- "regex": "/(?:%+|\\\\/{2,})[^\\\\n\\\\r]*/"
- },
- "fragment": false,
- "hidden": true
- }
- ],
- "definesHiddenTokens": false,
- "hiddenTokens": [],
- "interfaces": [],
- "types": [],
- "usedGrammars": []
- }`));

- interface MindMapAddedServices {
  parser: {
- TokenBuilder: MindMapTokenBuilder;
+ TokenBuilder: MindmapTokenBuilder;
  ValueConverter: CommonValueConverter;
  };
  }

- export type MindMapServices = LangiumCoreServices & MindMapAddedServices;
+ /**
+ * Union of Langium default services and `Mindmap` services.
+ */
+ export type MindmapServices = LangiumCoreServices & MindmapAddedServices;

- export const MindMapModule: Module<
+ /**
- MindMapServices,
+ * Dependency injection module that overrides Langium default services and
- PartialLangiumCoreServices & MindMapAddedServices
+ * contributes the declared `Mindmap` services.
+ */
+ export const MindmapModule: Module<
+ MindmapServices,
+ PartialLangiumCoreServices & MindmapAddedServices
  > = {
  parser: {
- TokenBuilder: () => new MindMapTokenBuilder(),
+ TokenBuilder: () => new MindmapTokenBuilder(),
  ValueConverter: () => new CommonValueConverter(),
  },
- Grammar: MindMapGrammar,
- LanguageMetaData: () => MindMapLanguageMetaData,
  };

- export function createMindMapServices(context: DefaultSharedCoreModuleContext = EmptyFileSystem): {
+ /**
+ * Create the full set of services required by Langium.
+ *
+ * First inject the shared services by merging two modules:
+ * - Langium default shared services
+ * - Services generated by langium-cli
+ *
+ * Then inject the language-specific services by merging three modules:
+ * - Langium default language-specific services
+ * - Services generated by langium-cli
+ * - Services specified in this file
+ * @param context - Optional module context with the LSP connection
+ * @returns An object wrapping the shared services and the language-specific services
+ */
+ export function createMindmapServices(context: DefaultSharedCoreModuleContext = EmptyFileSystem): {
  shared: LangiumSharedCoreServices;
- MindMap: MindMapServices;
+ Mindmap: MindmapServices;
  } {
  const shared: LangiumSharedCoreServices = inject(
  createDefaultSharedCoreModule(context),
  MermaidGeneratedSharedModule
  );
- const MindMap: MindMapServices = inject(createDefaultCoreModule({ shared }), MindMapModule);
+ const Mindmap: MindmapServices = inject(
- shared.ServiceRegistry.register(MindMap);
+ createDefaultCoreModule({ shared }),
- return { shared, MindMap };
+ MindmapGeneratedModule,
+ MindmapModule
+ );
+ shared.ServiceRegistry.register(Mindmap);
+ return { shared, Mindmap };
  }
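For reference, a minimal usage sketch of the reworked factory, mirroring how test-util.ts wires it up later in this commit; the file location of this snippet and therefore the relative import path are illustrative assumptions:

// Sketch: obtain a parser from the new dependency-injection wiring.
// The import path assumes a file living next to the tests, as test-util.ts does.
import { createMindmapServices } from '../src/language/index.js';

const { Mindmap } = createMindmapServices();
const parser = Mindmap.parser.LangiumParser;

const result = parser.parse('mindmap\nroot((Root))\n  child1((Child 1))');
console.log(result.lexerErrors, result.parserErrors);
console.log(JSON.stringify(result.value, null, 2));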
@@ -1,6 +1,6 @@
  import { AbstractMermaidTokenBuilder } from '../common/index.js';

- export class MindMapTokenBuilder extends AbstractMermaidTokenBuilder {
+ export class MindmapTokenBuilder extends AbstractMermaidTokenBuilder {
  public constructor() {
  super(['mindmap']);
  }
packages/parser/tests/mindmap-debug.js (new file, 7 lines)
@@ -0,0 +1,7 @@
+ // Debug file to print the structure of a parsed mindmap
+ import { mindMapParse } from './test-util.js';
+
+ const result = mindMapParse('mindmap\nroot\n child1\n child2');
+
+ console.log('Parser result:', result.value);
+ console.log('Statement structure:', JSON.stringify(result.value?.statements[0], null, 2));
@@ -1,5 +1,6 @@
  import { describe, expect, it } from 'vitest';
- import { mindMapParse as parse } from './test-util.js';
+ import { mindmapParse as parse } from './test-util.js';
+ import { keys } from '../../mermaid-flowchart-elk/dist/packages/mermaid/src/diagrams/state/id-cache';

  // Tests for mindmap parser with simple root and child nodes
  describe('MindMap Parser Tests', () => {
@@ -30,8 +31,9 @@ describe('MindMap Parser Tests', () => {
  expect(rootNode.content).toBe('root');
  });

- it('should parse a mindmap with child nodes', () => {
+ it.only('should parse a mindmap with child nodes', () => {
- const _result = parse(
+ console.log('BEFORE RESULT:');
+ const result = parse(
  'mindmap\nroot((Root))\n child1((Child 1))\n child2((Child 2))\n grandchild((Grand Child))'
  );

@@ -40,28 +42,49 @@ describe('MindMap Parser Tests', () => {
  // Statements length: result.value?.statements?.length
  // If statements exist, they would have properties like id, type, text, depth

+ const statements = result.value.statements;
+ const s0 = statements[0];
+ const s1 = statements[1];
+
+ console.debug('Statements:', s0);
+
+ expect(result.value.statements[0].$type).toBe('Statement');
+ // expect(result.value.statements[0].element.$type).toBe('ComplexNode');
+ expect(s0.element.$type).toBe('ComplexNode');
+ expect(Object.keys(s0)).toBe('Root');
+ expect(s0.element.ID).toBe('Root');
+
+ expect(result.value.statements[1].$type).toBe('Statement');
+ expect(result.value.statements[1].element.$type).toBe('ComplexNode');
+ expect(result.value.statements[1].element.ID).toBe('Root');
+ expect(result.value.statements[1].element.desc).toBe('Root');
+ expect(Object.keys(result.value.statements[1].element)).toBe('root');
+ expect(result.value.statements[1].indent).toBe('indent');
+ expect(Object.keys(result.value.statements[1].element)).toBe(true);
+ expect(result.value.statements[1].element.id).toBe('SimpleNode');
+
  // Temporarily commenting out failing assertions
  // expect(result.successful).toBe(true);
- // // Check that there are 4 statements: mindmap, root, child1, child2, grandchild
+ // Check that there are 4 statements: mindmap, root, child1, child2, grandchild
- // expect(result.value.statements.length).toBe(5);
+ expect(result.value.statements.length).toBe(5);
- // // Check that the first statement is the mindmap
+ // Check that the first statement is the mindmap
- // expect(result.value.statements[0].type).toBe('mindmap');
+ expect(result.value.statements[0].type).toBe('mindmap');
- // // Check that the second statement is the root
+ // Check that the second statement is the root
- // expect(result.value.statements[1].type.type).toBe('circle');
+ expect(result.value.statements[1].type.type).toBe('circle');
- // expect(result.value.statements[1].text).toBe('Root');
+ expect(result.value.statements[1].text).toBe('Root');
- // expect(result.value.statements[1].depth).toBe(0);
+ expect(result.value.statements[1].depth).toBe(0);
- // // Check that the third statement is the first child
+ // Check that the third statement is the first child
- // expect(result.value.statements[2].type.type).toBe('circle');
+ expect(result.value.statements[2].type.type).toBe('circle');
- // expect(result.value.statements[2].text).toBe('Child 1');
+ expect(result.value.statements[2].text).toBe('Child 1');
- // expect(result.value.statements[2].depth).toBe(1);
+ expect(result.value.statements[2].depth).toBe(1);
- // // Check that the fourth statement is the second child
+ // Check that the fourth statement is the second child
- // expect(result.value.statements[3].type.type).toBe('circle');
+ expect(result.value.statements[3].type.type).toBe('circle');
- // expect(result.value.statements[3].text).toBe('Child 2');
+ expect(result.value.statements[3].text).toBe('Child 2');
- // expect(result.value.statements[3].depth).toBe(1);
+ expect(result.value.statements[3].depth).toBe(1);
- // // Check that the fifth statement is the grandchild
+ // Check that the fifth statement is the grandchild
- // expect(result.value.statements[4].type.type).toBe('circle');
+ expect(result.value.statements[4].type.type).toBe('circle');
- // expect(result.value.statements[4].text).toBe('Grand Child');
+ expect(result.value.statements[4].text).toBe('Grand Child');
- // expect(result.value.statements[4].depth).toBe(2);
+ expect(result.value.statements[4].depth).toBe(2);
  });
  });
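Several of the newly enabled assertions above cannot pass as written: Object.keys() returns an array rather than a string, element.ID is not a feature the grammar declares, and indent holds whitespace rather than the literal 'indent'. A sketch of what they presumably intend, restated against the feature names actually assigned in the new mindmap grammar (element, id, desc, indent); the concrete expected values are assumptions about parsing root((Root)) with an indented child, not verified output:

// Sketch only, not part of the commit: the apparent intent of the assertions above.
const root = result.value.statements[0];
expect(root.$type).toBe('Statement');
expect(root.element.$type).toBe('ComplexNode');
expect(root.element.id).toBe('root'); // assumed: ID token before '(('
expect(root.element.desc).toBe('Root'); // assumed: text between '((' and '))'

const child1 = result.value.statements[1];
expect(child1.indent).toBeDefined(); // leading whitespace token, not the string 'indent'
expect(child1.element.$type).toBe('ComplexNode');
expect(child1.element.desc).toBe('Child 1');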
packages/parser/tests/test-print-esm.js (new file, 24 lines)
@@ -0,0 +1,24 @@
+ // Test file to print the parsed structure of a mindmap
+ import { createMindMapServices } from '../src/language/index.js';
+
+ // Create services
+ const mindMapServices = createMindMapServices().MindMap;
+ const mindMapParser = mindMapServices.parser.LangiumParser;
+
+ // Function to parse mindmap
+ function parse(input) {
+ return mindMapParser.parse(input);
+ }
+
+ // Test with a simple mindmap
+ const result = parse('mindmap\nroot\n child1\n child2');
+
+ // Print the result structure
+ console.log('Parse result:');
+ console.log(JSON.stringify(result.value, null, 2));
+
+ // Print the first statement
+ if (result.value?.statements?.[0]) {
+ console.log('\nFirst statement:');
+ console.log(JSON.stringify(result.value.statements[0], null, 2));
+ }
packages/parser/tests/test-print.js (new file, 27 lines)
@@ -0,0 +1,27 @@
+ // Test parsing
+ import { createMindMapServices } from '../lib/language/mindmap/module.js';
+ import { parseDocument } from 'langium';
+
+ // Create services for handling the language
+ const services = createMindMapServices();
+ // Get the service for parsing documents
+ const documentBuilder = services.MindMap.shared.workspace.DocumentBuilder;
+
+ // Sample mindmap text to parse
+ const text = 'mindmap\nroot\n child1\n child2';
+
+ // Parse the document
+ const doc = documentBuilder.buildDocuments([
+ {
+ uri: 'file:///test.mindmap',
+ content: text,
+ version: 1,
+ },
+ ]);
+
+ // Get the parsed document
+ const result = Array.isArray(doc) ? doc[0] : undefined;
+ if (result) {
+ console.log('AST:', JSON.stringify(result.parseResult.value, null, 2));
+ console.log('First node:', JSON.stringify(result.parseResult.value.statements?.[0], null, 2));
+ }
@@ -13,8 +13,8 @@ import type {
  PacketServices,
  GitGraph,
  GitGraphServices,
- MindMap,
+ Mindmap,
- MindMapServices,
+ MindmapServices,
  } from '../src/language/index.js';
  import {
  createArchitectureServices,
@@ -23,7 +23,7 @@ import {
  createRadarServices,
  createPacketServices,
  createGitGraphServices,
- createMindMapServices,
+ createMindmapServices,
  } from '../src/language/index.js';

  const consoleMock = vi.spyOn(console, 'log').mockImplementation(() => undefined);
@@ -108,13 +108,13 @@ export function createGitGraphTestServices() {
  }
  export const gitGraphParse = createGitGraphTestServices().parse;

- const mindMapServices: MindMapServices = createMindMapServices().MindMap;
+ const mindmapServices: MindmapServices = createMindmapServices().Mindmap;
- const mindMapParser: LangiumParser = mindMapServices.parser.LangiumParser;
+ const mindmapParser: LangiumParser = mindmapServices.parser.LangiumParser;
- export function createMindMapTestServices() {
+ export function createMindmapTestServices() {
  const parse = (input: string) => {
- return mindMapParser.parse<MindMap>(input);
+ return mindmapParser.parse<Mindmap>(input);
  };

- return { services: mindMapServices, parse };
+ return { services: mindmapServices, parse };
  }
- export const mindMapParse = createMindMapTestServices().parse;
+ export const mindmapParse = createMindmapTestServices().parse;