mirror of https://github.com/mermaid-js/mermaid.git
synced 2025-09-18 23:09:49 +02:00

Commit: flow-chev-edges.spec.js going through

debug-edge-parsing.js (new file, 31 lines)
@@ -0,0 +1,31 @@
+import { FlowDB } from './packages/mermaid/src/diagrams/flowchart/flowDb.ts';
+import flow from './packages/mermaid/src/diagrams/flowchart/parser/flowParserAdapter.ts';
+
+// Set up the test environment
+flow.yy = new FlowDB();
+flow.yy.clear();
+
+console.log('=== Testing basic edge parsing ===');
+console.log('Input: "graph TD;A-->B;"');
+
+try {
+  const result = flow.parse('graph TD;A-->B;');
+  console.log('Parse result:', result);
+
+  const vertices = flow.yy.getVertices();
+  const edges = flow.yy.getEdges();
+
+  console.log('Vertices:', vertices);
+  console.log('Vertices size:', vertices.size);
+  console.log('Vertices keys:', Array.from(vertices.keys()));
+
+  console.log('Edges:', edges);
+  console.log('Edges length:', edges.length);
+
+  // Check specific vertices
+  console.log('Vertex A:', vertices.get('A'));
+  console.log('Vertex B:', vertices.get('B'));
+} catch (error) {
+  console.error('Parse error:', error);
+  console.error('Error stack:', error.stack);
+}
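
Note: all three scripts added in this commit import .ts sources from plain .js/.mjs
entry points, so they need a TypeScript-capable runner; a run sketch, assuming tsx
is available (an assumption — the repo's own tooling may differ):

  npx tsx debug-edge-parsing.js

Judging by the accessors used above (.size, .get, .keys on vertices; .length on
edges), FlowDB exposes vertices as a Map and edges as an array, so a passing run
on 'graph TD;A-->B;' should print something shaped like:

  Vertices size: 2
  Vertices keys: [ 'A', 'B' ]
  Edges length: 1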

packages/mermaid/src/diagrams/flowchart/parser/flowLexer.ts (modified)
@@ -59,8 +59,7 @@ const EOF = createToken({
 // Complex pattern to handle all edge cases including punctuation at start/end
 const NODE_STRING = createToken({
   name: 'NODE_STRING',
-  pattern:
-    /\\\w+|\w+\\|&[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|-[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|[<>^v][\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|:[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|,[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|[\w!"#$%&'*+,./:?\\`](?:[\w!"#$%&'*+,./:?\\`]|-(?![.=-])|\.(?!-))*[\w!"#$%&'*+,./:?\\`-]|[\w!"#$%&'*+,./:?\\`]|&|-|\\|\//,
+  pattern: /[A-Za-z0-9_]+/,
 });

 // ============================================================================
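
Note: the removed NODE_STRING regex could match 'A-' out of 'A-->B' (its final
character class accepts a trailing '-'), splitting the arrow before the LINK
token ever saw it — presumably why test-lexer.mjs below probes 'A-' and '>B'.
A sketch of the behavioural difference, using a simplified tail of the removed
pattern (runnable in any JS REPL):

  // Simplified from the removed regex: an id char, guarded dashes/dots, then one
  // final char that may itself be a dash.
  const OLD_TAIL = /\w(?:\w|-(?![.=-])|\.(?!-))*[\w-]/;
  console.log('A-->B'.match(OLD_TAIL)[0]); // 'A-' — takes the first dash of the arrow

  const NEW_NODE_STRING = /[A-Za-z0-9_]+/;
  console.log('A-->B'.match(NEW_NODE_STRING)[0]); // 'A' — leaves '-->' intact for LINK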
@@ -207,42 +206,36 @@ const ShapeDataStart = createToken({
 const LINK = createToken({
   name: 'LINK',
   pattern: /[<ox]?--+[>ox-]/,
-  longer_alt: NODE_STRING,
 });

 const START_LINK = createToken({
   name: 'START_LINK',
   pattern: /[<ox]?--/,
   push_mode: 'edgeText_mode',
-  longer_alt: NODE_STRING,
 });

 // Regular thick links without text
 const THICK_LINK = createToken({
   name: 'THICK_LINK',
   pattern: /[<ox]?==+[=>ox-]?/,
-  longer_alt: NODE_STRING,
 });

 const START_THICK_LINK = createToken({
   name: 'START_THICK_LINK',
   pattern: /[<ox]?==/,
   push_mode: 'thickEdgeText_mode',
-  longer_alt: NODE_STRING,
 });

 // Regular dotted links without text
 const DOTTED_LINK = createToken({
   name: 'DOTTED_LINK',
   pattern: /[<ox]?-?\.+-[>ox-]?/,
-  longer_alt: NODE_STRING,
 });

 const START_DOTTED_LINK = createToken({
   name: 'START_DOTTED_LINK',
   pattern: /[<ox]?-\./,
   push_mode: 'dottedEdgeText_mode',
-  longer_alt: NODE_STRING,
 });

 // ============================================================================
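
Note: in Chevrotain, longer_alt tells the lexer to also try the named alternative
after a match and prefer it when it matches a longer prefix. With the simplified
NODE_STRING no longer able to match '-', these overrides on the link tokens had
nothing left to do, hence their removal. A minimal, self-contained illustration
of the mechanism (hypothetical tokens, not from this grammar):

  import { createToken, Lexer } from 'chevrotain';

  const Word = createToken({ name: 'Word', pattern: /[a-z]+/ });
  const Key = createToken({ name: 'Key', pattern: /if/, longer_alt: Word });
  const WS = createToken({ name: 'WS', pattern: /\s+/, group: Lexer.SKIPPED });

  const lexer = new Lexer([Key, Word, WS]);
  console.log(lexer.tokenize('if iffy').tokens.map((t) => [t.image, t.tokenType.name]));
  // [['if', 'Key'], ['iffy', 'Word']] — longer_alt lets the longer Word win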
@@ -295,13 +288,11 @@ const DiamondStart = createToken({
 const Colon = createToken({
   name: 'Colon',
   pattern: /:/,
-  longer_alt: NODE_STRING,
 });

 const Comma = createToken({
   name: 'Comma',
   pattern: /,/,
-  longer_alt: NODE_STRING,
 });

 const Pipe = createToken({
@@ -319,7 +310,6 @@ const Ampersand = createToken({
 const Minus = createToken({
   name: 'Minus',
   pattern: /-/,
-  longer_alt: NODE_STRING,
 });

 // Additional special character tokens for node IDs - currently unused but kept for future reference
@@ -578,6 +568,9 @@ const multiModeLexerDefinition = {
 // Basic punctuation (must come before NODE_STRING for proper tokenization)
 Pipe,
 Ampersand,
+Minus,
+Colon,
+Comma,

 // Node strings and numbers (must come after punctuation)
 NODE_STRING,
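
Note: Chevrotain lexers are first-match-wins in definition order rather than
longest-match, so with the longer_alt escape hatches removed, the position of
these single-character tokens relative to NODE_STRING and the link tokens is the
remaining disambiguator — which is what the two ordering comments above are
about. A toy sketch of the pitfall (hypothetical tokens):

  import { createToken, Lexer } from 'chevrotain';

  const Dash = createToken({ name: 'Dash', pattern: /-/ });
  const Arrow = createToken({ name: 'Arrow', pattern: /--+>/ });

  // Arrow first: '-->' lexes as a single Arrow token.
  console.log(new Lexer([Arrow, Dash]).tokenize('-->').tokens.map((t) => t.tokenType.name));
  // Dash first: two Dash tokens, then a lexing error on the dangling '>'.
  console.log(new Lexer([Dash, Arrow]).tokenize('-->').errors.length > 0); // true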

simple-arrow-test.js (new file, 16 lines)
@@ -0,0 +1,16 @@
+import { FlowDB } from './packages/mermaid/src/diagrams/flowchart/flowDb.ts';
+import flow from './packages/mermaid/src/diagrams/flowchart/parser/flowParserAdapter.ts';
+
+// Set up the test environment
+flow.yy = new FlowDB();
+flow.yy.clear();
+
+console.log('=== Testing simple arrow ===');
+console.log('Input: "-->"');
+
+try {
+  const result = flow.parse('-->');
+  console.log('Parse result:', result);
+} catch (error) {
+  console.error('Parse error:', error.message);
+}
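
Note: a bare '-->' is not a complete flowchart, so for this probe the interesting
signal is which error the parser raises, not whether the input parses.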

test-lexer.mjs (new file, 21 lines)
@@ -0,0 +1,21 @@
+// Test the actual lexer to see what tokens are generated
+import { FlowchartLexer } from './packages/mermaid/src/diagrams/flowchart/parser/flowLexer.ts';
+
+const testInputs = ['A', 'A-->B', 'graph TD;A-->B;', '-->', 'A-', '>B'];
+
+console.log('Testing actual lexer:');
+testInputs.forEach((input) => {
+  console.log(`\nInput: "${input}"`);
+  try {
+    const result = FlowchartLexer.tokenize(input);
+    if (result.errors.length > 0) {
+      console.log('Errors:', result.errors);
+    }
+    console.log(
+      'Tokens:',
+      result.tokens.map((t) => [t.image, t.tokenType.name])
+    );
+  } catch (error) {
+    console.log('Error:', error.message);
+  }
+});
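
Note: given the token patterns and ordering in this commit (and assuming the link
tokens are tried before Minus), the probe inputs should now come out roughly as
follows — inferred from the patterns above, not captured output:

  'A-->B'  ->  [['A', 'NODE_STRING'], ['-->', 'LINK'], ['B', 'NODE_STRING']]
  '-->'    ->  [['-->', 'LINK']]
  'A-'     ->  [['A', 'NODE_STRING'], ['-', 'Minus']]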