Mirror of https://github.com/mermaid-js/mermaid.git (synced 2025-09-19 15:30:03 +02:00)

Commit: flow-chev-edges.spec.js going through
debug-edge-parsing.js (new file, 31 lines)
@@ -0,0 +1,31 @@
+import { FlowDB } from './packages/mermaid/src/diagrams/flowchart/flowDb.ts';
+import flow from './packages/mermaid/src/diagrams/flowchart/parser/flowParserAdapter.ts';
+
+// Set up the test environment
+flow.yy = new FlowDB();
+flow.yy.clear();
+
+console.log('=== Testing basic edge parsing ===');
+console.log('Input: "graph TD;A-->B;"');
+
+try {
+  const result = flow.parse('graph TD;A-->B;');
+  console.log('Parse result:', result);
+
+  const vertices = flow.yy.getVertices();
+  const edges = flow.yy.getEdges();
+
+  console.log('Vertices:', vertices);
+  console.log('Vertices size:', vertices.size);
+  console.log('Vertices keys:', Array.from(vertices.keys()));
+
+  console.log('Edges:', edges);
+  console.log('Edges length:', edges.length);
+
+  // Check specific vertices
+  console.log('Vertex A:', vertices.get('A'));
+  console.log('Vertex B:', vertices.get('B'));
+} catch (error) {
+  console.error('Parse error:', error);
+  console.error('Error stack:', error.stack);
+}
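A note on running this debug script: it imports .ts sources directly, so it presumably needs a TypeScript-aware runner (for example `npx tsx debug-edge-parsing.js`; tsx is an assumption here, any ts-node-style loader should work). The `vertices.size` and `vertices.get('A')` calls assume `FlowDB.getVertices()` returns a Map keyed by vertex id, which is consistent with how the script reads it.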
@@ -59,8 +59,7 @@ const EOF = createToken({
 // Complex pattern to handle all edge cases including punctuation at start/end
 const NODE_STRING = createToken({
   name: 'NODE_STRING',
-  pattern:
-    /\\\w+|\w+\\|&[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|-[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|[<>^v][\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|:[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|,[\w!"#$%&'*+,./:?\\`]+[\w!"#$%&'*+,./:?\\`-]*|[\w!"#$%&'*+,./:?\\`](?:[\w!"#$%&'*+,./:?\\`]|-(?![.=-])|\.(?!-))*[\w!"#$%&'*+,./:?\\`-]|[\w!"#$%&'*+,./:?\\`]|&|-|\\|\//,
+  pattern: /[A-Za-z0-9_]+/,
 });
 
 // ============================================================================
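This hunk, apparently in the Chevrotain lexer that test-lexer.mjs below imports from flowLexer.ts, trades the catch-all NODE_STRING regex for plain word characters. A standalone sketch of what that narrows (example ids assumed, not taken from the test suite):

// The old pattern accepted punctuation inside ids (e.g. 'a.b', 'a:b');
// the simplified pattern stops at the first non-word character.
const simplified = /^[A-Za-z0-9_]+/;
console.log('a.b'.match(simplified)?.[0]); // 'a', so '.b' must now be tokenized separately
console.log('A_1'.match(simplified)?.[0]); // 'A_1'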
@@ -207,42 +206,36 @@ const ShapeDataStart = createToken({
 const LINK = createToken({
   name: 'LINK',
   pattern: /[<ox]?--+[>ox-]/,
-  longer_alt: NODE_STRING,
 });
 
 const START_LINK = createToken({
   name: 'START_LINK',
   pattern: /[<ox]?--/,
   push_mode: 'edgeText_mode',
-  longer_alt: NODE_STRING,
 });
 
 // Regular thick links without text
 const THICK_LINK = createToken({
   name: 'THICK_LINK',
   pattern: /[<ox]?==+[=>ox-]?/,
-  longer_alt: NODE_STRING,
 });
 
 const START_THICK_LINK = createToken({
   name: 'START_THICK_LINK',
   pattern: /[<ox]?==/,
   push_mode: 'thickEdgeText_mode',
-  longer_alt: NODE_STRING,
 });
 
 // Regular dotted links without text
 const DOTTED_LINK = createToken({
   name: 'DOTTED_LINK',
   pattern: /[<ox]?-?\.+-[>ox-]?/,
-  longer_alt: NODE_STRING,
 });
 
 const START_DOTTED_LINK = createToken({
   name: 'START_DOTTED_LINK',
   pattern: /[<ox]?-\./,
   push_mode: 'dottedEdgeText_mode',
-  longer_alt: NODE_STRING,
 });
 
 // ============================================================================
@@ -295,13 +288,11 @@ const DiamondStart = createToken({
 const Colon = createToken({
   name: 'Colon',
   pattern: /:/,
-  longer_alt: NODE_STRING,
 });
 
 const Comma = createToken({
   name: 'Comma',
   pattern: /,/,
-  longer_alt: NODE_STRING,
 });
 
 const Pipe = createToken({
@@ -319,7 +310,6 @@ const Ampersand = createToken({
 const Minus = createToken({
   name: 'Minus',
   pattern: /-/,
-  longer_alt: NODE_STRING,
 });
 
 // Additional special character tokens for node IDs - currently unused but kept for future reference
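The dropped `longer_alt: NODE_STRING` entries follow from the pattern change above: longer_alt tells Chevrotain to let a short token yield when NODE_STRING would match a longer string from the same position, which only matters while NODE_STRING can start with characters like '-', ':' or ','. With NODE_STRING reduced to /[A-Za-z0-9_]+/ the patterns no longer overlap, so the back-references do nothing. A minimal sketch of the mechanism, using hypothetical Word and Dash tokens rather than the real lexer:

import { createToken, Lexer } from 'chevrotain';

// Word may start with '-', the way the old NODE_STRING could.
const Word = createToken({ name: 'Word', pattern: /-?[a-z]+/ });
const Dash = createToken({ name: 'Dash', pattern: /-/, longer_alt: Word });

const lexer = new Lexer([Dash, Word]);
console.log(lexer.tokenize('-ab').tokens.map((t) => [t.image, t.tokenType.name]));
// [['-ab', 'Word']]: longer_alt lets Word win even though Dash is listed first.
// Without longer_alt this would lex as [['-', 'Dash'], ['ab', 'Word']].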
@@ -578,6 +568,9 @@ const multiModeLexerDefinition = {
   // Basic punctuation (must come before NODE_STRING for proper tokenization)
   Pipe,
   Ampersand,
+  Minus,
+  Colon,
+  Comma,
 
   // Node strings and numbers (must come after punctuation)
   NODE_STRING,
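Minus, Colon, and Comma now have to appear in the token list themselves, since the simplified NODE_STRING no longer swallows those characters. Order still matters where patterns do overlap: Chevrotain takes the first listed token type that matches, so the multi-character link tokens must be tried before Minus. A reduced model with only three token types (not the full multi-mode definition):

import { createToken, Lexer } from 'chevrotain';

const LINK = createToken({ name: 'LINK', pattern: /[<ox]?--+[>ox-]/ });
const Minus = createToken({ name: 'Minus', pattern: /-/ });
const NODE_STRING = createToken({ name: 'NODE_STRING', pattern: /[A-Za-z0-9_]+/ });

const lexer = new Lexer([LINK, Minus, NODE_STRING]);
console.log(lexer.tokenize('A-->B').tokens.map((t) => [t.image, t.tokenType.name]));
// [['A', 'NODE_STRING'], ['-->', 'LINK'], ['B', 'NODE_STRING']]
// Listing Minus before LINK would instead yield two Minus tokens and an
// unmatched '>' lexing error.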
simple-arrow-test.js (new file, 16 lines)
@@ -0,0 +1,16 @@
+import { FlowDB } from './packages/mermaid/src/diagrams/flowchart/flowDb.ts';
+import flow from './packages/mermaid/src/diagrams/flowchart/parser/flowParserAdapter.ts';
+
+// Set up the test environment
+flow.yy = new FlowDB();
+flow.yy.clear();
+
+console.log('=== Testing simple arrow ===');
+console.log('Input: "-->"');
+
+try {
+  const result = flow.parse('-->');
+  console.log('Parse result:', result);
+} catch (error) {
+  console.error('Parse error:', error.message);
+}
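A bare '-->' has no graph header and no endpoint nodes, so the catch branch is presumably the expected path here: the point seems to be confirming that the parser rejects a stray arrow with a clean error message rather than mis-tokenizing it.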
test-lexer.mjs (new file, 21 lines)
@@ -0,0 +1,21 @@
+// Test the actual lexer to see what tokens are generated
+import { FlowchartLexer } from './packages/mermaid/src/diagrams/flowchart/parser/flowLexer.ts';
+
+const testInputs = ['A', 'A-->B', 'graph TD;A-->B;', '-->', 'A-', '>B'];
+
+console.log('Testing actual lexer:');
+testInputs.forEach((input) => {
+  console.log(`\nInput: "${input}"`);
+  try {
+    const result = FlowchartLexer.tokenize(input);
+    if (result.errors.length > 0) {
+      console.log('Errors:', result.errors);
+    }
+    console.log(
+      'Tokens:',
+      result.tokens.map((t) => [t.image, t.tokenType.name])
+    );
+  } catch (error) {
+    console.log('Error:', error.message);
+  }
+});
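The 'A-' and '>B' inputs read as probes for boundary cases the old NODE_STRING regex covered with dedicated alternatives (a trailing '-', a leading arrow-head character). Under the simplified pattern, 'A-' should come out as NODE_STRING plus a separate Minus token; what '>' lexes as depends on the full token list, which is exactly what the logged token table surfaces.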