Mirror of https://github.com/mermaid-js/mermaid.git, synced 2025-09-17 14:29:48 +02:00
perf: remove CommonLexer and use EOF
@@ -5,7 +5,11 @@ interface Common {
 }
 
 fragment TitleAndAccessibilities:
-    ((accDescr=ACC_DESCR | accTitle=ACC_TITLE | title=TITLE) NEWLINE+)+
+    ((accDescr=ACC_DESCR | accTitle=ACC_TITLE | title=TITLE) EOL)+
+;
+
+fragment EOL returns string:
+    NEWLINE+ | EOF
 ;
 
 terminal NEWLINE: /\r?\n/;
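
Note: the new EOL fragment is the heart of the change. A statement terminator is now either a run of newlines or the end of the input, so a diagram whose final line has no trailing newline parses without help from a custom lexer. A rough TypeScript illustration of that rule (illustration only, not generated parser code):

const endsStatement = (text: string, pos: number): boolean =>
  // NEWLINE+ : at least one '\r?\n' starts at this position ...
  /^(\r?\n)+/.test(text.slice(pos)) ||
  // ... or EOF : the position is the end of the input
  pos === text.length;

console.log(endsStatement('info\n', 4)); // true, via NEWLINE+
console.log(endsStatement('info', 4));   // true, via EOF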
@@ -14,6 +18,6 @@ terminal ACC_TITLE: /accTitle[\t ]*:[^\n\r]*(?=%%)|accTitle[\t ]*:[^\n\r]*/;
 terminal TITLE: /title([\t ][^\n\r]*|)(?=%%)|title([\t ][^\n\r]*|)/;
 
 hidden terminal WHITESPACE: /[\t ]+/;
-hidden terminal YAML: /---[\t ]*\r?\n[\S\s]*?---[\t ]*(?!.)/;
-hidden terminal DIRECTIVE: /[\t ]*%%{[\S\s]*?}%%\s*/;
+hidden terminal YAML: /---[\t ]*\r?\n(?:[\S\s]*?\r?\n)?---(?:\r?\n|(?!\S))/;
+hidden terminal DIRECTIVE: /[\t ]*%%{[\S\s]*?}%%(?:\r?\n|(?!\S))/;
 hidden terminal SINGLE_LINE_COMMENT: /[\t ]*%%[^\n\r]*/;
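
Note: the hidden YAML and DIRECTIVE terminals are adjusted as well. Reading the regexes above, the new YAML pattern also consumes the newline that follows the closing ---, and the new DIRECTIVE pattern consumes at most one trailing newline instead of an arbitrary whitespace run. A quick comparison of the YAML pair (sketch, not part of the commit; both literals copied verbatim from the hunk):

const oldYaml = /---[\t ]*\r?\n[\S\s]*?---[\t ]*(?!.)/;
const newYaml = /---[\t ]*\r?\n(?:[\S\s]*?\r?\n)?---(?:\r?\n|(?!\S))/;

const input = '---\ntitle: demo\n---\ninfo\n';
console.log(oldYaml.exec(input)?.[0]); // '---\ntitle: demo\n---'
console.log(newYaml.exec(input)?.[0]); // '---\ntitle: demo\n---\n'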
@@ -1,3 +1,2 @@
-export * from './lexer.js';
 export * from './tokenBuilder.js';
 export { MermaidValueConverter } from './valueConverter.js';
@@ -1,8 +0,0 @@
-import type { LexerResult } from 'langium';
-import { DefaultLexer } from 'langium';
-
-export class CommonLexer extends DefaultLexer {
-  public override tokenize(text: string): LexerResult {
-    return super.tokenize(text + '\n');
-  }
-}
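
Note: the deleted CommonLexer existed only to append a newline before tokenizing, which guaranteed that the last statement of a file was NEWLINE-terminated. With NEWLINE+ | EOF in the grammar that wrapper is redundant. Should a caller ever want the old guarantee back, normalising the input is enough; a sketch, not something this commit adds:

const ensureTrailingNewline = (text: string): string =>
  text.endsWith('\n') ? text : text + '\n';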
@@ -7,7 +7,6 @@ import type {
 } from 'langium';
 import { EmptyFileSystem, createDefaultModule, createDefaultSharedModule, inject } from 'langium';
 
-import { CommonLexer } from '../common/lexer.js';
 import { CommonValueConverter } from '../common/valueConverter.js';
 import { InfoGeneratedModule, MermaidGeneratedSharedModule } from '../generated/module.js';
 import { InfoTokenBuilder } from './tokenBuilder.js';
@@ -17,7 +16,6 @@ import { InfoTokenBuilder } from './tokenBuilder.js';
  */
 type InfoAddedServices = {
   parser: {
-    Lexer: CommonLexer;
     TokenBuilder: InfoTokenBuilder;
     ValueConverter: CommonValueConverter;
   };
@@ -34,7 +32,6 @@ export type InfoServices = LangiumServices & InfoAddedServices;
  */
 export const InfoModule: Module<InfoServices, PartialLangiumServices & InfoAddedServices> = {
   parser: {
-    Lexer: (services: InfoServices) => new CommonLexer(services),
     TokenBuilder: () => new InfoTokenBuilder(),
     ValueConverter: () => new CommonValueConverter(),
   },
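
Note: with the Lexer entry removed, Langium's DefaultLexer is used unchanged. A minimal sketch of wiring and exercising the trimmed InfoModule above; the composition follows standard Langium service creation, and the './module.js' import path as well as the error-free outcome are assumptions rather than part of this diff:

import { EmptyFileSystem, createDefaultModule, createDefaultSharedModule, inject } from 'langium';
import { InfoGeneratedModule, MermaidGeneratedSharedModule } from '../generated/module.js';
import { InfoModule } from './module.js'; // assumed path to the module defined above

// Compose shared and language services in the standard Langium way.
const shared = inject(createDefaultSharedModule(EmptyFileSystem), MermaidGeneratedSharedModule);
const Info = inject(createDefaultModule({ shared }), InfoGeneratedModule, InfoModule);
shared.ServiceRegistry.register(Info);

// 'info' with no final newline: previously CommonLexer appended '\n' before
// tokenizing; now the grammar's EOL fragment matches EOF directly.
const result = Info.parser.LangiumParser.parse('info');
console.log(result.lexerErrors, result.parserErrors); // expected: both empty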
@@ -7,7 +7,6 @@ import type {
 } from 'langium';
 import { EmptyFileSystem, createDefaultModule, createDefaultSharedModule, inject } from 'langium';
 
-import { CommonLexer } from '../common/lexer.js';
 import { MermaidGeneratedSharedModule, PieGeneratedModule } from '../generated/module.js';
 import { PieTokenBuilder } from './tokenBuilder.js';
 import { PieValueConverter } from './valueConverter.js';
@@ -17,7 +16,6 @@ import { PieValueConverter } from './valueConverter.js';
  */
 type PieAddedServices = {
   parser: {
-    Lexer: CommonLexer;
     TokenBuilder: PieTokenBuilder;
     ValueConverter: PieValueConverter;
   };
@@ -34,7 +32,6 @@ export type PieServices = LangiumServices & PieAddedServices;
  */
 export const PieModule: Module<PieServices, PartialLangiumServices & PieAddedServices> = {
   parser: {
-    Lexer: (services: PieServices) => new CommonLexer(services),
     TokenBuilder: () => new PieTokenBuilder(),
     ValueConverter: () => new PieValueConverter(),
   },
@@ -12,7 +12,7 @@ entry Pie:
 ;
 
 PieSection:
-    label=PIE_SECTION_LABEL ":" value=PIE_SECTION_VALUE NEWLINE+
+    label=PIE_SECTION_LABEL ":" value=PIE_SECTION_VALUE EOF
 ;
 
 terminal PIE_SECTION_LABEL: /"[^"]+"/;
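
Note: the same end-of-input handling carries over to pie charts, since a section no longer has to be followed by NEWLINE+. For example (diagram text only; the parser wiring matches the Info sketch above, with PieGeneratedModule and PieModule substituted):

// A pie definition whose only section line stops at end-of-input:
const pieText = ['pie', '"Dogs": 10'].join('\n');   // no trailing newline
// Expected with this commit: tokenizes and parses cleanly; previously the
// CommonLexer had to append '\n' first.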