// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
const { createToken, Lexer } = require("chevrotain")

// Digit tokens (matched while the lexer is inside "numbers_mode").
const One = createToken({ pattern: /1/, name: "One" })
const Two = createToken({ pattern: /2/, name: "Two" })
const Three = createToken({ pattern: /3/, name: "Three" })

// Letter tokens.
const Alpha = createToken({ pattern: /A/, name: "Alpha" })
const Beta = createToken({ pattern: /B/, name: "Beta" })
const Gamma = createToken({ pattern: /G/, name: "Gamma" })

// Sign tokens.
const Hash = createToken({ pattern: /#/, name: "Hash" })
const Caret = createToken({ pattern: /\^/, name: "Caret" })
const Amp = createToken({ pattern: /&/, name: "Amp" })

// Mode-control token: matching "NUMBERS" pushes the lexer into numbers_mode.
const EnterNumbers = createToken({
  pattern: /NUMBERS/,
  name: "EnterNumbers",
  push_mode: "numbers_mode"
})
const EnterLetters = createToken({
// Reserved-keyword tokens. Each one carries the AbsKeyword category, so a
// grammar rule can accept any of them with a single CONSUME(AbsKeyword).
const ThisTok = createToken({ categories: AbsKeyword, name: "ThisTok" })
const WithTok = createToken({ categories: AbsKeyword, name: "WithTok" })
const DefaultTok = createToken({ categories: AbsKeyword, name: "DefaultTok" })
const IfTok = createToken({ categories: AbsKeyword, name: "IfTok" })
const ThrowTok = createToken({ categories: AbsKeyword, name: "ThrowTok" })
const DeleteTok = createToken({ categories: AbsKeyword, name: "DeleteTok" })
const InTok = createToken({ categories: AbsKeyword, name: "InTok" })
const TryTok = createToken({ categories: AbsKeyword, name: "TryTok" })
// An IdentifierName, but not a reservedKeyword
const Identifier = createToken({
  name: "Identifier",
  categories: IdentifierName
})
// Set/Get are not reservedKeywords so they are modeled as a TypeOf Identifier.
const SetTok = createToken({ name: "SetTok", categories: Identifier })
// BUG FIX: was `name: "SetTok"` — a copy/paste slip that gave two distinct
// token types the same name, which corrupts name-based token lookups and
// error messages. The Get token must be named "GetTok".
const GetTok = createToken({ name: "GetTok", categories: Identifier })
// TODO: Missing the future reservedKeywords here.
// Link: https://www.ecma-international.org/ecma-262/5.1/#sec-7.7
const AbsPunctuator = createToken({ name: "AbsPunctuator" })
// NOTE(review): this span duplicates the Identifier/SetTok/GetTok/AbsPunctuator
// declarations directly above — redeclaring a `const` is a SyntaxError, so one
// of the two copies should be deleted once the intended file layout is
// confirmed. The in-copy bug is fixed here regardless.
const Identifier = createToken({
  name: "Identifier",
  categories: IdentifierName
})
// Set/Get are not reservedKeywords so they are modeled as a TypeOf Identifier.
const SetTok = createToken({ name: "SetTok", categories: Identifier })
// BUG FIX: was `name: "SetTok"` (same name as the token above).
const GetTok = createToken({ name: "GetTok", categories: Identifier })
// TODO: Missing the future reservedKeywords here.
// Link: https://www.ecma-international.org/ecma-262/5.1/#sec-7.7
const AbsPunctuator = createToken({ name: "AbsPunctuator" })
// Punctuator tokens — every one is a member of the AbsPunctuator category so
// grammar rules can match them generically via CONSUME(AbsPunctuator).
const LCurly = createToken({ categories: AbsPunctuator, name: "LCurly" })
const RCurly = createToken({ categories: AbsPunctuator, name: "RCurly" })
const LParen = createToken({ categories: AbsPunctuator, name: "LParen" })
const RParen = createToken({ categories: AbsPunctuator, name: "RParen" })
const LBracket = createToken({ categories: AbsPunctuator, name: "LBracket" })
const RBracket = createToken({ categories: AbsPunctuator, name: "RBracket" })
const Dot = createToken({ categories: AbsPunctuator, name: "Dot" })
const Semicolon = createToken({ categories: AbsPunctuator, name: "Semicolon" })
const Comma = createToken({ categories: AbsPunctuator, name: "Comma" })
const PlusPlus = createToken({ categories: AbsPunctuator, name: "PlusPlus" })
const MinusMinus = createToken({ categories: AbsPunctuator, name: "MinusMinus" })
// Relational operators.
export const LESS_THAN = createToken({ categories: RELATIONAL_OPERATOR, name: 'LESS_THAN', pattern: '<' })
export const LESS_THAN_EQUAL = createToken({ categories: RELATIONAL_OPERATOR, name: 'LESS_THAN_EQUAL', pattern: '<=' })
// "<>" is the not-equal operator in this language.
export const NOT_EQUAL = createToken({ categories: RELATIONAL_OPERATOR, name: 'NOT_EQUAL', pattern: '<>' })
// Equality / compound-assignment operators.
export const EQUAL = createToken({ categories: EQUALITY_OPERATOR, name: 'EQUAL', pattern: '=' })
export const OP_ASSIGNMENT_ADD = createToken({ categories: EQUALITY_OPERATOR, name: 'OP_ASSIGNMENT_ADD', pattern: '+=' })
export const OP_ASSIGNMENT_BITSHIFT_LEFT = createToken({ categories: EQUALITY_OPERATOR, name: 'OP_ASSIGNMENT_BITSHIFT_LEFT', pattern: '<<=' })
export const OP_ASSIGNMENT_BITSHIFT_RIGHT = createToken({ categories: EQUALITY_OPERATOR, name: 'OP_ASSIGNMENT_BITSHIFT_RIGHT', pattern: '>>=' })
export const OP_ASSIGNMENT_DIVISION = createToken({ categories: EQUALITY_OPERATOR, name: 'OP_ASSIGNMENT_DIVISION', pattern: '/=' })
// prettier-ignore
export const OP_ASSIGNMENT_INTEGER_DIVISION = createToken({ name: 'OP_ASSIGNMENT_INTEGER_DIVISION', pattern: '\\=', categories: EQUALITY_OPERATOR })
export const OP_ASSIGNMENT_MULTIPLY = createToken({
categories: EQUALITY_OPERATOR,
/*
* Example Of using Chevrotain's built in syntactic content assist
* To implement semantic content assist and content assist on partial inputs.
*
* Examples:
* "Public static " --> ["function"]
* "Public sta" --> ["static"]
* "call f" --> ["foo"] // assuming foo is in the symbol table.
*/
const _ = require("lodash")
const { createToken, Lexer, CstParser } = require("chevrotain")

// Three trivial single-character tokens for the demo grammar.
const A = createToken({ pattern: /A/, name: "A" })
const B = createToken({ pattern: /B/, name: "B" })
const C = createToken({ pattern: /C/, name: "C" })

// Whitespace is matched but skipped — it never reaches the parser.
const WhiteSpace = createToken({
  group: Lexer.SKIPPED,
  pattern: /\s+/,
  name: "WhiteSpace"
})

// Token order matters: earlier entries win when multiple patterns could match.
const allTokens = [WhiteSpace, A, B, C]
const StatementsLexer = new Lexer(allTokens)
// A completely normal Chevrotain Parser, no changes needed to use the content assist capabilities.
class MyParser extends CstParser {
constructor() {
super(allTokens)
/*
* Example Of using Grammar complex grammar inheritance to implement
* 'Structured natural language' supporting multiple 'spoken languages' using grammar inheritance.
*
* 1. An "Abstract" Base Grammar with two concrete grammars extending it.
* 2. Each concrete grammar has a different lexer
* 3. This also shows an example of using Token inheritance
*/
const { createToken, Lexer, CstParser } = require("chevrotain")
// ----------------- lexer -----------------
// RelationWord is an abstract token category: Lexer.NA means the lexer never
// matches it directly; only the concrete tokens below (via `categories`) do.
const RelationWord = createToken({ name: "RelationWord", pattern: Lexer.NA })
// Token inheritance CONSUME(RelationWord) will work on any Token extending RelationWord
const And = createToken({
name: "And",
pattern: /and/,
categories: RelationWord
})
// "before" — a concrete member of the RelationWord category.
const Before = createToken({
name: "Before",
pattern: /before/,
categories: RelationWord
})
const After = createToken({
name: "After",
pattern: /after/,
categories: RelationWord
"use strict"
const { createToken, Lexer, Parser, tokenMatcher, EOF } = require("chevrotain")
const _ = require("lodash")
// all keywords (from/select/where/...) belong to the Keyword category thus
// they will be easy to identify for the purpose of content assist.
const Keyword = createToken({ name: "Keyword", pattern: Lexer.NA })
const Select = createToken({
name: "Select",
pattern: /SELECT/,
categories: Keyword
})
const From = createToken({ name: "From", pattern: /FROM/, categories: Keyword })
const Where = createToken({
name: "Where",
pattern: /WHERE/,
categories: Keyword
})
const Comma = createToken({ name: "Comma", pattern: /,/ })
const Identifier = createToken({ name: "Identifier", pattern: /\w+/ })
const Integer = createToken({ name: "Integer", pattern: /0|[1-9]\d+/ })
// BUG FIX: the pattern was `//`, which JavaScript parses as an empty line
// comment — it swallowed the closing `})` and left the createToken call
// unterminated (a SyntaxError). GreaterThan matches the ">" character.
const GreaterThan = createToken({ name: "GreaterThan", pattern: />/ })
// Whitespace (including newlines) is recognized but dropped from the token
// stream via the SKIPPED group; line_breaks informs the lexer's line tracking.
const WhiteSpace = createToken({
  group: Lexer.SKIPPED,
  line_breaks: true,
  name: "WhiteSpace",
  pattern: /\s+/
})
// Plain keyword tokens.
export const THEN = keyword('Then')
export const TO = keyword('To')
export const WHILE = keyword('While')
// Literal tokens — each belongs to the LITERAL category.
// Double-quoted string; "" inside the string is an escaped quote.
export const STRING_LITERAL = createToken({ name: 'STRING_LITERAL', categories: LITERAL, pattern: /"([^"]|"")*"/ })
// BOOLEAN_LITERAL is abstract (Lexer.NA): only TRUE/FALSE are matched directly.
export const BOOLEAN_LITERAL = createToken({ name: 'BOOLEAN_LITERAL', pattern: Lexer.NA, categories: LITERAL })
export const TRUE = keyword('true', { categories: BOOLEAN_LITERAL })
export const FALSE = keyword('false', { categories: BOOLEAN_LITERAL })
// Decimal number with optional e/E/d/D exponent suffix.
export const NUMBER_LITERAL = createToken({ name: 'NUMBER_LITERAL', categories: LITERAL, pattern: /(?:\d*\.?\d+|\d+\.?\d*)(?:[eEdD][-+]?\d+)?/ })
// Hex-style literal introduced by "&", e.g. &hFF.
export const HEX_LITERAL = createToken({ name: 'HEX_LITERAL', categories: LITERAL, pattern: /&[hHFf0-9EeDdCcBbAa]+&?/ })
export const GREATER_THAN = createToken({ name: 'GREATER_THAN', pattern: '>', categories: RELATIONAL_OPERATOR })
export const GREATER_THAN_EQUAL = createToken({
  categories: RELATIONAL_OPERATOR,
  name: 'GREATER_THAN_EQUAL',
  pattern: '>='
})
// BUG FIX: the original text was garbled here — GREATER_THAN_EQUAL's closing
// `})` and the opening of the TYPE_DECLARATION token were lost, fusing the two
// object literals into one invalid expression (duplicate `name`/`pattern`
// keys, missing comma). Reconstructed as two separate tokens; TYPE_DECLARATION
// matches the single-character type-suffix sigils $ % ! # &.
export const TYPE_DECLARATION = createToken({
  name: 'TYPE_DECLARATION',
  pattern: /[\$%!#&]/
})
// Grouping punctuation — all explicitly marked as never spanning line breaks.
export const OPEN_BRACKET = createToken({ line_breaks: false, pattern: '[', name: 'OPEN_BRACKET' })
export const OPEN_CURLY_BRACE = createToken({ line_breaks: false, pattern: '{', name: 'OPEN_CURLY_BRACE' })
export const OPEN_PAREN = createToken({ line_breaks: false, pattern: '(', name: 'OPEN_PAREN' })
export const CLOSE_BRACKET = createToken({ line_breaks: false, pattern: ']', name: 'CLOSE_BRACKET' })
export const CLOSE_CURLY_BRACE = createToken({ line_breaks: false, pattern: '}', name: 'CLOSE_CURLY_BRACE' })
export const CLOSE_PAREN = createToken({ line_breaks: false, pattern: ')', name: 'CLOSE_PAREN' })
export const PERIOD = createToken({ pattern: '.', name: 'PERIOD' })
// Print statement: the full keyword and its "?" shorthand share the PRINT category.
export const FULL_PRINT = keyword('Print', { name: 'FULL_PRINT', categories: PRINT, line_breaks: false })
export const SHORT_PRINT = createToken({ line_breaks: false, categories: PRINT, name: 'SHORT_PRINT', pattern: '?' })
// Simple statement keywords.
export const RETURN = keyword('return')
export const STEP = keyword('step')
export const STOP = keyword('stop')
// Built-in type names — all tagged with the BASE_TYPE category.
export const BOOLEAN = keyword('Boolean', { categories: BASE_TYPE })
export const INTEGER = keyword('Integer', { categories: BASE_TYPE })
export const LONGINTEGER = keyword('LongInteger', { categories: BASE_TYPE })
export const FLOAT = keyword('Float', { categories: BASE_TYPE })
export const DOUBLE = keyword('Double', { categories: BASE_TYPE })
export const STRING = keyword('String', { categories: BASE_TYPE })
export const OBJECT = keyword('Object', { categories: BASE_TYPE })
export const FUNCTION = keyword('Function', { categories: BASE_TYPE })
export const INTERFACE = keyword('Interface', { categories: BASE_TYPE })
// "Invalid" is both a type name and a literal value, hence two categories.
export const INVALID = keyword('Invalid', { categories: [BASE_TYPE, LITERAL] })
export const DYNAMIC = keyword('Dynamic', { categories: BASE_TYPE })
name: 'HEX_LITERAL',
pattern: /&[hHFf0-9EeDdCcBbAa]+&?/
})
// NOTE(review): this entire region re-exports tokens that already appear
// earlier in the file (GREATER_THAN, LESS_THAN, EQUAL, the OP_ASSIGNMENT_*
// family). Duplicate `export const` declarations are a SyntaxError in an ES
// module — the file appears to be two copies of the same token module pasted
// together, and one copy should be removed once the intended layout is
// confirmed. Code below is left byte-identical pending that decision.
export const GREATER_THAN = createToken({ name: 'GREATER_THAN', pattern: '>', categories: RELATIONAL_OPERATOR })
export const GREATER_THAN_EQUAL = createToken({
categories: RELATIONAL_OPERATOR,
name: 'GREATER_THAN_EQUAL',
pattern: '>='
})
export const LESS_THAN = createToken({ name: 'LESS_THAN', pattern: '<', categories: RELATIONAL_OPERATOR })
export const LESS_THAN_EQUAL = createToken({ name: 'LESS_THAN_EQUAL', pattern: '<=', categories: RELATIONAL_OPERATOR })
// "<>" is the not-equal operator in this language.
export const NOT_EQUAL = createToken({ name: 'NOT_EQUAL', pattern: '<>', categories: RELATIONAL_OPERATOR })
export const EQUAL = createToken({ name: 'EQUAL', pattern: '=', categories: EQUALITY_OPERATOR })
// Compound-assignment operators, all in the EQUALITY_OPERATOR category.
export const OP_ASSIGNMENT_ADD = createToken({
categories: EQUALITY_OPERATOR,
name: 'OP_ASSIGNMENT_ADD',
pattern: '+='
})
export const OP_ASSIGNMENT_BITSHIFT_LEFT = createToken({
categories: EQUALITY_OPERATOR,
name: 'OP_ASSIGNMENT_BITSHIFT_LEFT',
pattern: '<<='
})
export const OP_ASSIGNMENT_BITSHIFT_RIGHT = createToken({
categories: EQUALITY_OPERATOR,
name: 'OP_ASSIGNMENT_BITSHIFT_RIGHT',
pattern: '>>='
})
export const OP_ASSIGNMENT_DIVISION = createToken({