How to use the chevrotain.createTokenInstance function in chevrotain

To help you get started, we’ve selected a few chevrotain examples based on popular ways the library is used in public projects.

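createTokenInstance builds a token object by hand instead of matching it from input text. Its eight arguments are the token type, the image (the matched text, usually an empty string for synthetic tokens), and the start/end offset, line, and column. A minimal sketch of the call, assuming a hypothetical Outdent token type defined only for this illustration:

const { createToken, createTokenInstance, Lexer } = require("chevrotain")

// Hypothetical token type, defined only for this illustration.
const Outdent = createToken({ name: "Outdent", pattern: Lexer.NA })

// Synthetic tokens have no matched text, so the image is "" and the position
// fields can be NaN, or copied from a neighbouring real token as shown below.
const syntheticOutdent = createTokenInstance(Outdent, "", NaN, NaN, NaN, NaN, NaN, NaN)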

github christianvoigt/argdown/packages/argdown-parser/src/plugins/ParserPlugin.js
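From the argdown ParserPlugin: parser errors whose token is EOF carry no location, so a replacement EOF token is created whose position is copied from the end of the last real token.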
if (response.lexerErrors && response.lexerErrors.length > 0) {
            logger.log("verbose", response.lexerErrors);
        }
        if (response.parserErrors && response.parserErrors.length > 0) {
            // add location if token is EOF
            var lastToken = _.last(response.tokens);
            for (let error of response.parserErrors) {
                if (error.token && tokenMatcher(error.token, chevrotain.EOF)) {
                    const startLine = lastToken.endLine;
                    const endLine = startLine;
                    const startOffset = lastToken.endOffset;
                    const endOffset = startOffset;
                    const startColumn = lastToken.endColumn;
                    const endColumn = startColumn;
                    const newToken = chevrotain.createTokenInstance(
                        chevrotain.EOF,
                        "",
                        startOffset,
                        endOffset,
                        startLine,
                        endLine,
                        startColumn,
                        endColumn
                    );
                    error.token = newToken;
                }
            }
        }
        return response;
    }
}
github SAP/chevrotain/examples/lexer/python_indentation/python_indentation.js
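From chevrotain's Python-style indentation lexer example: inside the custom indentation matcher, dedents that have no matching text are pushed as synthetic Outdent tokens.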
if (matchIndentIndex === -1) {
        throw Error(`invalid outdent at offset: ${offset}`)
      }

      const numberOfDedents = indentStack.length - matchIndentIndex - 1

      // This is a little tricky:
      // 1. If there is no match (0 level indent) then this custom token
      //    matcher returns "null", so we need to add all the required outdents ourselves.
      // 2. If there was a match (> 0 level indent) then we need to emit one fewer outdent,
      //    because the lexer will create one itself due to the non-null result.
      let iStart = match !== null ? 1 : 0
      for (let i = iStart; i < numberOfDedents; i++) {
        indentStack.pop()
        matchedTokens.push(
          createTokenInstance(Outdent, "", NaN, NaN, NaN, NaN, NaN, NaN)
        )
      }

      // even though we are adding fewer outdents directly, we still need to update the indent stack fully.
      if (iStart === 1) {
        indentStack.pop()
      }
      return match
    } else {
      // same indent, this should be lexed as simple whitespace and ignored
      return null
    }
  } else {
    // indentation cannot be matched under other circumstances
    return null
  }
github christianvoigt/argdown/packages/argdown-core/src/plugins/ParserPlugin.ts
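The TypeScript version of the same argdown plugin: the logic is identical, but the last token's end position falls back to 1 when it is undefined, and parser errors can optionally be rethrown as an ArgdownPluginError.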
JSON.stringify(response.lexerErrors)
        );
      }
    }
    if (response.parserErrors && response.parserErrors.length > 0) {
      // add location if token is EOF
      var lastToken = last(response.tokens);
      for (let error of response.parserErrors) {
        if (error.token && tokenMatcher(error.token, EOF)) {
          const startLine = lastToken!.endLine || 1;
          const endLine = startLine;
          const startOffset = lastToken!.endOffset || 1;
          const endOffset = startOffset;
          const startColumn = lastToken!.endColumn || 1;
          const endColumn = startColumn;
          const newToken = createTokenInstance(
            EOF,
            "",
            startOffset,
            endOffset,
            startLine,
            endLine,
            startColumn,
            endColumn
          );
          error.token = newToken;
        }
      }
      if (settings.throwExceptions) {
        // throw an error instead of returning a response
        throw new ArgdownPluginError(
          this.name,
github SAP/chevrotain/examples/lexer/python_indentation/python_indentation.js
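The tokenize wrapper from the same indentation example: after the custom lexer has run, any indentation levels still on the stack are flushed as synthetic Outdent tokens before the result is returned.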
tokenize: function(text) {
    // have to reset the indent stack between processing of different text inputs
    indentStack = [0]

    const lexResult = customPatternLexer.tokenize(text)

    // add remaining Outdents
    while (indentStack.length > 1) {
      lexResult.tokens.push(
        createTokenInstance(Outdent, "", NaN, NaN, NaN, NaN, NaN, NaN)
      )
      indentStack.pop()
    }

    if (lexResult.errors.length > 0) {
      throw new Error("sad sad panda lexing errors detected")
    }
    return lexResult
  }
}
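In all of these snippets the pattern is the same: createTokenInstance fabricates tokens the lexer never matched, giving them an empty image and either NaN positions (for the synthetic Outdent tokens) or positions copied from the end of the last real token (for the relocated EOF token).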