How to use the moo.compile function in moo

To help you get started, we’ve selected a few moo examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github magma / magma / nms / app / fbcnms-packages / fbcnms-alarms / components / prometheus / PromQLTokenizer.js View on Github external
throw new SyntaxError(`${unterminatedEscape} (${oct})`);
  }

  return {char: String.fromCodePoint(codePoint), newIndex: i + 3};
}

const unterminatedEscape = 'Unterminated escape sequence';

export type Token = {
  value: string,
  type: TokenType,
};

type TokenType = $Keys;

// PromQL lexer built from the shared rule table.
export const lexer = Moo.compile(lexerRules);

// Wrap the stock next() so whitespace and comment tokens are skipped;
// callers of lexer.next() only ever see significant tokens.
const baseNext = lexer.next;
lexer.next = () => {
  let token;
  do {
    token = baseNext.call(lexer);
  } while (token && (token.type === 'WS' || token.type === 'comment'));
  return token;
};

export function Tokenize(input: string): Array {
  lexer.reset(input);

  const tokens = [];
  let token;
  while ((token = lexer.next())) {
    tokens.push({value: token.value, type: token.type});
  }
  return tokens;
github mimic-sussex / sema / lang / eppprocessor3.js View on Github external
(function () {
function id(x) { return x[0]; }

const moo = require("moo"); // this 'require' creates a node dependency

const lexer = moo.compile({
  oscMsg:       ['oscIn'],
  mlModel:       ['mlmodel'],
  osc:          ['osc',    '∞'],
  sinosc:       ['sin',    '~'],
  cososc:       ['cos',    '≈'],
  triosc:       ['tri',    '∆'],
  sawosc:       ['saw',    '◊'],
  phasosc:      ['phasor', 'Ø'],
  squareosc:    ['square', '∏'],
  pulseosc:     ['pulse',  '^'],
  gateosc:      ['gate',   '≠'],
  patternosc:   ['patt',   '¶'],
  bus:          ['bus',    '‡' ],
  wnoise:       ['wnoise', 'Ω'],
  pnoise:       ['pnoise'],
  bnoise:       ['bnoise'],
github Cryptonomic / ConseilJS / src / chain / tezos / lexer / EntryPointTemplate.ts View on Github external
// @ts-ignore
function id(d: any[]): any { return d[0]; }
declare var parameter: any;
declare var semicolon: any;
declare var lparen: any;
declare var rparen: any;
declare var or: any;
declare var annot: any;
declare var pair: any;
declare var singleArgData: any;
declare var doubleArgData: any;
declare var data: any;

    const moo = require("moo");

    // Token rules for Michelson "parameter" declarations. moo tries
    // rules in declaration order, so the specific keyword rules are
    // attempted before the generic word-list rules below them.
    const entryPointTokenRules = {
        wspace: /[ \t]+/,
        lparen: '(',
        rparen: ')',
        annot: /:[^ );]+|%[^ );]+/,
        parameter: 'parameter',
        or: 'or',
        pair: 'pair',
        data: ['bytes', 'int', 'nat', 'bool', 'string', 'timestamp', 'signature', 'key', 'key_hash', 'mutez', 'address', 'unit', 'operation'],
        singleArgData: ['option', 'list', 'contract'],
        doubleArgData: ['lambda', 'map', 'big_map'],
        semicolon: ';'
    };
    const lexer = moo.compile(entryPointTokenRules);


    import { Parameter, EntryPoint } from '../../../types/tezos/ContractIntrospectionTypes';
github mimic-sussex / sema / lang / eppprocessor5.js View on Github external
(function () {
function id(x) { return x[0]; }

const moo = require("moo"); // this 'require' creates a node dependency

const lexer = moo.compile({
  separator:    /,/,
  paramEnd:     /}/,
  paramBegin:   /{/,
  sample:       { match: /\\[a-zA-Z0-9]+/, lineBreaks: true, value: x => x.slice(0, x.length)},
  variable:     /:[a-zA-Z0-9]+:/,
  oscAddress:   /(?:\/[a-zA-Z0-9]+)+/,
  sample2:      /@[a-zA-Z0-9]+/,
  number:       /-?(?:[0-9]|[1-9][0-9]+)(?:\.[0-9]+)?(?:[eE][-+]?[0-9]+)?\b/,
  add:          /\+/,
  mult:         /\*/,
  div:          /\//,
  dot:          /\./,
  hash:         /\#/,
  hyphen:       /\-/,
  ndash:        /\–/,
  mdash:        /\—/,
github mimic-sussex / sema / src / language / eppGrammar.js View on Github external
(function () {
function id(x) { return x[0]; }

const moo = require("moo"); // this 'require' creates a node dependency

const lexer = moo.compile({
  osc: ['osc'],
  sinosc: ['sin'],
  cososc: ['cos'],
  sawosc: ['saw'],
  triosc: ['tri'],
  squareosc: ['square'],
  pulseosc: ['pulse'],
  wnoise: ['wnoise'],
  pnoise: ['pnoise'],
  bnoise: ['bnoise'],
  phasosc: ['phasor'],
  tpb: ['tpb'],
  functionkeyword: ['gain', 'adsr', 'dyn', 'dist', 'filter', 'delay', 'flang', 'chorus', 'samp'],
  o: /o/,
  x: /x/,
  at: /@/,
github mimic-sussex / sema / lang / eppprocessor4.js View on Github external
(function () {
function id(x) { return x[0]; }

const moo = require("moo"); // this 'require' creates a node dependency

const lexer = moo.compile({
  separator:      /,/,
  paramEnd:       /}/,
  paramBegin:     /{/,
  oscAddress:     /(?:\/[a-zA-Z0-9]+)+/,
  sample:         /(?:\\[a-zA-Z0-9]+)+/,
  add:            /\+/,
  mult:           /\*/,
  div:            /\//,
  dot:            /\./,
  hash:           /\#/,
  hyphen:         /\-/,
  ndash:          /\–/,
  mdash:          /\—/,
  comma:          /\,/,
  colon:          /\:/,
  semicolon:      /\;/,
github DefinitelyTyped / DefinitelyTyped / types / moo / moo-tests.ts View on Github external
import * as moo from 'moo';

// Basic compile(): rule values may be a string literal, a regex, or an
// array of literal keywords. Rules are tried in declaration order.
let lexer = moo.compile({
    lparen: '(',
    word:  /[a-z]+/,
    rparen: ')',
    keyword: ['while', 'if', 'else', 'moo', 'cows']
});

lexer = moo.states({
    main: {
        strstart: {match: '`', push: 'lit'},
        ident:    /\w+/,
        lbrace:   {match: '{', push: 'main'},
        rbrace:   {match: '}', pop: 1},
        colon:    ':',
        space:    {match: /\s+/, lineBreaks: true},
    },
    lit: {
github ballercat / walt / packages / walt-compiler / src / parser / index.js View on Github external
function makeLexer() {
  const mooLexer = moo.compile(tokens);

  return {
    current: null,
    lines: [],
    get line() {
      return mooLexer.line;
    },
    get col() {
      return mooLexer.col;
    },
    save() {
      return mooLexer.save();
    },
    reset(chunk, info) {
      this.lines = chunk.split('\n');
      return mooLexer.reset(chunk, info);
github kitspace / electro-grammar / src / lexer.js View on Github external
// Lexer for electronic-component descriptions: tolerance percentages,
// dimensioned numbers, and bare words. moo tries the rules in
// declaration order, so the most specific pattern is listed first.
const moo = require('moo')

// Remove the single spaces that the patterns deliberately capture so
// token values come out with no embedded spaces.
const stripSpaces = (text) => text.split(' ').join('')

const lexer = moo.compile({
    percent: {
        match: /(?:\+\/?-|±)?\s*\d+\.?\d*\s*%(?:\s+|$)/,
        value: stripSpaces,
    },
    number: {
        match: /\d+\.?\d*\s+[^0-9+±]\S*(?:\s+|$)/,
        value: stripSpaces,
    },
    word: {
        match: /\S+(?:\s+|$)/,
        value: (text) => text.trim(),
    },
})

module.exports = lexer
github linkedin / opticss / packages / @opticss / attr-analysis-dsl / grammar / attrlexer.js View on Github external
// moo lexer for the attribute-analysis DSL: whitespace, punctuation,
// the unknown markers, and a catch-all constant rule that excludes
// every delimiter character. Rules are tried in declaration order,
// which is why '???' is declared before '?'.
const moo = require('moo');

const attributeTokenRules = {
  WS:      /[ \t]+/,
  pipe: '|',
  lparen:  '(',
  rparen:  ')',
  asterisk: '*',
  unknown: '???',
  unknownIdentifier: '?',
  absent: '---',
  constant: /[^|()*\n \t]+/,
};

const lexer = moo.compile(attributeTokenRules);

module.exports = lexer;

moo

Optimised tokenizer/lexer generator! 🐄 Much performance. Moo!

BSD-3-Clause
Latest version published 1 year ago

Package Health Score

74 / 100
Full package analysis