How to use Tokenizer.prototype from babylon/lib/tokenizer in babylon

To help you get started, we’ve selected a few babylon examples, based on popular ways Tokenizer.prototype is used in public projects.
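
The snippet below shows the basic shape of the pattern the example relies on: pull the Tokenizer class out of babylon's compiled internals and copy individual methods off Tokenizer.prototype onto your own object. This is a minimal sketch, assuming babylon 6 publishes its compiled sources under lib/ and that lib/tokenizer's default export is the Tokenizer class; both are internal details rather than public API, and TinyScanner is just a hypothetical host object for illustration.

// Minimal sketch (assumption: babylon 6 ships lib/tokenizer with the Tokenizer
// class as its default export; this is internal, undocumented API).
const TokenizerModule = require("babylon/lib/tokenizer");
const Tokenizer = TokenizerModule.default || TokenizerModule;

// Hypothetical host object that borrows a single scanner method instead of
// subclassing; the borrowed method should only need this.input and this.state.pos.
class TinyScanner {
  constructor(input) {
    this.input = input;
    this.state = { pos: 0 };
  }
}
TinyScanner.prototype.fullCharCodeAtPos = Tokenizer.prototype.fullCharCodeAtPos;

// new TinyScanner("𝜋").fullCharCodeAtPos() returns the full astral code point
// that a plain charCodeAt(0) would split into surrogate halves.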

Example from forivall / tacoscript: packages/str-to-token/src/index.js (view on GitHub)
  // Excerpt: the tail of the MicroTokenizer class body. Comments are dropped,
  // nextToken only verifies that the entire input string was consumed, and
  // errors are raised as plain SyntaxErrors.
  addComment(/*comment*/) {}

  nextToken() {
    if (this.state.pos >= this.input.length) { return; }
    this.raise(0, "Did not consume entire string '" + this.input + "'");
  }

  raise(pos, message) {
    throw new SyntaxError(message);
  }
}

// Reuse babylon's scanning machinery directly: the read* helpers, finishOp and
// skipSpace below all come straight from Tokenizer.prototype, so MicroTokenizer
// recognizes tokens exactly the way babylon does.
MicroTokenizer.prototype.isKeyword = isKeyword;
MicroTokenizer.prototype.readToken = Tokenizer.prototype.readToken;
MicroTokenizer.prototype.getTokenFromCode = Tokenizer.prototype.getTokenFromCode;
MicroTokenizer.prototype.fullCharCodeAtPos = Tokenizer.prototype.fullCharCodeAtPos;
MicroTokenizer.prototype.readNumber = Tokenizer.prototype.readNumber;
MicroTokenizer.prototype.readInt = Tokenizer.prototype.readInt;
MicroTokenizer.prototype.readRadixNumber = Tokenizer.prototype.readRadixNumber;
MicroTokenizer.prototype.readString = Tokenizer.prototype.readString;
MicroTokenizer.prototype.readWord = Tokenizer.prototype.readWord;
MicroTokenizer.prototype.readWord1 = Tokenizer.prototype.readWord1;
MicroTokenizer.prototype.readRegexp = Tokenizer.prototype.readRegexp;
MicroTokenizer.prototype.readEscapedChar = Tokenizer.prototype.readEscapedChar;
MicroTokenizer.prototype.readHexChar = Tokenizer.prototype.readHexChar;
MicroTokenizer.prototype.readCodePoint = Tokenizer.prototype.readCodePoint;
MicroTokenizer.prototype.readToken_dot = Tokenizer.prototype.readToken_dot;
MicroTokenizer.prototype.readToken_slash = Tokenizer.prototype.readToken_slash;
MicroTokenizer.prototype.readToken_mult_modulo = Tokenizer.prototype.readToken_mult_modulo;
MicroTokenizer.prototype.readToken_pipe_amp = Tokenizer.prototype.readToken_pipe_amp;
MicroTokenizer.prototype.readToken_caret = Tokenizer.prototype.readToken_caret;
MicroTokenizer.prototype.readToken_plus_min = Tokenizer.prototype.readToken_plus_min;
MicroTokenizer.prototype.readToken_lt_gt = Tokenizer.prototype.readToken_lt_gt;
MicroTokenizer.prototype.readToken_eq_excl = Tokenizer.prototype.readToken_eq_excl;
MicroTokenizer.prototype.finishOp = Tokenizer.prototype.finishOp;
MicroTokenizer.prototype.readTmplToken = Tokenizer.prototype.readTmplToken;
MicroTokenizer.prototype.match = function() { return true; }; // stub: always reports a match

MicroTokenizer.prototype.skipSpace = Tokenizer.prototype.skipSpace;
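
A note on the design: copying methods one at a time, rather than subclassing Tokenizer, makes it explicit which parts of babylon's scanner MicroTokenizer reuses and which it replaces (addComment, nextToken, raise and the match stub). The same borrowing could also be written as a loop; the sketch below is just an equivalent reshaping of the assignments above, using only the method names already copied there.

// Equivalent sketch of the assignments above: copy each named method off
// Tokenizer.prototype in a loop instead of one assignment per line.
const borrowedFromTokenizer = [
  "readToken", "getTokenFromCode", "fullCharCodeAtPos", "readNumber", "readInt",
  "readRadixNumber", "readString", "readWord", "readWord1", "readRegexp",
  "readEscapedChar", "readHexChar", "readCodePoint", "readToken_dot",
  "readToken_slash", "readToken_mult_modulo", "readToken_pipe_amp",
  "readToken_caret", "readToken_plus_min", "readToken_lt_gt",
  "readToken_eq_excl", "finishOp", "readTmplToken", "skipSpace",
];
for (const name of borrowedFromTokenizer) {
  MicroTokenizer.prototype[name] = Tokenizer.prototype[name];
}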