How to use the babylon/lib/tokenizer/types.TokenType function in babylon

To help you get started, we’ve selected a few babylon examples based on popular ways it is used in public projects.

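For orientation before the examples: TokenType is the constructor babylon uses internally for every kind of token its tokenizer can emit. The sketch below shows, hypothetically, how one might be created and registered; the label and option values are made up for illustration, and the option names (beforeExpr, binop) are taken from the examples further down. Babylon's tokenizer internals are undocumented and may differ between versions.

import { TokenType, types as tt } from 'babylon/lib/tokenizer/types';

// A token type pairs a label with flags describing how the token behaves.
// `beforeExpr` tells the tokenizer an expression may directly follow it;
// `binop` gives it a binding power when used as a binary operator.
const doubleQuestion = new TokenType('??', { beforeExpr: true, binop: 1 });

// Custom token types are typically attached to the shared `types` map so
// parser extensions can refer to them later (hypothetical name).
tt.doubleQuestion = doubleQuestion;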

github SuperPaintman / babel-plugin-syntax-pipeline / src / index.js View on Github external
'use strict';
/** Imports */
import Parser, { plugins } from 'babylon/lib/parser';
import { TokenType, types as tt } from 'babylon/lib/tokenizer/types';

/** Constants */
const CHAR_CODES = '|>'.split('').map((c) => c.charCodeAt(0));
const PLUGIN_NAME = 'pipeline';

const beforeExpr = true;


/** Types */
// Register `|>` as a new binary-operator token type (binding power 12);
// `beforeExpr` marks that an expression may directly follow it.
tt.pipeline = new TokenType('|>', { beforeExpr, binop: 12 });


/** Parser */
const pp = Parser.prototype;

pp.readToken_pipeline = function readToken_pipeline(code) { // eslint-disable-line camelcase
  // Emit a `pipeline` token covering the two characters of `|>`.
  return this.finishOp(tt.pipeline, 2);
};


/** Plugin */
function plugin(instance) {
  instance.extend('readToken', (inner) => function readToken(code) {
    const next = this.input.charCodeAt(this.state.pos + 1);

    if (!(code === CHAR_CODES[0] && next === CHAR_CODES[1])) {
      return inner.call(this, code); // not `|>`: defer to the default readToken
    }
    return this.readToken_pipeline(code); // consume the two-character `|>` operator
  });
}
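Not shown in the snippet is how such a plugin gets hooked into babylon. A hypothetical wiring sketch, reusing plugin and PLUGIN_NAME from the code above, might look like the following; the registration into the internal plugins registry and the plugins parse option are assumptions about babylon's internal API rather than code from this project.

import * as babylon from 'babylon';
import { plugins } from 'babylon/lib/parser';

// Hypothetical: expose the parser extension under its name so babylon's
// plugin loader can find it.
plugins[PLUGIN_NAME] = plugin;

// Enable the extension by name when parsing (assumes babylon resolves
// custom plugin names through the registry populated above).
const ast = babylon.parse('value |> double |> print', {
  plugins: [PLUGIN_NAME],
});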
github forivall / tacoscript / packages / babylon-plugin-cst / src / types.js View on Github external
import { types as tt, TokenType } from "babylon/lib/tokenizer/types";
import forOwn from "lodash/object/forOwn";
import isString from "lodash/lang/isString";

// TODO: create and export actual token types for these.
export const tokenTypes = {
  newline: new TokenType("newline"),
  whitespace: new TokenType("whitespace"),
  blockCommentStart: new TokenType("/*"),
  blockCommentBody: new TokenType("blockCommentBody"),
  blockCommentEnd: new TokenType("*/"),
  lineCommentStart: new TokenType("//"),
  lineCommentBody: new TokenType("lineCommentBody"),
};
// Mark every CST token type as whitespace so the parser can skip over it.
forOwn(tokenTypes, function(tokenType) {
  tokenType.whitespace = true;
});

const ttCst = tokenTypes;

// Reverse lookup: map each token type (built-in and CST) back to its name.
export const tokenToName = new Map();
for (let name in tt) { tokenToName.set(tt[name], name); }
for (let name in ttCst) { tokenToName.set(ttCst[name], name); }

export function getTokenName(tokenType) {
  if (isString(tokenType)) return tokenType;
  return tokenToName.get(tokenType);
}
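For reference, a short hypothetical usage sketch of the exports above; the specific token types queried are chosen only for illustration.

import { types as tt } from "babylon/lib/tokenizer/types";
import { tokenTypes, getTokenName } from "./types";

getTokenName(tt.braceL);          // => "braceL"  (built-in babylon token)
getTokenName(tokenTypes.newline); // => "newline" (custom CST token)
getTokenName("eof");              // => "eof"     (strings pass through)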