How to use the pygments.lexer.RegexLexer function in Pygments

To help you get started, we've selected a few Pygments examples based on popular ways the library is used in public projects.
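Before the repository excerpts, here is a minimal, self-contained sketch of the pattern they all share: subclass RegexLexer, declare a tokens table mapping state names to (regex, token type[, new state]) rules, and pass an instance to highlight(). The IniLikeLexer name and its toy grammar are invented for illustration.

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexer import RegexLexer
from pygments.token import Comment, Keyword, Text

class IniLikeLexer(RegexLexer):
    """Toy lexer for an INI-like format (illustrative only)."""
    name = 'INI-like'
    aliases = ['inilike']
    filenames = ['*.inilike']

    tokens = {
        'root': [
            (r'\s+', Text),               # whitespace, including newlines
            (r';.*$', Comment.Single),    # ; line comments
            (r'\[[^\]\n]+\]', Keyword),   # [section] headers
            (r'[^;\[\n]+', Text),         # anything else on the line
        ],
    }

print(highlight('[core]\n; a comment\nkey = value\n',
                IniLikeLexer(), TerminalFormatter()))

Rules are tried in order at the current position; each match emits (token type, text) pairs that the formatter turns into markup.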


github pygments / pygments / pygments / lexers / python.py
(r'^(    )(.+)(\n)',
             bygroups(Text, using(PythonLexer), Text)),
            (r'^([ \t]*)(\.\.\.)(\n)',
             bygroups(Text, Comment, Text)),  # for doctests...
            (r'^([^:]+)(: )(.+)(\n)',
             bygroups(Generic.Error, Text, Name, Text), '#pop'),
            (r'^([a-zA-Z_]\w*)(:?\n)',
             bygroups(Generic.Error, Text), '#pop')
        ],
    }


Python3TracebackLexer = PythonTracebackLexer


class Python2TracebackLexer(RegexLexer):
    """
    For Python tracebacks.

    .. versionadded:: 0.7

    .. versionchanged:: 2.5
       This class has been renamed from ``PythonTracebackLexer``.
       ``PythonTracebackLexer`` now refers to the Python 3 variant.
    """

    name = 'Python 2.x Traceback'
    aliases = ['py2tb']
    filenames = ['*.py2tb']
    mimetypes = ['text/x-python2-traceback']

    tokens = {
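This excerpt combines three RegexLexer staples: bygroups() assigns one token type per regex group, using(PythonLexer) delegates a group's text to a whole other lexer, and '#pop' drops back to the previous state once the final error line is seen. Driving the finished lexer is a single call; a sketch with a made-up traceback, assuming Pygments 2.5+ where PythonTracebackLexer is the Python 3 variant as the docstring notes:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.python import PythonTracebackLexer

tb = ('Traceback (most recent call last):\n'
      '  File "demo.py", line 1, in <module>\n'
      'ZeroDivisionError: division by zero\n')
print(highlight(tb, PythonTracebackLexer(), TerminalFormatter()))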
github tmm1 / pygments.rb / vendor / pygments-main / pygments / lexers / esoteric.py
Lexers for esoteric languages.

    :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation, Error

__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
           'CapDLLexer', 'AheuiLexer']


class BrainfuckLexer(RegexLexer):
    """
Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
language.
    """

    name = 'Brainfuck'
    aliases = ['brainfuck', 'bf']
    filenames = ['*.bf', '*.b']
    mimetypes = ['application/x-brainfuck']

    tokens = {
        'common': [
            # use different colors for different instruction types
            (r'[.,]+', Name.Tag),
            (r'[+-]+', Name.Builtin),
            (r'[<>]+', Name.Variable),
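Because name, aliases, filenames and mimetypes are declared on the class, the lexer is discoverable through Pygments' registry, and callers rarely instantiate it directly. A sketch:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('bf')   # resolved via the aliases list above
print(highlight('++[>+<-].', lexer, HtmlFormatter()))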
github wandb / client / wandb / vendor / pygments / lexers / d.py
(r'>', String, '#pop'),
        ],
        'delimited_curly': [
            (r'[^{}]+', String),
            (r'\{', String, 'delimited_inside_curly'),
            (r'\}"', String, '#pop'),
        ],
        'delimited_inside_curly': [
            (r'[^{}]+', String),
            (r'\{', String, '#push'),
            (r'\}', String, '#pop'),
        ],
    }


class CrocLexer(RegexLexer):
    """
For `Croc <http://jfbillingsley.com/croc>`_ source.
    """
    name = 'Croc'
    filenames = ['*.croc']
    aliases = ['croc']
    mimetypes = ['text/x-crocsrc']

    tokens = {
        'root': [
            (r'\n', Text),
            (r'\s+', Text),
            # Comments
            (r'//(.*?)\n', Comment.Single),
            (r'/\*', Comment.Multiline, 'nestedcomment'),
            # Keywords
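The delimited_inside_curly state above is the standard RegexLexer idiom for balanced delimiters: every '{' pushes the current state again with '#push' and every '}' pops one level, so the lexer leaves the nested state only when all braces are matched. A stripped-down, runnable sketch of the same idiom (BraceStringLexer is invented):

from pygments.lexer import RegexLexer
from pygments.token import String, Text

class BraceStringLexer(RegexLexer):
    name = 'BraceString'
    tokens = {
        'root': [
            (r'\{', String, 'inside'),   # enter the nested state
            (r'[^{]+', Text),
        ],
        'inside': [
            (r'[^{}]+', String),
            (r'\{', String, '#push'),    # one nesting level deeper
            (r'\}', String, '#pop'),     # close one nesting level
        ],
    }

for tok, val in BraceStringLexer().get_tokens('a {b {c} d} e'):
    print(tok, repr(val))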
github mmcgrana / gobyexample / third_party / pygments / pygments / lexers / templates.py
name = 'JavaScript+Cheetah'
    aliases = ['js+cheetah', 'javascript+cheetah',
               'js+spitfire', 'javascript+spitfire']
    mimetypes = ['application/x-javascript+cheetah',
                 'text/x-javascript+cheetah',
                 'text/javascript+cheetah',
                 'application/x-javascript+spitfire',
                 'text/x-javascript+spitfire',
                 'text/javascript+spitfire']

    def __init__(self, **options):
        super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
                                                     CheetahLexer, **options)
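CheetahJavascriptLexer is not itself a RegexLexer but a DelegatingLexer (its base class sits outside this excerpt): the super().__init__ call scans the input with CheetahLexer and re-lexes everything Cheetah marks as Other using JavascriptLexer. Consumers still reach it through the normal registry; a sketch:

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('js+cheetah')   # any alias from the list above
print(type(lexer).__name__)               # CheetahJavascriptLexer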


class GenshiTextLexer(RegexLexer):
    """
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
templates.
    """

    name = 'Genshi Text'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']

    tokens = {
        'root': [
            (r'[^#$\s]+', Other),
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            (r'[#$\s]', Other),
github mmcgrana / gobyexample / third_party / pygments / pygments / lexers / lisp.py
# special constants
            (r'(t|nil)' + terminated, Name.Constant),

            # functions and variables
            (r'\*' + symbol + r'\*', Name.Variable.Global),
            (symbol, Name.Variable),

            # parentheses
            (r'\(', Punctuation, 'body'),
            (r'\)', Punctuation, '#pop'),
        ],
    }


class HyLexer(RegexLexer):
    """
Lexer for `Hy <http://hylang.org/>`_ source code.

    .. versionadded:: 2.0
    """
    name = 'Hy'
    aliases = ['hylang']
    filenames = ['*.hy']
    mimetypes = ['text/x-hy', 'application/x-hy']

    special_forms = (
        'cond', 'for', '->', '->>', 'car',
        'cdr', 'first', 'rest', 'let', 'when', 'unless',
        'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
        ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
        'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
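The special_forms tuple is wrapped further down the class (outside this excerpt) in a words() pattern so each name matches as a whole token. Highlighting Hy then looks like any other language; a sketch:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.lisp import HyLexer

print(highlight('(defn add [a b] (+ a b))', HyLexer(), TerminalFormatter()))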
github yesudeep / greatshipgroup / app / lib / console / app / pygments / lexers / functional.py
try:
    set
except NameError:
    from sets import Set as set

from pygments.lexer import Lexer, RegexLexer, bygroups, using, this, include, \
     do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, \
     String, Number, Punctuation, Literal


__all__ = ['SchemeLexer', 'CommonLispLexer', 'HaskellLexer', 'LiterateHaskellLexer',
           'OcamlLexer', 'ErlangLexer']


class SchemeLexer(RegexLexer):
    """
A Scheme lexer, parsing a stream and outputting the tokens
needed to highlight Scheme code.
This lexer could most probably be subclassed easily to parse
other Lisp dialects such as Common Lisp, Emacs Lisp, or AutoLisp.

    This parser is checked with pastes from the LISP pastebin
    at http://paste.lisp.org/ to cover as much syntax as possible.

    It supports the full Scheme syntax as defined in R5RS.

    *New in Pygments 0.6.*
    """
    name = 'Scheme'
    aliases = ['scheme', 'scm']
    filenames = ['*.scm']
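Because the class advertises filenames = ['*.scm'], Pygments can also pick it from a file name rather than an alias. A sketch:

from pygments.lexers import guess_lexer_for_filename

lexer = guess_lexer_for_filename('demo.scm', '(define (square x) (* x x))')
print(lexer.name)   # Scheme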
github OpenCobolIDE / OpenCobolIDE / open_cobol_ide / extlibs / pygments / lexers / templates.py
}

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
            rv += 0.15
        if re.search(r'\{include\s+file=.*?\}', text):
            rv += 0.15
        if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
            rv += 0.15
        if re.search(r'\{\$.*?\}', text):
            rv += 0.01
        return rv
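analyse_text is the content-based counterpart to filename matching: it returns a score between 0.0 and 1.0, and guess_lexer() picks the registered lexer with the highest score. In this sketch the input is chosen to trip the checks above, so the Smarty lexer should win, though the outcome of guess_lexer() depends on every lexer in the registry:

from pygments.lexers import guess_lexer

lexer = guess_lexer('{include file="header.tpl"}\n{if $user}Hello{/if}\n')
print(lexer.name)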


class VelocityLexer(RegexLexer):
    """
Generic `Velocity <http://velocity.apache.org/>`_ template lexer.

Just highlights Velocity directives and variable references; other
data is left untouched by the lexer.
    """

    name = 'Velocity'
    aliases = ['velocity']
    filenames = ['*.vm', '*.fhtml']

    flags = re.MULTILINE | re.DOTALL

    identifier = r'[a-zA-Z_]\w*'

    tokens = {
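Two class-level details are worth noting: flags replaces the default re.MULTILINE applied to every rule, and identifier is a plain string fragment that later rules splice into larger regexes by ordinary concatenation. A runnable sketch of the same composition trick (VarRefLexer is invented):

import re

from pygments.lexer import RegexLexer
from pygments.token import Name, Text

class VarRefLexer(RegexLexer):
    name = 'VarRef'
    flags = re.MULTILINE | re.DOTALL

    identifier = r'[a-zA-Z_]\w*'

    tokens = {
        'root': [
            (r'\$' + identifier, Name.Variable),   # $foo style references
            (r'[^$]+', Text),
            (r'\$', Text),                         # lone dollar sign
        ],
    }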
github niwinz / Green-Mine / src / extern / pygments / lexers / other.py
'plot': [
            (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
                             'mat$rix', 's$mooth', 'thru$', 't$itle',
                             'not$itle', 'u$sing', 'w$ith'),
             Name.Builtin),
            include('genericargs'),
        ],
        'save': [
            (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
             Name.Builtin),
            include('genericargs'),
        ],
    }
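_shortened_many is a private helper in this lexer module, not part of the public pygments.lexer API: it expands specs like 'ax$es' into a regex accepting any abbreviation of 'axes' down to the mandatory prefix 'ax', mirroring gnuplot's abbreviated commands. A hypothetical re-creation of the idea, not the actual implementation:

def shortened(spec):
    # 'ax$es' -> regex matching 'ax', 'axe' or 'axes'
    mandatory, optional = spec.split('$')
    tail = ''.join('(?:' + ch for ch in optional) + ')?' * len(optional)
    return mandatory + tail + r'\b'

print(shortened('ax$es'))   # ax(?:e(?:s)?)?\b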


class PovrayLexer(RegexLexer):
    """
For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.

    *New in Pygments 0.11.*
    """
    name = 'POVRay'
    aliases = ['pov']
    filenames = ['*.pov', '*.inc']
    mimetypes = ['text/x-povray']

    tokens = {
        'root': [
            (r'/\*[\w\W]*?\*/', Comment.Multiline),
            (r'//.*\n', Comment.Single),
            (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
            (r'#(debug|default|else|end|error|fclose|fopen|if|ifdef|ifndef|'
github wandb / client / wandb / vendor / pygments / lexers / javascript.py View on Github external
(r'\\\\', String.Backtick),
            (r'\\`', String.Backtick),
            (r'\$\{', String.Interpol, 'interp-inside'),
            (r'\$', String.Backtick),
            (r'[^`\\$]+', String.Backtick),
        ],
        'interp-inside': [
            # TODO: should this include single-line comments and allow nesting strings?
            (r'\}', String.Interpol, '#pop'),
            include('root'),
        ],
        # (\\\\|\\`|[^`])*`', String.Backtick),
    }
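Note the ordering inside 'interp-inside': the closing-brace rule comes before include('root'), so within ${...} the '}' rule wins first and pops back into the backtick string, while include('root') re-enters the full grammar and lets arbitrary expressions, including nested template literals, be highlighted inside the interpolation.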


class KalLexer(RegexLexer):
    """
    For `Kal`_ source code.

    .. _Kal: http://rzimmerman.github.io/kal


    .. versionadded:: 2.0
    """

    name = 'Kal'
    aliases = ['kal']
    filenames = ['*.kal']
    mimetypes = ['text/kal', 'application/kal']

    flags = re.DOTALL
    tokens = {
github mmcgrana / gobyexample / third_party / pygments / pygments / lexers / julia.py
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import Lexer, RegexLexer, bygroups, combined, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation, Generic
from pygments.util import shebang_matches, unirange

__all__ = ['JuliaLexer', 'JuliaConsoleLexer']


class JuliaLexer(RegexLexer):
    """
For `Julia <http://julialang.org/>`_ source code.

    .. versionadded:: 1.6
    """
    name = 'Julia'
    aliases = ['julia', 'jl']
    filenames = ['*.jl']
    mimetypes = ['text/x-julia', 'application/x-julia']

    flags = re.MULTILINE | re.UNICODE

    builtins = [
        'exit', 'whos', 'edit', 'load', 'is', 'isa', 'isequal', 'typeof', 'tuple',
        'ntuple', 'uid', 'hash', 'finalizer', 'convert', 'promote', 'subtype',
        'typemin', 'typemax', 'realmin', 'realmax', 'sizeof', 'eps', 'promote_type',
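As with the other lexers on this page, JuliaLexer is reached through the standard entry points. A final sketch:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

print(highlight('f(x) = x^2\n', get_lexer_by_name('julia'), HtmlFormatter()))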