How to use Pygments - 10 common examples

To help you get started, we’ve selected a few Pygments examples based on popular ways the library is used in public projects.

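Pygments’ core API is a single highlight() call that pairs a lexer with a formatter. Before the project excerpts below, here is a minimal sketch of that basic usage (not taken from any of the listed projects):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

code = 'print("Hello, world!")'

# Lex the snippet as Python and render it as HTML wrapped in a styled div.
print(highlight(code, PythonLexer(), HtmlFormatter()))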

github wakatime / vim-wakatime / plugin / packages / wakatime / packages / pygments / formatters / html.py (view on GitHub)
            tmp = []
            for arg in args:
                tmp.append((arg and arg + ' ' or '') + cls)
            return ', '.join(tmp)

        styles = [(level, ttype, cls, style)
                  for cls, (style, ttype, level) in iteritems(self.class2style)
                  if cls and style]
        styles.sort()
        lines = ['%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
                 for (level, ttype, cls, style) in styles]
        if arg and not self.nobackground and \
           self.style.background_color is not None:
            text_style = ''
            if Text in self.ttype2class:
                text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
            lines.insert(0, '%s { background: %s;%s }' %
                         (prefix(''), self.style.background_color, text_style))
        if self.style.highlight_color is not None:
            lines.insert(0, '%s.hll { background-color: %s }' %
                         (prefix(''), self.style.highlight_color))
        return '\n'.join(lines)
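This excerpt is the internal CSS-building code behind HtmlFormatter; from user code the same stylesheet is normally obtained through the public get_style_defs() method. A minimal sketch:

from pygments.formatters import HtmlFormatter

formatter = HtmlFormatter(style='default', cssclass='highlight')
# Pass a selector so the generated rules are scoped to the wrapping element.
print(formatter.get_style_defs('.highlight'))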

github pygments / pygments / tests / test_perllexer.py (view on GitHub)
def test_double_quote_strings(lexer):
    assert_single_token(lexer, r'"foo\tbar\\\"baz"', String)
    assert_fast_tokenization(lexer, '"' + '\\'*999)
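assert_single_token and assert_fast_tokenization are helpers defined elsewhere in this test module; the gist of the first assertion can be reproduced directly with get_tokens_unprocessed(). A rough sketch:

from pygments.lexers import PerlLexer
from pygments.token import String

source = r'"foo\tbar\\\"baz"'
tokens = list(PerlLexer().get_tokens_unprocessed(source))

# The whole double-quoted literal should come back as a single String token.
assert len(tokens) == 1
index, token_type, value = tokens[0]
assert token_type == String and value == source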

github wakatime / sublime-wakatime / packages / wakatime / packages / pygments / lexers / shell.py (view on GitHub)
        'disable debug cxnew copy convertto convertfrom convert connect '
        'complete compare clear checkpoint aggregate add').split()

    commenthelp = (
        'component description example externalhelp forwardhelpcategory '
        'forwardhelptargetname functionality inputs link '
        'notes outputs parameter remotehelprunspace role synopsis').split()

    tokens = {
        'root': [
            # we need to count pairs of parentheses for correct highlight
            # of '$(...)' blocks in strings
            (r'\(', Punctuation, 'child'),
            (r'\s+', Text),
            (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
             bygroups(Comment, String.Doc, Comment)),
            (r'#[^\n]*?$', Comment),
            (r'(&lt;|<)#', Comment.Multiline, 'multline'),
            (r'@"\n', String.Heredoc, 'heredoc-double'),
            (r"@'\n.*?\n'@", String.Heredoc),
            # escaped syntax
            (r'`[\'"$@-]', Punctuation),
            (r'"', String.Double, 'string'),
            (r"'([^']|'')*'", String.Single),
            (r'(\$|@@|@)((global|script|private|env):)?\w+',
             Name.Variable),
            (r'(%s)\b' % '|'.join(keywords), Keyword),
            (r'-(%s)\b' % '|'.join(operators), Operator),
            (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
            (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant),  # .net [type]s
            (r'-[a-z_]\w*', Name),
            (r'\w+', Name),
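The snippet above is a typical RegexLexer token table: each state maps regexes to token types, bygroups() splits one match across several types, and a third tuple element pushes another state. A stripped-down custom lexer built the same way might look like this (the key/value grammar is made up purely for illustration):

from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Name, Operator, String, Text

class TinyConfLexer(RegexLexer):
    """Illustrative lexer for a made-up 'key = value' format."""
    name = 'TinyConf'
    aliases = ['tinyconf']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'#[^\n]*', Comment.Single),
            # key, optional spaces, '=', then switch to the 'value' state
            (r'([A-Za-z_]\w*)(\s*)(=)',
             bygroups(Name.Attribute, Text, Operator), 'value'),
        ],
        'value': [
            (r'[^\n]+', String),
            (r'\n', Text, '#pop'),
        ],
    }

Feeding list(TinyConfLexer().get_tokens('port = 8080\n')) through this yields Name.Attribute, Operator, and String tokens plus whitespace.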

github gleitz / howdoi / howdoi / howdoi.py (view on GitHub)
    # or the query arguments
    for keyword in args['query'].split() + args['tags']:
        try:
            lexer = get_lexer_by_name(keyword)
            break
        except ClassNotFound:
            pass

    # no lexer found above, use the guesser
    if not lexer:
        try:
            lexer = guess_lexer(code)
        except ClassNotFound:
            return code

    return highlight(code,
                     lexer,
                     TerminalFormatter(bg='dark'))
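The pattern here is a common defensive one: look the lexer up by a likely keyword with get_lexer_by_name(), fall back to guess_lexer() on the code itself, and return the text unhighlighted if neither works (lexer is initialised to None before this excerpt). The excerpt relies on imports along these lines:

from pygments import highlight
from pygments.formatters.terminal import TerminalFormatter
from pygments.lexers import get_lexer_by_name, guess_lexer
from pygments.util import ClassNotFound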

github tmm1 / pygments.rb / vendor / pygments-main / scripts / debug_lexer.py (view on GitHub)
def main(fn, lexer=None, options={}):
    if lexer is not None:
        lxcls = get_lexer_by_name(lexer).__class__
    else:
        lxcls = find_lexer_class_for_filename(os.path.basename(fn))
        if lxcls is None:
            name, rest = fn.split('_', 1)
            lxcls = find_lexer_class(name)
            if lxcls is None:
                raise AssertionError('no lexer found for file %r' % fn)
        print('Using lexer: %s (%s.%s)' % (lxcls.name, lxcls.__module__,
                                           lxcls.__name__))
    debug_lexer = False
    # if profile:
    #     # does not work for e.g. ExtendedRegexLexers
    #     if lxcls.__bases__ == (RegexLexer,):
    #         # yes we can!  (change the metaclass)
    #         lxcls.__class__ = ProfilingRegexLexerMeta
    #         lxcls.__bases__ = (ProfilingRegexLexer,)
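The lookups used here are all public API: get_lexer_by_name() resolves an alias and raises ClassNotFound for unknown ones, while find_lexer_class_for_filename() and find_lexer_class() return lexer classes rather than instances. A quick sketch of the same calls outside the debug script:

from pygments.lexers import find_lexer_class_for_filename, get_lexer_by_name

# Filename-based lookup returns a lexer class, or None if nothing matches.
lexer_cls = find_lexer_class_for_filename('example.py')
print(lexer_cls.name if lexer_cls else 'no lexer found')

# Alias-based lookup returns a lexer instance.
lexer = get_lexer_by_name('python3')
print(type(lexer).__name__)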

github mmcgrana / gobyexample / third_party / pygments / pygments / lexers / shell.py (view on GitHub)
             Comment.Single)),
            (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
            (_space, using(this, state='text')),
            include('redirect%s' % suffix),
            (r'[%s]+' % _nl, Text),
            (r'\(', Punctuation, 'root/compound'),
            (r'@+', Punctuation),
            (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
             r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
             (_nl, _token_terminator, _space,
              _core_token_compound if compound else _core_token, _nl, _nl),
             bygroups(Keyword, using(this, state='text')),
             'follow%s' % suffix),
            (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
             (_keyword_terminator, rest, _nl, _nl, rest),
             bygroups(Keyword, using(this, state='text')),
             'follow%s' % suffix),
            (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
                    'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
                    'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
                    'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
                    'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
                    'title', 'type', 'ver', 'verify', 'vol'),
                   suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
            (r'(call)(%s?)(:)' % _space,
             bygroups(Keyword, using(this, state='text'), Punctuation),
             'call%s' % suffix),
            (r'call%s' % _keyword_terminator, Keyword),
            (r'(for%s(?!\^))(%s)(/f%s)' %
             (_token_terminator, _space, _token_terminator),
             bygroups(Keyword, using(this, state='text'), Keyword),
             ('for/f', 'for')),

github cylc / cylc-flow / doc / src / custom / cylc_lang.py (view on GitHub)
            (r'\!\w+', Other),
            (r'\s', Text),
            (r'=>', Operator),
            (r'[\&\|]', Operator),
            (r'[\(\)]', Punctuation),
            (r'\[', Text, 'intercycle-offset'),
            (r'.', Comment)
        ],

        'inter-suite-trigger': [
            (r'(\<)'
             r'([^\>]+)'  # foreign suite
             r'(::)'
             r'([^\>]+)'  # foreign task
             r'(\>)',
             bygroups(Text, EXTERNAL_SUITE_TOKEN, Text,
                      PARAMETERISED_TASK_TOKEN, Text)),
        ],

        # Parameterised syntax:  
        'parameterisation': [
            (r'(\<)'  # Opening angle bracket.
             r'(\s?\w+\s?'  # Parameter name (permit whitespace).
             r'(?:[+-=]\s?\w+)?'  # [+-=] for selecting parameters.
             r'\s?'  # Permit whitespace.
             r'(?:'  # BEGIN optional extra parameter groups...
             r'(?:\s?,\s?\w+\s?'  # Comma separated parameters.
             r'(?:[+-=]\s?\w+)?'  # [+-=] for selecting parameters.
             r'\s?)'  # Permit whitespace.
             r'+)?'  # ...END optional extra parameter groups.
             r')(\>)',  # Closing angle bracket.
             bygroups(Text, PARAMETERISED_TASK_TOKEN, Text)),

github OpenCobolIDE / OpenCobolIDE / open_cobol_ide / extlibs / pygments / lexers / templates.py (view on GitHub)
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']

    tokens = {
        'root': [
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
            (r'(<%)([\w.:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
            (r'(<%(?:!?))(.*?)(%>)(?s)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block

github mmcgrana / gobyexample / vendor / pygments / pygments / lexers / compiled.py (view on GitHub)
        'literals': [
            (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number),   #Duration
            (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number),
                                                             #Duration with dot
            (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float),    #Float/Decimal
            (r'\b-?0x[0-9a-fA-F_]+', Number.Hex),            #Hex
            (r'\b-?[\d_]+', Number.Integer),                 #Int
            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), #Char
            (r'"', Punctuation, 'insideStr'),                #Opening quote
            (r'`', Punctuation, 'insideUri'),                #Opening accent
            (r'\b(true|false|null)\b', Keyword.Constant),    #Bool & null
            (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)',          #DSL
             bygroups(Name.Namespace, Punctuation, Name.Class,
                      Punctuation, String, Punctuation)),
            (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?',               #Type/slot literal
             bygroups(Name.Namespace, Punctuation, Name.Class,
                      Punctuation, Name.Function)),
            (r'\[,\]', Literal),                             # Empty list
            (s(r'($type)(\[,\])'),                           # Typed empty list
             bygroups(using(this, state = 'inType'), Literal)),
            (r'\[:\]', Literal),                             # Empty Map
            (s(r'($type)(\[:\])'),
             bygroups(using(this, state = 'inType'), Literal)),
        ],
        'insideStr': [
            (r'\\\\', String.Escape),                        #Escaped backslash
            (r'\\"', String.Escape),                         #Escaped "
            (r'\\`', String.Escape),                         #Escaped `
            (r'\$\w+', String.Interpol),                     #Subst var
            (r'\${.*?}', String.Interpol),                   #Subst expr
            (r'"', Punctuation, '#pop'),                     #Closing quot
            (r'.', String)                                   #String content