How to use the pygments.token.Punctuation token type in Pygments

To help you get started, we've selected a few Pygments examples based on popular ways pygments.token.Punctuation is used in public projects.

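Note that Punctuation is a token type imported from pygments.token, not a callable: it is used as the action of a rule in a RegexLexer, either on its own or as one of the arguments to bygroups(...). The following minimal sketch is hypothetical (the ToyConfigLexer class and its rules are illustrative only and are not taken from the projects below):

from pygments.lexer import RegexLexer, bygroups
from pygments.token import Name, Punctuation, Text, Whitespace

class ToyConfigLexer(RegexLexer):
    """Toy lexer for `key = value;` lines, tagging the separators as Punctuation."""
    name = 'ToyConfig'

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            # key, '=', value, ';' -- the '=' and ';' are emitted as Punctuation
            (r'([a-z_]\w*)(\s*)(=)(\s*)([^;\n]+)(;)',
             bygroups(Name.Attribute, Whitespace, Punctuation,
                      Whitespace, Text, Punctuation)),
            # any other bare punctuation character
            (r'[{}()\[\],.;]', Punctuation),
            (r'.', Text),
        ],
    }

Calling ToyConfigLexer().get_tokens('width = 120;') should yield the '=' and ';' characters tagged as Token.Punctuation; the real-world lexers below do essentially the same thing with much richer rule sets.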

github wakatime / vim-wakatime / plugin / packages / wakatime / packages / pygments / lexers / dylan.py
            # decimal integer
            (r'[-+]?\d+', Number.Integer),

            # hex integer
            (r'#x[0-9a-f]+', Number.Hex),

            # Macro parameters
            (r'(\?' + valid_name + ')(:)'
             r'(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'\?' + valid_name, Name.Tag),

            # Punctuation
            (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),

            # Most operators are picked up as names and then re-flagged.
            # This one isn't valid in a name though, so we pick it up now.
            (r':=', Operator),

            # Pick up #t / #f before we match other stuff with #.
            (r'#[tf]', Literal),

            # #"foo" style keywords
            (r'#"', String.Symbol, 'keyword'),

            # #rest, #key, #all-keys, etc.
            (r'#[a-z0-9-]+', Keyword),

            # required-init-keyword: style keywords.
            (valid_name + ':', Keyword),
github tmm1 / pygments.rb / vendor / pygments-main / pygments / lexers / c_cpp.py
            (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
            # Open until EOF, so no ending delimiter
            (r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
        ],
        'statements': [
            (r'(L?)(")', bygroups(String.Affix, String), 'string'),
            (r"(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
             bygroups(String.Affix, String.Char, String.Char, String.Char)),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
            (r'0[0-7]+[LlUu]*', Number.Oct),
            (r'\d+[LlUu]*', Number.Integer),
            (r'\*/', Error),
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r'[()\[\],.]', Punctuation),
            (words(('asm', 'auto', 'break', 'case', 'const', 'continue',
                    'default', 'do', 'else', 'enum', 'extern', 'for', 'goto',
                    'if', 'register', 'restricted', 'return', 'sizeof',
                    'static', 'struct', 'switch', 'typedef', 'union',
                    'volatile', 'while'),
                   suffix=r'\b'), Keyword),
            (r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b',
             Keyword.Type),
            (words(('inline', '_inline', '__inline', 'naked', 'restrict',
                    'thread', 'typename'), suffix=r'\b'), Keyword.Reserved),
            # Vector intrinsics
            (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
            # Microsoft-isms
            (words((
                'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl',
                'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try',
github sphinx-contrib / matlabdomain / sphinxcontrib / mat_lexer.py
            (r'[a-zA-Z_]\w*', Name),
            (r'.', Text),
        ],
        'string': [
            (r"[^']*'", String, '#pop'),
        ],
        'blockcomment': [
            (r'^\s*%\}', Comment.Multiline, '#pop'),
            (r'^.*\n', Comment.Multiline),
            (r'.', Comment.Multiline),
        ],
        'deffunc': [
            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
             bygroups(Whitespace, Text, Whitespace, Punctuation,
                      Whitespace, Name.Function, Punctuation, Text,
                      Punctuation, Whitespace), '#pop'),
            # function with no args
            (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
        ],
github pygments / pygments.rb / vendor / pygments-main / pygments / lexers / archetype.py
             r'allow_archetype|use_archetype|use_node)\W', Keyword.Type),
            (r'(and|or|not|there_exists|xor|implies|for_all)\W', Keyword.Type),
            (r'(after|before|closed)\W', Keyword.Type),
            (r'(not)\W', Operator),
            (r'(matches|is_in)\W', Operator),
            # is_in / not is_in char
            ('(\u2208|\u2209)', Operator),
            # there_exists / not there_exists / for_all / and / or
            ('(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\u223C)',
             Operator),
            # regex in slot or as string constraint
            (r'(\{)(\s*/[^}]+/\s*)(\})',
             bygroups(Punctuation, String.Regex, Punctuation)),
            # regex in slot or as string constraint
            (r'(\{)(\s*\^[^}]+\^\s*)(\})',
             bygroups(Punctuation, String.Regex, Punctuation)),
            (r'/', Punctuation, 'path'),
            # for cardinality etc
            (r'(\{)((?:\d+\.\.)?(?:\d+|\*))'
             r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})',
             bygroups(Punctuation, Number, Number, Punctuation)),
            # [{ is start of a tuple value
            (r'\[\{', Punctuation),
            (r'\}\]', Punctuation),
            (r'\{', Punctuation),
            (r'\}', Punctuation),
            include('constraint_values'),
            # type name
            (r'[A-Z]\w+(<[A-Z]\w+([A-Za-z_<>]*)>)?',  Name.Class),
            # attribute name
            (r'[a-z_]\w*', Name.Class),
            (r'\[', Punctuation, 'any_code'),
github tav / pylibs / pygments / lexers / templates.py
            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
            # directives: begin, end
            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation, None)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            #+ minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation, None)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation, None)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation, None)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }
github wakatime / vim-wakatime / plugin / packages / wakatime / packages / py2 / pygments / lexers / console.py
    mimetypes = ['application/x-pypylog']

    tokens = {
        "root": [
            (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"),
            (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"),
            include("extra-stuff"),
        ],
        "jit-log": [
            (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
            (r"^\+\d+: ", Comment),
            (r"--end of the loop--", Comment),
            (r"[ifp]\d+", Name),
            (r"ptr\d+", Name),
            (r"(\()(\w+(?:\.\w+)?)(\))",
             bygroups(Punctuation, Name.Builtin, Punctuation)),
            (r"[\[\]=,()]", Punctuation),
            (r"(\d+\.\d+|inf|-inf)", Number.Float),
            (r"-?\d+", Number.Integer),
            (r"'.*'", String),
            (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
            (r"<.*?>+", Name.Builtin),
            (r"(label|debug_merge_point|jump|finish)", Name.Class),
            (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
             r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
             r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
             r"int_is_true|"
             r"uint_floordiv|uint_ge|uint_lt|"
             r"float_add|float_sub|float_mul|float_truediv|float_neg|"
             r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
             r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
             r"cast_int_to_float|cast_float_to_int|"
github c-okelly / org_to_anki / src / org_to_anki / libs / pygments / lexers / templates.py
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }


class EvoqueHtmlLexer(DelegatingLexer):
github Jenyay / outwiker / plugins / source / source / pygments / lexers / asm.py
def _objdump_lexer_tokens(asm_lexer):
    """
    Common objdump lexer tokens to wrap an ASM lexer.
    """
    hex_re = r'[0-9A-Za-z]'
    return {
        'root': [
            # File name & format:
            ('(.*?)(:)( +file format )(.*?)$',
                bygroups(Name.Label, Punctuation, Text, String)),
            # Section header
            ('(Disassembly of section )(.*?)(:)$',
                bygroups(Text, Name.Label, Punctuation)),
            # Function labels
            # (With offset)
            ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
                bygroups(Number.Hex, Text, Punctuation, Name.Function,
                         Punctuation, Number.Hex, Punctuation)),
            # (Without offset)
            ('('+hex_re+'+)( )(<)(.*?)(>:)$',
                bygroups(Number.Hex, Text, Punctuation, Name.Function,
                         Punctuation)),
            # Code line with disassembled instructions
            ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
                bygroups(Text, Name.Label, Text, Number.Hex, Text,
                         using(asm_lexer))),
            # Code line with ascii
            ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
                bygroups(Text, Name.Label, Text, Number.Hex, Text, String)),
            # Continued code line, only raw opcodes without disassembled
            # instruction
            ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
                bygroups(Text, Name.Label, Text, Number.Hex)),
github wandb / client / wandb / vendor / pygments / lexers / asm.py
def _objdump_lexer_tokens(asm_lexer):
    """
    Common objdump lexer tokens to wrap an ASM lexer.
    """
    hex_re = r'[0-9A-Za-z]'
    return {
        'root': [
            # File name & format:
            ('(.*?)(:)( +file format )(.*?)$',
                bygroups(Name.Label, Punctuation, Text, String)),
            # Section header
            ('(Disassembly of section )(.*?)(:)$',
                bygroups(Text, Name.Label, Punctuation)),
            # Function labels
            # (With offset)
            ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
                bygroups(Number.Hex, Text, Punctuation, Name.Function,
                         Punctuation, Number.Hex, Punctuation)),
            # (Without offset)
            ('('+hex_re+'+)( )(<)(.*?)(>:)$',
                bygroups(Number.Hex, Text, Punctuation, Name.Function,
                         Punctuation)),
            # Code line with disassembled instructions
            ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
                bygroups(Text, Name.Label, Text, Number.Hex, Text,
                         using(asm_lexer))),
github pygments / pygments / pygments / lexers / parsers.py
            include('comments'),
            (r'\{', Punctuation),
            (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
             + r')?(\s*)(;)',
             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
                      String, Whitespace, Punctuation)),
            (r'\}', Punctuation, '#pop'),
        ],
        'options': [
            include('whitespace'),
            include('comments'),
            (r'\{', Punctuation),
            (r'(' + _id + r')(\s*)(=)(\s*)(' +
             '|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
             bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
                      Text, Whitespace, Punctuation)),
            (r'\}', Punctuation, '#pop'),
        ],
        'action': [
            (r'(' + r'|'.join((    # keep host code in largest possible chunks
                r'[^${}\'"/\\]+',  # exclude unsafe characters

                # strings and comments may safely contain unsafe characters
                r'"(\\\\|\\"|[^"])*"',  # double quote string
                r"'(\\\\|\\'|[^'])*'",  # single quote string
                r'//.*$\n?',            # single line comment
                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment

                # regular expression: There's no reason for it to start
                # with a * and this stops confusion with comments.
                r'/(?!\*)(\\\\|\\/|[^/])*/',