"""
    pygments.lexers.urbi
    ~~~~~~~~~~~~~~~~~~~~

    Lexers for the UrbiScript language.

    :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import ExtendedRegexLexer, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation

__all__ = ['UrbiscriptLexer']


class UrbiscriptLexer(ExtendedRegexLexer):
    """
    For UrbiScript source code.

    .. versionadded:: 1.5
    """

    name = 'UrbiScript'
    aliases = ['urbiscript']
    filenames = ['*.u']
    mimetypes = ['application/x-urbiscript']

    flags = re.DOTALL

    # TODO
    # - handle Experimental and deprecated tags with specific tokens
    # - handle Angles and Durations with specific tokens

    def blob_callback(lexer, match, ctx):
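        """Handle binary blobs of the form ``\\B(<size>)(<bytes>)`` inside strings.

        The ``string.double`` and ``string.single`` rules below hand the blob
        header to this callback: when the payload is followed by the closing
        ``)`` the whole blob is emitted as String.Escape, otherwise only the
        ``\\B(<size>)(`` header is emitted as a plain String and lexing
        resumes inside the string.
        """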
        text_before_blob = match.group(1)
        blob_start = match.group(2)
        blob_size_str = match.group(3)
        blob_size = int(blob_size_str)
        yield match.start(), String, text_before_blob
        ctx.pos += len(text_before_blob)

        # if the blob size doesn't match the blob format (example: "\B(2)(aaa)"),
        # yield the blob header as a plain string
        if (match.end() + blob_size >= len(ctx.text)
                or ctx.text[match.end() + blob_size] != ")"):
            result = "\\B(" + blob_size_str + ")("
            yield match.start(), String, result
            ctx.pos += len(result)
            return

        # if the blob is well formed, yield it as String.Escape
        blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
        yield match.start(), String.Escape, blob_text
        ctx.pos = match.end() + blob_size + 1  # +1 is the ending ")"

    tokens = {
        'root': [
            (r'\s+', Text),
            # comments
            (r'//.*?\n', Comment),
            (r'/\*', Comment.Multiline, 'comment'),
            (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
            (words((
                'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
                'continue', 'default', 'else', 'enum', 'every', 'external',
                'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
                'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
                'waituntil', 'whenever', 'while'), suffix=r'\b'),
             Keyword),
            (words((
                'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
                'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
                'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
                'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
                'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
                'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
             Keyword.Reserved),
            # deprecated keywords, use a meaningful token when available
            (r'(emit|foreach|internal|loopn|static)\b', Keyword),
            # ignored keywords, use a meaningful token when available
            (r'(private|protected|public)\b', Keyword),
            (r'(var|do|const|function|class)\b', Keyword.Declaration),
            (r'(true|false|nil|void)\b', Keyword.Constant),
            (words((
                'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
                'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
                'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
                'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
                'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
                'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
                'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
                'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
                'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
                'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
                'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
                'UValue', 'UVar'), suffix=r'\b'),
             Name.Builtin),
            (r'(?:this)\b', Name.Builtin.Pseudo),
            # don't match single | and &
            (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
            (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
             Operator.Word),
            (r'[{}\[\]()]+', Punctuation),
            (r'(?:;|\||,|&|\?|!)+', Punctuation),
            (r'[$a-zA-Z_]\w*', Name.Other),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            # Float, Integer, Angle and Duration
            (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
             r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
            # handle binary blob in strings
            (r'"', String.Double, "string.double"),
            (r"'", String.Single, "string.single"),
        ],
        'string.double': [
            (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
            (r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'),
        ],
        'string.single': [
            (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
            (r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'),
        ],
        # from http://pygments.org/docs/lexerdevelopment/#changing-states
        'comment': [
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ]
    }

    def analyse_text(text):
        """This is fairly similar to C and others, but freezeif and
        waituntil are unique keywords."""
        result = 0

        if 'freezeif' in text:
            result += 0.05

        if 'waituntil' in text:
            result += 0.05

        return result
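

# Minimal usage sketch: when this module is run directly, highlight a short
# UrbiScript snippet with the lexer above. The sample source is illustrative
# only; highlight() and TerminalFormatter are standard Pygments entry points.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = (
        'var greeter = Object.new;\n'
        'function greeter.hello(name) { echo("Hello, " + name); };\n'
        'every (1s) greeter.hello("world"),\n'
    )
    # Tokenize the snippet and print it with ANSI colour codes.
    print(highlight(sample, UrbiscriptLexer(), TerminalFormatter()))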
