# /usr/lib/python3/dist-packages/pygments/lexers/diff.py
"""
    pygments.lexers.diff
    ~~~~~~~~~~~~~~~~~~~~

    Lexers for diff/patch formats.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import RegexLexer, include, bygroups
from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
    Literal, Whitespace

__all__ = ['DiffLexer', 'DarcsPatchLexer', 'WDiffLexer']


class DiffLexer(RegexLexer):
    """
    Lexer for unified or context-style diffs or patches.
    """

    name = 'Diff'
    aliases = ['diff', 'udiff']
    filenames = ['*.diff', '*.patch']
    mimetypes = ['text/x-diff', 'text/x-patch']

    tokens = {
        'root': [
            (r'( )(.*)(\n)', bygroups(Whitespace, Text, Whitespace)),
            (r'(\+.*)(\n)', bygroups(Generic.Inserted, Whitespace)),
            (r'(-.*)(\n)', bygroups(Generic.Deleted, Whitespace)),
            (r'(!.*)(\n)', bygroups(Generic.Strong, Whitespace)),
            (r'(@.*)(\n)', bygroups(Generic.Subheading, Whitespace)),
            (r'((?:[Ii]ndex|diff).*)(\n)', bygroups(Generic.Heading, Whitespace)),
            (r'(=.*)(\n)', bygroups(Generic.Heading, Whitespace)),
            (r'(.*)(\n)', bygroups(Text, Whitespace)),
        ]
    }

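    # Note (added comment): analyse_text() is consulted by guess_lexer(); it
    # should return a confidence value between 0 and 1, and returning True is
    # treated as 1.0.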
    def analyse_text(text):
        if text[:7] == 'Index: ':
            return True
        if text[:5] == 'diff ':
            return True
        if text[:4] == '--- ':
            return 0.9
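

# --- Hedged usage sketch, not part of the original Pygments module. ---
# It shows DiffLexer driven through the standard Lexer API; the helper name
# _demo_diff_lexer is hypothetical, while get_tokens() is the regular
# pygments.lexer.Lexer method.
def _demo_diff_lexer():
    sample = (
        "--- a/foo.txt\n"
        "+++ b/foo.txt\n"
        "@@ -1 +1 @@\n"
        "-old line\n"
        "+new line\n"
    )
    for token_type, value in DiffLexer().get_tokens(sample):
        print(token_type, repr(value))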


class DarcsPatchLexer(RegexLexer):
    """
    DarcsPatchLexer is a lexer for the various versions of the darcs patch
    format.  Examples of this format are produced by commands such as
    ``darcs annotate --patch`` and ``darcs send``.

    .. versionadded:: 0.10
    """

    name = 'Darcs Patch'
    aliases = ['dpatch']
    filenames = ['*.dpatch', '*.darcspatch']

    DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
                       'replace')

    tokens = {
        'root': [
            (r'<', Operator),
            (r'>', Operator),
            (r'\{', Operator),
            (r'\}', Operator),
            (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
             bygroups(Operator, Keyword, Name, Whitespace, Name, Operator,
                      Literal.Date, Whitespace, Operator)),
            (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
             bygroups(Operator, Keyword, Name, Whitespace, Name, Operator,
                      Literal.Date, Whitespace), 'comment'),
            (r'New patches:', Generic.Heading),
            (r'Context:', Generic.Heading),
            (r'Patch bundle hash:', Generic.Heading),
            (r'(\s*)(%s)(.*)(\n)' % '|'.join(DPATCH_KEYWORDS),
                bygroups(Whitespace, Keyword, Text, Whitespace)),
            (r'\+', Generic.Inserted, "insert"),
            (r'-', Generic.Deleted, "delete"),
            (r'(.*)(\n)', bygroups(Text, Whitespace)),
        ],
        'comment': [
            (r'[^\]].*\n', Comment),
            (r'\]', Operator, "#pop"),
        ],
        'specialText': [            # darcs adds [_CODE_] special operators for clarity
            (r'\n', Whitespace, "#pop"),  # line-based
            (r'\[_[^_]*_]', Operator),
        ],
        'insert': [
            include('specialText'),
            (r'\[', Generic.Inserted),
            (r'[^\n\[]+', Generic.Inserted),
        ],
        'delete': [
            include('specialText'),
            (r'\[', Generic.Deleted),
            (r'[^\n\[]+', Generic.Deleted),
        ],
    }
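

# --- Hedged usage sketch, not part of the original Pygments module. ---
# The input is a hand-written, darcs-style snippet that only exercises the
# 'New patches:', keyword, '-' and '+' rules defined above; it is not taken
# from real ``darcs send`` output, and _demo_darcs_patch_lexer is a
# hypothetical helper name.
def _demo_darcs_patch_lexer():
    sample = (
        "New patches:\n"
        "hunk ./foo.txt 1\n"
        "-old line\n"
        "+new line\n"
    )
    for token_type, value in DarcsPatchLexer().get_tokens(sample):
        print(token_type, repr(value))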


class WDiffLexer(RegexLexer):
    """
    A `wdiff <https://www.gnu.org/software/wdiff/>`_ lexer.

    Note that:

    * It only works with normal output (without options like ``-l``).
    * If the target files contain "[-", "-]", "{+", or "+}",
      especially if they are unbalanced, the lexer will get confused.

    .. versionadded:: 2.2
    """

    name = 'WDiff'
    aliases = ['wdiff']
    filenames = ['*.wdiff']
    mimetypes = []

    flags = re.MULTILINE | re.DOTALL

    # We can only assume that a "[-" appearing after a "[-" and before the
    # matching "-]" is `nested`, as happens when wdiff output is fed through
    # wdiff again.  We have no way to distinguish markers produced by wdiff
    # from ones already present in the original text.

    ins_op = r"\{\+"
    ins_cl = r"\+\}"
    del_op = r"\[\-"
    del_cl = r"\-\]"
    normal = r'[^{}[\]+-]+'  # for performance
    tokens = {
        'root': [
            (ins_op, Generic.Inserted, 'inserted'),
            (del_op, Generic.Deleted, 'deleted'),
            (normal, Text),
            (r'.', Text),
        ],
        'inserted': [
            (ins_op, Generic.Inserted, '#push'),
            (del_op, Generic.Inserted, '#push'),
            (del_cl, Generic.Inserted, '#pop'),

            (ins_cl, Generic.Inserted, '#pop'),
            (normal, Generic.Inserted),
            (r'.', Generic.Inserted),
        ],
        'deleted': [
            (del_op, Generic.Deleted, '#push'),
            (ins_op, Generic.Deleted, '#push'),
            (ins_cl, Generic.Deleted, '#pop'),

            (del_cl, Generic.Deleted, '#pop'),
            (normal, Generic.Deleted),
            (r'.', Generic.Deleted),
        ],
    }
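

# --- Hedged usage sketch, not part of the original Pygments module. ---
# The {+...+} and [-...-] markers below mirror the ins_op/ins_cl/del_op/del_cl
# patterns defined in WDiffLexer above; _demo_wdiff_lexer and the __main__
# block are illustrative additions only.
def _demo_wdiff_lexer():
    sample = "unchanged text [-deleted words-] more text {+inserted words+}\n"
    for token_type, value in WDiffLexer().get_tokens(sample):
        print(token_type, repr(value))


if __name__ == '__main__':
    _demo_diff_lexer()
    _demo_darcs_patch_lexer()
    _demo_wdiff_lexer()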
