🐐 GOAT Shell

Current path: tmp/



⬆️ Go up:

📄 Viewing: phpHz3pEI

U

��.e�@sdZdS)z19.3.1N)�__version__�rr�0/usr/lib/python3.8/site-packages/pip/__init__.py�<module>�U

��.e�@sdZdS)z19.3.1N)�__version__�rr�0/usr/lib/python3.8/site-packages/pip/__init__.py�<module>�U

��.et�@sjddlmZddlZddlZedkrFej�ej�e��Zej�de�ddl	m
Zedkrfe�
e��dS)�)�absolute_importN�)�main�__main__)Z
__future__r�os�sys�__package__�path�dirname�__file__�insertZpip._internal.mainrZ_main�__name__�exit�rr�0/usr/lib/python3.8/site-packages/pip/__main__.py�<module>sU

��.et�@sjddlmZddlZddlZedkrFej�ej�e��Zej�de�ddl	m
Zedkrfe�
e��dS)�)�absolute_importN�)�main�__main__)Z
__future__r�os�sys�__package__�path�dirname�__file__�insertZpip._internal.mainrZ_main�__name__�exit�rr�0/usr/lib/python3.8/site-packages/pip/__main__.py�<module>s#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

# True when running under Python 3 (used to choose winreg vs _winreg, etc.).
PY3 = sys.version_info[0] == 3

if PY3:
    # Py2-compatibility alias so the rest of the module can call unicode()
    # unconditionally (see _get_win_folder_with_pywin32).
    unicode = str

if sys.platform.startswith('java'):
    # Jython: sys.platform is "java...", so derive the real OS from the JVM.
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'): # "Mac OS X", etc.
        system = 'darwin'
    else: # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform



def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        # Use a proper conditional expression instead of the legacy
        # "cond and a or b" trick, which misbehaves when "a" is falsy.
        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG spec: $XDG_DATA_HOME with ~/.local/share as the fallback.
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            the owning company name; falls back to appname. Pass False to
            disable it.
        "version" is an optional version path element to append to the
            path (typically "<major>.<minor>"). Only applied when appname
            is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical site data directories are:
        Mac OS X:   /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            segments = [path, appname] if appauthor is False \
                else [path, appauthor, appname]
            path = os.path.join(*segments)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG spec: $XDG_DATA_DIRS is a colon-separated list; when unset,
        # default to /usr/local/share and /usr/share.
        raw = os.getenv('XDG_DATA_DIRS',
                        os.pathsep.join(['/usr/local/share', '/usr/share']))
        candidates = [os.path.expanduser(d.rstrip(os.sep))
                      for d in raw.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            candidates = [os.sep.join([d, appname]) for d in candidates]

        # Either the whole list joined back together, or just the first hit.
        return os.pathsep.join(candidates) if multipath else candidates[0]

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            the owning company name; falls back to appname. Pass False to
            disable it.
        "version" is an optional version path element to append to the
            path (typically "<major>.<minor>"). Only applied when appname
            is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory, so the data is sync'd on login for
            users on a roaming-profile network. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user config directories are:
        Mac OS X:               same as user_data_dir
        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if system == "win32" or system == "darwin":
        # Windows and macOS do not distinguish config from data.
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        base = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        path = os.path.join(base, appname) if appname else base
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            the owning company name; falls back to appname. Pass False to
            disable it.
        "version" is an optional version path element to append to the
            path (typically "<major>.<minor>"). Only applied when appname
            is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical site config directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32" or system == "darwin":
        # Windows and macOS do not distinguish config from data.
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
        return path

    # XDG spec: $XDG_CONFIG_DIRS is a colon-separated list; /etc/xdg when unset.
    raw = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
    candidates = [os.path.expanduser(d.rstrip(os.sep))
                  for d in raw.split(os.pathsep)]
    if appname:
        suffix = os.path.join(appname, version) if version else appname
        candidates = [os.sep.join([d, suffix]) for d in candidates]

    # Either the whole list joined back together, or just the first hit.
    return os.pathsep.join(candidates) if multipath else candidates[0]


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            the owning company name; falls back to appname. Pass False to
            disable it.
        "version" is an optional version path element to append to the
            path (typically "<major>.<minor>"). Only applied when appname
            is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            segments = [path, appname] if appauthor is False \
                else [path, appauthor, appname]
            if opinion:
                segments.append("Cache")
            path = os.path.join(*segments)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        base = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        path = os.path.join(base, appname) if appname else base
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific state dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            the owning company name; falls back to appname. Pass False to
            disable it.
        "version" is an optional version path element to append to the
            path (typically "<major>.<minor>"). Only applied when appname
            is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory, so the data is sync'd on login for
            users on a roaming-profile network. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user state directories are:
        Mac OS X:  same as user_data_dir
        Unix:      ~/.local/state/<AppName>   # or in $XDG_STATE_HOME, if defined
        Win *:     same as user_data_dir

    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    to extend the XDG spec and support $XDG_STATE_HOME.

    That means, by default "~/.local/state/<AppName>".
    """
    if system == "win32" or system == "darwin":
        # Windows and macOS do not distinguish state from data.
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        base = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        path = os.path.join(base, appname) if appname else base
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.expanduser('~/Library/Logs')
        # BUG FIX: the docstring promises just the system directory when
        # appname is None; previously os.path.join(..., None) raised
        # TypeError on this branch.
        if appname:
            path = os.path.join(path, appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False  # version already applied by user_data_dir
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False  # version already applied by user_cache_dir
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper bundling the module-level dir functions.

    Stores appname/appauthor/version/roaming/multipath once and exposes
    each directory as a read-only property.
    """

    def __init__(self, appname=None, appauthor=None, version=None,
            roaming=False, multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        """User-specific data directory."""
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        """Shared (site-wide) data directory."""
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        """User-specific config directory."""
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        """Shared (site-wide) config directory."""
        return site_config_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        """User-specific cache directory."""
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        """User-specific state directory."""
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        """User-specific log directory."""
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """Fallback: read the shell-folder path from the per-user registry.

    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    try:
        # Renamed from "dir"/"type" to avoid shadowing the built-ins.
        directory, _value_type = _winreg.QueryValueEx(key, shell_folder_name)
    finally:
        # Close the handle explicitly instead of relying on GC (works on
        # both the Py2 _winreg and Py3 winreg modules).
        _winreg.CloseKey(key)
    return directory


def _get_win_folder_with_pywin32(csidl_name):
    """Resolve a CSIDL folder via the pywin32 shell bindings."""
    from win32com.shell import shellcon, shell
    folder = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        folder = unicode(folder)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        if any(ord(ch) > 255 for ch in folder):
            try:
                import win32api
                folder = win32api.GetShortPathName(folder)
            except ImportError:
                # win32api unavailable; keep the long path as-is.
                pass
    except UnicodeError:
        pass
    return folder


def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL folder by calling SHGetFolderPathW via ctypes."""
    import ctypes

    # Numeric CSIDL constants, so we don't need pywin32's shellcon.
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(ch) > 255 for ch in buf):
        alt = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, alt, 1024):
            buf = alt

    return buf.value

def _get_win_folder_with_jna(csidl_name):
    """Resolve a CSIDL folder under Jython using the JNA win32 bindings."""
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    win32.Shell32.INSTANCE.SHGetFolderPath(
        None, getattr(win32.ShlObj, csidl_name), None,
        win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    folder = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(ch) > 255 for ch in folder):
        buf = array.zeros('c', buf_size)
        if win32.Kernel32.INSTANCE.GetShortPathName(folder, buf, buf_size):
            folder = jna.Native.toString(buf.tostring()).rstrip("\0")

    return folder

if system == "win32":
    # Pick the best available backend for resolving Windows shell folders:
    # ctypes (CPython on Windows), then JNA (Jython), then the registry
    # as a last resort.
    try:
        from ctypes import windll
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        try:
            import com.sun.jna
            _get_win_folder = _get_win_folder_with_jna
        except ImportError:
            _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    # Smoke test: print every directory property for a sample app under
    # several combinations of the optional arguments.
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    def _dump(dirs):
        # Print each property of one AppDirs instance.
        for prop in props:
            print("%s: %s" % (prop, getattr(dirs, prop)))

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    _dump(AppDirs(appname, appauthor, version="1.0"))

    print("\n-- app dirs (without optional 'version')")
    _dump(AppDirs(appname, appauthor))

    print("\n-- app dirs (without optional 'appauthor')")
    _dump(AppDirs(appname))

    print("\n-- app dirs (with disabled 'appauthor')")
    _dump(AppDirs(appname, appauthor=False))
"""
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
"""
from threading import Lock


class BaseCache(object):
    """Abstract cache interface: subclasses must provide get/set/delete."""

    def get(self, key):
        """Return the stored value for *key*. Must be overridden."""
        raise NotImplementedError()

    def set(self, key, value):
        """Store *value* under *key*. Must be overridden."""
        raise NotImplementedError()

    def delete(self, key):
        """Remove *key* from the cache. Must be overridden."""
        raise NotImplementedError()

    def close(self):
        """Release any resources held by the cache. No-op by default."""
        pass


class DictCache(BaseCache):
    """In-memory cache backed by a plain dict, with a lock around writes."""

    def __init__(self, init_dict=None):
        self.lock = Lock()
        # A falsy init_dict (None or {}) results in a fresh empty dict.
        self.data = init_dict or {}

    def get(self, key):
        # Lookups take no lock; only mutations are serialized.
        return self.data.get(key, None)

    def set(self, key, value):
        with self.lock:
            self.data.update({key: value})

    def delete(self, key):
        with self.lock:
            # pop with a default is a no-op when the key is absent,
            # matching the original membership-checked pop.
            self.data.pop(key, None)
"""
The httplib2 algorithms ported for use with requests.
"""
import logging
import re
import calendar
import time
from email.utils import parsedate_tz

from pip._vendor.requests.structures import CaseInsensitiveDict

from .cache import DictCache
from .serialize import Serializer


logger = logging.getLogger(__name__)

# URI-splitting regex from Appendix B of RFC 3986: captures scheme,
# authority, path, query and fragment in fixed group positions.
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")


def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    parts = URI.match(uri).groups()
    # Groups 1/3/4/6/8 hold the undecorated component values; the
    # even-numbered groups include the ":"/"//"/"?"/"#" punctuation.
    scheme, authority, path, query, fragment = (
        parts[1], parts[3], parts[4], parts[6], parts[8])
    return (scheme, authority, path, query, fragment)


class CacheController(object):
    """Decides whether responses should be cached, served from cache,
    or purged.  Logic ported from httplib2's caching algorithms.
    """

    def __init__(
        self, cache=None, cache_etags=True, serializer=None, status_codes=None
    ):
        # Default to an in-memory cache and the standard serializer
        # when none are supplied by the caller.
        self.cache = cache or DictCache()
        self.cache_etags = cache_etags
        self.serializer = serializer or Serializer()
        # Response status codes eligible for caching by default.
        self.cacheable_status_codes = status_codes or (200, 203, 300, 301)

    @classmethod
    def _urlnorm(cls, uri):
        """Normalize the URL to create a safe key for the cache"""
        (scheme, authority, path, query, fragment) = parse_uri(uri)
        if not scheme or not authority:
            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)

        # Scheme and authority are case-insensitive, so lowercase them
        # to avoid duplicate cache entries for equivalent URLs.
        scheme = scheme.lower()
        authority = authority.lower()

        if not path:
            path = "/"

        # Could do syntax based normalization of the URI before
        # computing the digest. See Section 6.2.2 of Std 66.
        request_uri = query and "?".join([path, query]) or path
        defrag_uri = scheme + "://" + authority + request_uri

        return defrag_uri

    @classmethod
    def cache_url(cls, uri):
        """Return the normalized cache key for *uri* (fragment dropped)."""
        return cls._urlnorm(uri)

    def parse_cache_control(self, headers):
        """Parse the Cache-Control header into a ``{directive: value}`` dict.

        Unknown directives are ignored.  Valueless directives map to
        None; valued ones are coerced with the type registered below.
        """
        known_directives = {
            # https://tools.ietf.org/html/rfc7234#section-5.2
            "max-age": (int, True),
            "max-stale": (int, False),
            "min-fresh": (int, True),
            "no-cache": (None, False),
            "no-store": (None, False),
            "no-transform": (None, False),
            "only-if-cached": (None, False),
            "must-revalidate": (None, False),
            "public": (None, False),
            "private": (None, False),
            "proxy-revalidate": (None, False),
            "s-maxage": (int, True),
        }

        cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))

        retval = {}

        for cc_directive in cc_headers.split(","):
            if not cc_directive.strip():
                continue

            parts = cc_directive.split("=", 1)
            directive = parts[0].strip()

            try:
                typ, required = known_directives[directive]
            except KeyError:
                logger.debug("Ignoring unknown cache-control directive: %s", directive)
                continue

            # Directives without a value type, or whose value is
            # optional, default to None; a parsed value (below) may
            # overwrite this default.
            if not typ or not required:
                retval[directive] = None
            if typ:
                try:
                    retval[directive] = typ(parts[1].strip())
                except IndexError:
                    if required:
                        logger.debug(
                            "Missing value for cache-control " "directive: %s",
                            directive,
                        )
                except ValueError:
                    logger.debug(
                        "Invalid value for cache-control directive " "%s, must be %s",
                        directive,
                        typ.__name__,
                    )

        return retval

    def cached_request(self, request):
        """
        Return a cached response if it exists in the cache, otherwise
        return False.
        """
        cache_url = self.cache_url(request.url)
        logger.debug('Looking up "%s" in the cache', cache_url)
        cc = self.parse_cache_control(request.headers)

        # Bail out if the request insists on fresh data
        if "no-cache" in cc:
            logger.debug('Request header has "no-cache", cache bypassed')
            return False

        if "max-age" in cc and cc["max-age"] == 0:
            logger.debug('Request header has "max_age" as 0, cache bypassed')
            return False

        # Request allows serving from the cache, let's see if we find something
        cache_data = self.cache.get(cache_url)
        if cache_data is None:
            logger.debug("No cache entry available")
            return False

        # Check whether it can be deserialized
        resp = self.serializer.loads(request, cache_data)
        if not resp:
            logger.warning("Cache entry deserialization failed, entry ignored")
            return False

        # If we have a cached 301, return it immediately. We don't
        # need to test our response for other headers b/c it is
        # intrinsically "cacheable" as it is Permanent.
        # See:
        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
        #
        # Client can try to refresh the value by repeating the request
        # with cache busting headers as usual (ie no-cache).
        if resp.status == 301:
            msg = (
                'Returning cached "301 Moved Permanently" response '
                "(ignoring date and etag information)"
            )
            logger.debug(msg)
            return resp

        headers = CaseInsensitiveDict(resp.headers)
        if not headers or "date" not in headers:
            if "etag" not in headers:
                # Without date or etag, the cached response can never be used
                # and should be deleted.
                logger.debug("Purging cached response: no date or etag")
                self.cache.delete(cache_url)
            # NOTE(review): an entry with an etag but no date falls
            # through here — presumably it is revalidated later via
            # conditional_headers(); confirm against the adapter.
            logger.debug("Ignoring cached response: no date")
            return False

        now = time.time()
        date = calendar.timegm(parsedate_tz(headers["date"]))
        current_age = max(0, now - date)
        logger.debug("Current age based on date: %i", current_age)

        # TODO: There is an assumption that the result will be a
        #       urllib3 response object. This may not be best since we
        #       could probably avoid instantiating or constructing the
        #       response until we know we need it.
        resp_cc = self.parse_cache_control(headers)

        # determine freshness
        freshness_lifetime = 0

        # Check the max-age pragma in the cache control header
        if "max-age" in resp_cc:
            freshness_lifetime = resp_cc["max-age"]
            logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)

        # If there isn't a max-age, check for an expires header
        elif "expires" in headers:
            expires = parsedate_tz(headers["expires"])
            if expires is not None:
                expire_time = calendar.timegm(expires) - date
                freshness_lifetime = max(0, expire_time)
                logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)

        # Determine if we are setting freshness limit in the
        # request. Note, this overrides what was in the response.
        if "max-age" in cc:
            freshness_lifetime = cc["max-age"]
            logger.debug(
                "Freshness lifetime from request max-age: %i", freshness_lifetime
            )

        if "min-fresh" in cc:
            min_fresh = cc["min-fresh"]
            # adjust our current age by our min fresh
            current_age += min_fresh
            logger.debug("Adjusted current age from min-fresh: %i", current_age)

        # Return entry if it is fresh enough
        if freshness_lifetime > current_age:
            logger.debug('The response is "fresh", returning cached response')
            logger.debug("%i > %i", freshness_lifetime, current_age)
            return resp

        # we're not fresh. If we don't have an Etag, clear it out
        if "etag" not in headers:
            logger.debug('The cached response is "stale" with no etag, purging')
            self.cache.delete(cache_url)

        # return the original handler
        return False

    def conditional_headers(self, request):
        """Build validation headers (If-None-Match / If-Modified-Since)
        from a cached response, for issuing a conditional request.
        Returns an empty dict when nothing usable is cached.
        """
        cache_url = self.cache_url(request.url)
        resp = self.serializer.loads(request, self.cache.get(cache_url))
        new_headers = {}

        if resp:
            headers = CaseInsensitiveDict(resp.headers)

            if "etag" in headers:
                new_headers["If-None-Match"] = headers["ETag"]

            if "last-modified" in headers:
                new_headers["If-Modified-Since"] = headers["Last-Modified"]

        return new_headers

    def cache_response(self, request, response, body=None, status_codes=None):
        """
        Algorithm for caching requests.

        This assumes a requests Response object.
        """
        # From httplib2: Don't cache 206's since we aren't going to
        #                handle byte range requests
        cacheable_status_codes = status_codes or self.cacheable_status_codes
        if response.status not in cacheable_status_codes:
            logger.debug(
                "Status code %s not in %s", response.status, cacheable_status_codes
            )
            return

        response_headers = CaseInsensitiveDict(response.headers)

        # If we've been given a body, our response has a Content-Length, that
        # Content-Length is valid then we can check to see if the body we've
        # been given matches the expected size, and if it doesn't we'll just
        # skip trying to cache it.
        if (
            body is not None
            and "content-length" in response_headers
            and response_headers["content-length"].isdigit()
            and int(response_headers["content-length"]) != len(body)
        ):
            return

        cc_req = self.parse_cache_control(request.headers)
        cc = self.parse_cache_control(response_headers)

        cache_url = self.cache_url(request.url)
        logger.debug('Updating cache with response from "%s"', cache_url)

        # Delete it from the cache if we happen to have it stored there
        no_store = False
        if "no-store" in cc:
            no_store = True
            logger.debug('Response header has "no-store"')
        if "no-store" in cc_req:
            no_store = True
            logger.debug('Request header has "no-store"')
        if no_store and self.cache.get(cache_url):
            logger.debug('Purging existing cache entry to honor "no-store"')
            self.cache.delete(cache_url)
        if no_store:
            return

        # If we've been given an etag, then keep the response
        if self.cache_etags and "etag" in response_headers:
            logger.debug("Caching due to etag")
            self.cache.set(
                cache_url, self.serializer.dumps(request, response, body=body)
            )

        # Add to the cache any 301s. We do this before looking that
        # the Date headers.
        elif response.status == 301:
            logger.debug("Caching permanant redirect")
            self.cache.set(cache_url, self.serializer.dumps(request, response))

        # Add to the cache if the response headers demand it. If there
        # is no date header then we can't do anything about expiring
        # the cache.
        elif "date" in response_headers:
            # cache when there is a max-age > 0
            if "max-age" in cc and cc["max-age"] > 0:
                logger.debug("Caching b/c date exists and max-age > 0")
                self.cache.set(
                    cache_url, self.serializer.dumps(request, response, body=body)
                )

            # If the request can expire, it means we should cache it
            # in the meantime.
            elif "expires" in response_headers:
                if response_headers["expires"]:
                    logger.debug("Caching b/c of expires header")
                    self.cache.set(
                        cache_url, self.serializer.dumps(request, response, body=body)
                    )

    def update_cached_response(self, request, response):
        """On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        """
        cache_url = self.cache_url(request.url)

        cached_response = self.serializer.loads(request, self.cache.get(cache_url))

        if not cached_response:
            # we didn't have a cached response
            return response

        # Let's update our headers with the headers from the new request:
        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
        #
        # The server isn't supposed to send headers that would make
        # the cached body invalid. But... just in case, we'll be sure
        # to strip out ones we know that might be problematic due to
        # typical assumptions.
        excluded_headers = ["content-length"]

        cached_response.headers.update(
            dict(
                (k, v)
                for k, v in response.headers.items()
                if k.lower() not in excluded_headers
            )
        )

        # we want a 200 b/c we have content via the cache
        cached_response.status = 200

        # update our cache
        self.cache.set(cache_url, self.serializer.dumps(request, cached_response))

        return cached_response
import logging

from pip._vendor import requests

from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import logger

from argparse import ArgumentParser


def setup_logging():
    """Enable DEBUG-level output on the cachecontrol logger via stderr."""
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler())


def get_session():
    """Return a requests Session with a caching adapter mounted for
    both http:// and https:// and its controller exposed as
    ``session.cache_controller``.
    """
    adapter = CacheControlAdapter(
        DictCache(), cache_etags=True, serializer=None, heuristic=None
    )
    session = requests.Session()
    for prefix in ("http://", "https://"):
        session.mount(prefix, adapter)

    # Expose the controller so the demo can drive the cache directly.
    session.cache_controller = adapter.controller
    return session


def get_args():
    """Parse command-line arguments: one required positional URL."""
    arg_parser = ArgumentParser()
    arg_parser.add_argument("url", help="The URL to try and cache")
    return arg_parser.parse_args()


def main(args=None):
    """Fetch a URL through a caching session and report whether the
    response could be cached and served back.

    :param args: optional pre-parsed namespace with a ``url`` attribute;
        when None, arguments are read from the command line.
    """
    if args is None:
        # BUG FIX: the passed-in namespace was previously overwritten
        # unconditionally, making main(args) unusable from code.
        args = get_args()
    sess = get_session()

    # Make a request to get a response
    resp = sess.get(args.url)

    # Turn on logging
    setup_logging()

    # try setting the cache
    sess.cache_controller.cache_response(resp.request, resp.raw)

    # Now try to get it
    if sess.cache_controller.cached_request(resp.request):
        print("Cached!")
    else:
        print("Not cached :(")


# Allow running this diagnostic module directly as a script.
if __name__ == "__main__":
    main()
try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin


try:
    import cPickle as pickle
except ImportError:
    import pickle


# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
except ImportError:
    from pip._vendor.urllib3.response import HTTPResponse

try:
    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
except ImportError:
    from pip._vendor.urllib3.util import is_fp_closed

# Replicate some six behaviour
try:
    text_type = unicode
except NameError:
    text_type = str
import base64
import io
import json
import zlib

from pip._vendor import msgpack
from pip._vendor.requests.structures import CaseInsensitiveDict

from .compat import HTTPResponse, pickle, text_type


def _b64_decode_bytes(b):
    return base64.b64decode(b.encode("ascii"))


def _b64_decode_str(s):
    return _b64_decode_bytes(s).decode("utf8")


class Serializer(object):
    """Serializes and deserializes cached HTTP responses.

    Payloads carry a ``cc=N`` version prefix; the current format (v4)
    is msgpack-based.  Older formats are either still loadable (v1
    pickle, v2 compressed JSON) or treated as cache misses (v0, v3).
    """

    def dumps(self, request, response, body=None):
        """Serialize a urllib3 *response* into the versioned cache payload.

        When *body* is None it is read from the response and the file
        pointer is reset so callers can still consume the content.
        """
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            body = response.read(decode_content=False)

            # NOTE: 99% sure this is dead code. I'm only leaving it
            #       here b/c I don't have a test yet to prove
            #       it. Basically, before using
            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
            #       this made an effort to reset the file handle. The
            #       `CallbackFileWrapper` short circuits this code by
            #       setting the body as the content is consumed, the
            #       result being a `body` argument is *always* passed
            #       into cache_response, and in turn,
            #       `Serializer.dump`.
            response._fp = io.BytesIO(body)

        # NOTE: This is all a bit weird, but it's really important that on
        #       Python 2.x these objects are unicode and not str, even when
        #       they contain only ascii. The problem here is that msgpack
        #       understands the difference between unicode and bytes and we
        #       have it set to differentiate between them, however Python 2
        #       doesn't know the difference. Forcing these to unicode will be
        #       enough to have msgpack know the difference.
        data = {
            u"response": {
                u"body": body,
                u"headers": dict(
                    (text_type(k), text_type(v)) for k, v in response.headers.items()
                ),
                u"status": response.status,
                u"version": response.version,
                u"reason": text_type(response.reason),
                u"strict": response.strict,
                u"decode_content": response.decode_content,
            }
        }

        # Construct our vary headers
        data[u"vary"] = {}
        if u"vary" in response_headers:
            varied_headers = response_headers[u"vary"].split(",")
            for header in varied_headers:
                header = text_type(header).strip()
                header_value = request.headers.get(header, None)
                if header_value is not None:
                    header_value = text_type(header_value)
                data[u"vary"][header] = header_value

        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])

    def loads(self, request, data):
        """Deserialize *data*, dispatching on its ``cc=N`` version prefix.

        Returns None (a cache miss) for empty data or unknown versions.
        """
        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{}".format(ver))(request, data)

        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
        if headers.get("transfer-encoding", "") == "chunked":
            # The stored body is already de-chunked; advertising chunked
            # encoding again would confuse consumers.
            headers.pop("transfer-encoding")

        cached["response"]["headers"] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode("utf8"))

        return HTTPResponse(body=body, preload_content=False, **cached["response"])

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        # v1: pickled dict.  Note pickle.loads on untrusted cache data is
        # inherently unsafe; entries are assumed to be locally written.
        try:
            cached = pickle.loads(data)
        except ValueError:
            return

        return self.prepare_response(request, cached)

    def _loads_v2(self, request, data):
        # v2: zlib-compressed JSON with base64-encoded binary fields.
        try:
            cached = json.loads(zlib.decompress(data).decode("utf8"))
        except (ValueError, zlib.error):
            return

        # We need to decode the items that we've base64 encoded
        cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
        cached["response"]["headers"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v))
            for k, v in cached["response"]["headers"].items()
        )
        cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
        cached["vary"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
            for k, v in cached["vary"].items()
        )

        return self.prepare_response(request, cached)

    def _loads_v3(self, request, data):
        # Due to Python 2 encoding issues, it's impossible to know for sure
        # exactly how to load v3 entries, thus we'll treat these as a miss so
        # that they get rewritten out as v4 entries.
        return

    def _loads_v4(self, request, data):
        # NOTE(review): the `encoding` kwarg matches the vendored msgpack;
        # it was removed in msgpack>=1.0 — confirm before upgrading.
        try:
            cached = msgpack.loads(data, encoding="utf-8")
        except ValueError:
            return

        return self.prepare_response(request, cached)
from io import BytesIO


class CallbackFileWrapper(object):
    """
    Wrap a file-like object and tee every byte read into an internal
    buffer.  Once the underlying file is detected as exhausted/closed,
    a callback is invoked once with the complete buffered contents.

    All other attribute access is proxied to the wrapped file object.

    Name-mangled (double underscore) members are used so the wrapper
    does not accidentally shadow attributes of the wrapped object.
    """

    def __init__(self, fp, callback):
        self.__buf = BytesIO()
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name):
        # During garbage collection self.__fp may already be gone.
        # Fetching it through __getattribute__ with the mangled name
        # raises a plain AttributeError when absent, which prevents
        # getattr from recursing into this method forever.
        #
        # https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__("_CallbackFileWrapper__fp")
        return getattr(fp, name)

    def __is_fp_closed(self):
        # Probe the two shapes of file object we know about.
        try:
            return self.__fp.fp is None
        except AttributeError:
            pass

        try:
            return self.__fp.closed
        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self):
        if self.__callback:
            self.__callback(self.__buf.getvalue())

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do it's thing normally.
        self.__callback = None

    def read(self, amt=None):
        chunk = self.__fp.read(amt)
        self.__buf.write(chunk)
        if self.__is_fp_closed():
            self._close()
        return chunk

    def _safe_read(self, amt):
        chunk = self.__fp._safe_read(amt)
        if amt == 2 and chunk == b"\r\n":
            # urllib executes this read to toss the CRLF at the end
            # of the chunk; don't record it in the buffer.
            return chunk

        self.__buf.write(chunk)
        if self.__is_fp_closed():
            self._close()
        return chunk
from .adapter import CacheControlAdapter
from .cache import DictCache


def CacheControl(
    sess,
    cache=None,
    cache_etags=True,
    serializer=None,
    heuristic=None,
    controller_class=None,
    adapter_class=None,
    cacheable_methods=None,
):
    """Mount a caching adapter on *sess* for http:// and https:// and
    return the session.  All keyword arguments are forwarded to the
    adapter; cache and adapter_class default to DictCache and
    CacheControlAdapter respectively.
    """
    cache = cache or DictCache()
    adapter_cls = adapter_class or CacheControlAdapter
    adapter = adapter_cls(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
        controller_class=controller_class,
        cacheable_methods=cacheable_methods,
    )
    for prefix in ("http://", "https://"):
        sess.mount(prefix, adapter)

    return sess
U

��.e%�@s4dZddlmZGdd�de�ZGdd�de�ZdS)zb
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
�)�Lockc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�	BaseCachecCs
t��dS�N��NotImplementedError��self�key�r
�B/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/cache.py�get
sz
BaseCache.getcCs
t��dSrr�rr	�valuer
r
r�set
sz
BaseCache.setcCs
t��dSrrrr
r
r�deleteszBaseCache.deletecCsdSrr
)rr
r
r�closeszBaseCache.closeN)�__name__�
__module__�__qualname__rrrrr
r
r
rrsrc@s.eZdZd
dd�Zdd�Zdd�Zdd	�ZdS)�	DictCacheNcCst�|_|pi|_dSr)r�lock�data)rZ	init_dictr
r
r�__init__szDictCache.__init__cCs|j�|d�Sr)rrrr
r
rrsz
DictCache.getc	Cs&|j�|j�||i�W5QRXdSr)rr�updater
r
r
rr sz
DictCache.setc	Cs,|j�||jkr|j�|�W5QRXdSr)rr�poprr
r
rr$s
zDictCache.delete)N)rrrrrrrr
r
r
rrs
rN)�__doc__Z	threadingr�objectrrr
r
r
r�<module>sU

��.e��@s�zddlmZWn ek
r0ddlmZYnXzddlZWnek
rZddlZYnXzddlmZWn ek
r�ddlmZYnXzddl	m
Z
Wn ek
r�ddlm
Z
YnXzeZ
Wnek
r�eZ
YnXdS)�)�urljoinN)�HTTPResponse)�is_fp_closed)Zurllib.parser�ImportErrorZurlparseZcPickle�pickleZ.pip._vendor.requests.packages.urllib3.responserZpip._vendor.urllib3.responseZ*pip._vendor.requests.packages.urllib3.utilrZpip._vendor.urllib3.utilZunicodeZ	text_type�	NameError�str�r	r	�C/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/compat.py�<module>s&U

��.e�	�@s ddlmZGdd�de�ZdS)�)�BytesIOc@sBeZdZdZdd�Zdd�Zdd�Zdd	�Zddd�Zd
d�Z	d
S)�CallbackFileWrapperav
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    cCst�|_||_||_dS�N)r�_CallbackFileWrapper__buf�_CallbackFileWrapper__fp�_CallbackFileWrapper__callback)�self�fp�callback�r�H/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/filewrapper.py�__init__szCallbackFileWrapper.__init__cCs|�d�}t||�S)Nr)�__getattribute__�getattr)r�namer	rrr�__getattr__s	
zCallbackFileWrapper.__getattr__cCsHz|jjdkWStk
r"YnXz
|jjWStk
rBYnXdS)NF)rr	�AttributeError�closed�rrrrZ__is_fp_closed!s
z"CallbackFileWrapper.__is_fp_closedcCs |jr|�|j���d|_dSr)rr�getvaluerrrr�_close2szCallbackFileWrapper._closeNcCs,|j�|�}|j�|�|��r(|��|Sr)r�readr�write�"_CallbackFileWrapper__is_fp_closedr�rZamt�datarrrr=s
zCallbackFileWrapper.readcCs@|j�|�}|dkr |dkr |S|j�|�|��r<|��|S)N�s
)r�
_safe_readrrrrrrrrrEszCallbackFileWrapper._safe_read)N)
�__name__�
__module__�__qualname__�__doc__r
rrrrrrrrrrs
rN)�ior�objectrrrrr�<module>sU

��.e*�@spddlZddlZddlZddlZddlmZddlmZddlm	Z	m
Z
mZdd�Zdd	�Z
Gd
d�de�ZdS)�N)�msgpack)�CaseInsensitiveDict�)�HTTPResponse�pickle�	text_typecCst�|�d��S)N�ascii)�base64Z	b64decode�encode)�b�r�F/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/serialize.py�_b64_decode_bytessrcCst|��d�S)N�utf8)r�decode)�srrr
�_b64_decode_strsrc@sNeZdZddd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dS)�
SerializerNc		Cs�t|j�}|dkr*|jdd�}t�|�|_d|tdd�|j��D��|j|j	t
|j�|j|j
d�i}i|d<d|kr�|d�d�}|D]:}t
|���}|j�|d�}|dk	r�t
|�}||d|<q�d	�d
tj|dd�g�S)
NF)�decode_content�responsecss"|]\}}t|�t|�fVqdS�N)r��.0�k�vrrr
�	<genexpr>2sz#Serializer.dumps.<locals>.<genexpr>)�body�headers�status�version�reason�strictr�vary�,�,scc=4T)Zuse_bin_type)rr�read�io�BytesIOZ_fp�dict�itemsrrrr r!r�split�strip�get�joinr�dumps)	�self�requestrrZresponse_headers�dataZvaried_headers�headerZheader_valuerrr
r.s4

���zSerializer.dumpscCs�|sdSz|�dd�\}}Wntk
r4d}YnX|dd�dkrR||}d}|�dd�d�d�}zt|d	�|��||�WStk
r�YdSXdS)
Nr$rscc=0�scc=�=���rz
_loads_v{})r*�
ValueErrorr�getattr�format�AttributeError)r/r0r1Zverrrr
�loadsJs
zSerializer.loadscCs�d|�di�krdS|�di���D] \}}|j�|d�|kr$dSq$|d�d�}t|ddd�}|�dd	�d
kr�|�d�||dd<zt�|�}Wn$tk
r�t�|�d��}YnXt	f|dd
�|d��S)z`Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        �*r"Nrrr)r1ztransfer-encoding�ZchunkedrF)rZpreload_content)
r,r)r�poprr&r'�	TypeErrorr
r)r/r0�cachedr2�valueZbody_rawrrrrr
�prepare_responsehs
zSerializer.prepare_responsecCsdSrr�r/r0r1rrr
�	_loads_v0�szSerializer._loads_v0cCs2zt�|�}Wntk
r$YdSX|�||�Sr)rr:r6rA�r/r0r1r?rrr
�	_loads_v1�s
zSerializer._loads_v1c	Cs�zt�t�|��d��}Wnttjfk
r6YdSXt|dd�|dd<tdd�|dd�	�D��|dd<t
|dd�|dd<tdd�|d	�	�D��|d	<|�||�S)
Nrrrcss"|]\}}t|�t|�fVqdSr�rrrrr
r�s�z'Serializer._loads_v2.<locals>.<genexpr>rr css.|]&\}}t|�|dk	r t|�n|fVqdSrrFrrrr
r�s�r")�jsonr:�zlib�
decompressrr6�errorrr(r)rrArDrrr
�	_loads_v2�s�
�zSerializer._loads_v2cCsdSrrrBrrr
�	_loads_v3�szSerializer._loads_v3cCs6ztj|dd�}Wntk
r(YdSX|�||�S)Nzutf-8)�encoding)rr:r6rArDrrr
�	_loads_v4�s
zSerializer._loads_v4)N)�__name__�
__module__�__qualname__r.r:rArCrErKrLrNrrrr
rs
4$r)r	r&rGrHZpip._vendorrZpip._vendor.requests.structuresr�compatrrrrr�objectrrrrr
�<module>sU

��.e�@sxddlZddlmZddlmZddlmZddlmZddl	m
Z
dd�Zd	d
�Zdd�Z
dd
d�Zedkrte�dS)�N)�requests)�CacheControlAdapter)�	DictCache)�logger)�ArgumentParsercCs"t�tj�t��}t�|�dS)N)rZsetLevel�logging�DEBUGZ
StreamHandlerZ
addHandler)Zhandler�r	�A/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/_cmd.py�
setup_loggingsrcCs>tt�dddd�}t��}|�d|�|�d|�|j|_|S)NT)Zcache_etagsZ
serializerZ	heuristiczhttp://zhttps://)rrrZSessionZmountZ
controller�cache_controller)Zadapter�sessr	r	r
�get_sessions�rcCst�}|jddd�|��S)N�urlzThe URL to try and cache)�help)r�add_argument�
parse_args)�parserr	r	r
�get_argssrcCsTt�}t�}|�|j�}t�|j�|j|j�|j�	|j�rHt
d�nt
d�dS)NzCached!z
Not cached :()rr�getrrrZcache_responseZrequest�rawZcached_request�print)�argsr
Zrespr	r	r
�main$s
r�__main__)N)rZpip._vendorrZ pip._vendor.cachecontrol.adapterrZpip._vendor.cachecontrol.cacherZ#pip._vendor.cachecontrol.controllerr�argparserrrrr�__name__r	r	r	r
�<module>s
U

��.e�	�@s ddlmZGdd�de�ZdS)�)�BytesIOc@sBeZdZdZdd�Zdd�Zdd�Zdd	�Zddd�Zd
d�Z	d
S)�CallbackFileWrapperav
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    cCst�|_||_||_dS�N)r�_CallbackFileWrapper__buf�_CallbackFileWrapper__fp�_CallbackFileWrapper__callback)�self�fp�callback�r�H/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/filewrapper.py�__init__szCallbackFileWrapper.__init__cCs|�d�}t||�S)Nr)�__getattribute__�getattr)r�namer	rrr�__getattr__s	
zCallbackFileWrapper.__getattr__cCsHz|jjdkWStk
r"YnXz
|jjWStk
rBYnXdS)NF)rr	�AttributeError�closed�rrrrZ__is_fp_closed!s
z"CallbackFileWrapper.__is_fp_closedcCs |jr|�|j���d|_dSr)rr�getvaluerrrr�_close2szCallbackFileWrapper._closeNcCs,|j�|�}|j�|�|��r(|��|Sr)r�readr�write�"_CallbackFileWrapper__is_fp_closedr�rZamt�datarrrr=s
zCallbackFileWrapper.readcCs@|j�|�}|dkr |dkr |S|j�|�|��r<|��|S)N�s
)r�
_safe_readrrrrrrrrrEszCallbackFileWrapper._safe_read)N)
�__name__�
__module__�__qualname__�__doc__r
rrrrrrrrrrs
rN)�ior�objectrrrrr�<module>sU

��.e��@s&ddlmZddlmZddd�ZdS)�)�CacheControlAdapter)�	DictCacheNTc	CsB|pt�}|pt}|||||||d�}|�d|�|�d|�|S)N)�cache_etags�
serializer�	heuristic�controller_class�cacheable_methodszhttp://zhttps://)rrZmount)	Zsess�cacherrrrZ
adapter_classr�adapter�r�D/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/wrapper.py�CacheControls
�r
)NTNNNNN)r
rr	rr
rrrr�<module>s�U

��.e�5�@s�dZddlZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZe�
e�Ze�d�Zd	d
�ZGdd�de�ZdS)
z7
The httplib2 algorithms ported for use with requests.
�N)�parsedate_tz)�CaseInsensitiveDict�)�	DictCache)�
Serializerz9^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?cCs0t�|���}|d|d|d|d|dfS)z�Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    r����)�URI�match�groups)�urir
�r�G/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/controller.py�	parse_urisrc@s\eZdZdZddd�Zedd��Zedd	��Zd
d�Zdd
�Z	dd�Z
ddd�Zdd�ZdS)�CacheControllerz9An interface to see if request should cached or not.
    NTcCs,|pt�|_||_|pt�|_|p$d|_dS)N)����i,�-)r�cache�cache_etagsr�
serializer�cacheable_status_codes)�selfrrr�status_codesrrr�__init__"szCacheController.__init__c	Csht|�\}}}}}|r|s&td|��|��}|��}|s>d}|rPd�||g�pR|}|d||}|S)z4Normalize the URL to create a safe key for the cachez(Only absolute URIs are allowed. uri = %s�/�?z://)r�	Exception�lower�join)	�clsrZschemeZ	authority�pathZqueryZfragmentZrequest_uriZ
defrag_urirrr�_urlnorm*szCacheController._urlnormcCs
|�|�S�N)r$)r"rrrr�	cache_url>szCacheController.cache_urlc

Cs(tdftdftdfddddddddtdfd�}|�d|�dd��}i}|�d�D]�}|��s^qP|�d	d
�}|d��}z||\}}	Wn$tk
r�t�d|�YqPYnX|r�|	s�d||<|rPz||d
���||<WqPtk
r�|	r�t�d
|�YqPtk
�r t�d||j	�YqPXqP|S)NTF)NF)�max-agez	max-stale�	min-fresh�no-cache�no-storezno-transformzonly-if-cachedzmust-revalidateZpublicZprivatezproxy-revalidatezs-maxagez
cache-controlz
Cache-Control��,�=rrz,Ignoring unknown cache-control directive: %sz-Missing value for cache-control directive: %sz8Invalid value for cache-control directive %s, must be %s)
�int�get�split�strip�KeyError�logger�debug�
IndexError�
ValueError�__name__)
r�headersZknown_directivesZ
cc_headersZretvalZcc_directive�partsZ	directive�typZrequiredrrr�parse_cache_controlBsV�
��z#CacheController.parse_cache_controlcCs0|�|j�}t�d|�|�|j�}d|kr:t�d�dSd|kr\|ddkr\t�d�dS|j�|�}|dkr~t�d	�dS|j�	||�}|s�t�
d
�dS|jdkr�d}t�|�|St|j�}|r�d
|kr�d|kr�t�d�|j�
|�t�d�dSt��}t�t|d
��}	td||	�}
t�d|
�|�|�}d}d|k�r^|d}t�d|�nDd|k�r�t|d�}
|
dk	�r�t�|
�|	}td|�}t�d|�d|k�r�|d}t�d|�d|k�r�|d}|
|7}
t�d|
�||
k�rt�d�t�d||
�|Sd|k�r,t�d�|j�
|�dS)ze
        Return a cached response if it exists in the cache, otherwise
        return False.
        zLooking up "%s" in the cacher)z-Request header has "no-cache", cache bypassedFr'rz1Request header has "max_age" as 0, cache bypassedNzNo cache entry availablez1Cache entry deserialization failed, entry ignoredrzVReturning cached "301 Moved Permanently" response (ignoring date and etag information)�date�etagz(Purging cached response: no date or etagz!Ignoring cached response: no datezCurrent age based on date: %iz#Freshness lifetime from max-age: %i�expiresz#Freshness lifetime from expires: %iz+Freshness lifetime from request max-age: %ir(z'Adjusted current age from min-fresh: %iz2The response is "fresh", returning cached responsez%i > %iz4The cached response is "stale" with no etag, purging)r&�urlr3r4r;r8rr/r�loadsZwarning�statusr�delete�time�calendarZtimegmr�max)r�requestr&�ccZ
cache_data�resp�msgr8Znowr<Zcurrent_ageZresp_ccZfreshness_lifetimer>Zexpire_timeZ	min_freshrrr�cached_requestxs|





�









�




zCacheController.cached_requestcCs`|�|j�}|j�||j�|��}i}|r\t|j�}d|krH|d|d<d|kr\|d|d<|S)Nr=ZETagz
If-None-Matchz
last-modifiedz
Last-ModifiedzIf-Modified-Since)r&r?rr@rr/rr8)rrFr&rHZnew_headersr8rrr�conditional_headers�s
z#CacheController.conditional_headerscCs�|p|j}|j|kr(t�d|j|�dSt|j�}|dk	rfd|krf|d��rft|d�t|�krfdS|�	|j�}|�	|�}|�
|j�}	t�d|	�d}
d|kr�d}
t�d�d|kr�d}
t�d	�|
r�|j�
|	�r�t�d
�|j�|	�|
r�dS|j�r,d|k�r,t�d�|j�|	|jj|||d
��n�|jdk�r\t�d�|j�|	|j�||��n�d|k�r�d|k�r�|ddk�r�t�d�|j�|	|jj|||d
��n:d|k�r�|d�r�t�d�|j�|	|jj|||d
��dS)zc
        Algorithm for caching requests.

        This assumes a requests Response object.
        zStatus code %s not in %sN�content-lengthz&Updating cache with response from "%s"Fr*TzResponse header has "no-store"zRequest header has "no-store"z0Purging existing cache entry to honor "no-store"r=zCaching due to etag)�bodyrzCaching permanant redirectr<r'rz'Caching b/c date exists and max-age > 0r>zCaching b/c of expires header)rrAr3r4rr8�isdigitr.�lenr;r&r?rr/rBr�setr�dumps)rrF�responserMrrZresponse_headersZcc_reqrGr&Zno_storerrr�cache_response�sr

�
��
��




�


�


�zCacheController.cache_responsecsv|�|j�}|j�||j�|��}|s*|Sdg�|j�t�fdd�|j�	�D���d|_
|j�||j�||��|S)z�On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        rLc3s&|]\}}|���kr||fVqdSr%)r )�.0�k�v�Zexcluded_headersrr�	<genexpr>bs�z9CacheController.update_cached_response.<locals>.<genexpr>r)
r&r?rr@rr/r8�update�dict�itemsrArPrQ)rrFrRr&Zcached_responserrWr�update_cached_responseIs	��	z&CacheController.update_cached_response)NTNN)NN)
r7�
__module__�__qualname__�__doc__r�classmethodr$r&r;rJrKrSr\rrrrrs�


6o
Rr)r_Zlogging�rerDrCZemail.utilsrZpip._vendor.requests.structuresrrrZ	serializerZ	getLoggerr7r3�compilerr�objectrrrrr�<module>s

	U

��.e�5�@s�dZddlZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZe�
e�Ze�d�Zd	d
�ZGdd�de�ZdS)
z7
The httplib2 algorithms ported for use with requests.
�N)�parsedate_tz)�CaseInsensitiveDict�)�	DictCache)�
Serializerz9^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?cCs0t�|���}|d|d|d|d|dfS)z�Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    r����)�URI�match�groups)�urir
�r�G/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/controller.py�	parse_urisrc@s\eZdZdZddd�Zedd��Zedd	��Zd
d�Zdd
�Z	dd�Z
ddd�Zdd�ZdS)�CacheControllerz9An interface to see if request should cached or not.
    NTcCs,|pt�|_||_|pt�|_|p$d|_dS)N)����i,�-)r�cache�cache_etagsr�
serializer�cacheable_status_codes)�selfrrr�status_codesrrr�__init__"szCacheController.__init__c	Csht|�\}}}}}|r|s&td|��|��}|��}|s>d}|rPd�||g�pR|}|d||}|S)z4Normalize the URL to create a safe key for the cachez(Only absolute URIs are allowed. uri = %s�/�?z://)r�	Exception�lower�join)	�clsrZschemeZ	authority�pathZqueryZfragmentZrequest_uriZ
defrag_urirrr�_urlnorm*szCacheController._urlnormcCs
|�|�S�N)r$)r"rrrr�	cache_url>szCacheController.cache_urlc

Cs(tdftdftdfddddddddtdfd�}|�d|�dd��}i}|�d�D]�}|��s^qP|�d	d
�}|d��}z||\}}	Wn$tk
r�t�d|�YqPYnX|r�|	s�d||<|rPz||d
���||<WqPtk
r�|	r�t�d
|�YqPtk
�r t�d||j	�YqPXqP|S)NTF)NF)�max-agez	max-stale�	min-fresh�no-cache�no-storezno-transformzonly-if-cachedzmust-revalidateZpublicZprivatezproxy-revalidatezs-maxagez
cache-controlz
Cache-Control��,�=rrz,Ignoring unknown cache-control directive: %sz-Missing value for cache-control directive: %sz8Invalid value for cache-control directive %s, must be %s)
�int�get�split�strip�KeyError�logger�debug�
IndexError�
ValueError�__name__)
r�headersZknown_directivesZ
cc_headersZretvalZcc_directive�partsZ	directive�typZrequiredrrr�parse_cache_controlBsV�
��z#CacheController.parse_cache_controlcCs0|�|j�}t�d|�|�|j�}d|kr:t�d�dSd|kr\|ddkr\t�d�dS|j�|�}|dkr~t�d	�dS|j�	||�}|s�t�
d
�dS|jdkr�d}t�|�|St|j�}|r�d
|kr�d|kr�t�d�|j�
|�t�d�dSt��}t�t|d
��}	td||	�}
t�d|
�|�|�}d}d|k�r^|d}t�d|�nDd|k�r�t|d�}
|
dk	�r�t�|
�|	}td|�}t�d|�d|k�r�|d}t�d|�d|k�r�|d}|
|7}
t�d|
�||
k�rt�d�t�d||
�|Sd|k�r,t�d�|j�
|�dS)ze
        Return a cached response if it exists in the cache, otherwise
        return False.
        zLooking up "%s" in the cacher)z-Request header has "no-cache", cache bypassedFr'rz1Request header has "max_age" as 0, cache bypassedNzNo cache entry availablez1Cache entry deserialization failed, entry ignoredrzVReturning cached "301 Moved Permanently" response (ignoring date and etag information)�date�etagz(Purging cached response: no date or etagz!Ignoring cached response: no datezCurrent age based on date: %iz#Freshness lifetime from max-age: %i�expiresz#Freshness lifetime from expires: %iz+Freshness lifetime from request max-age: %ir(z'Adjusted current age from min-fresh: %iz2The response is "fresh", returning cached responsez%i > %iz4The cached response is "stale" with no etag, purging)r&�urlr3r4r;r8rr/r�loadsZwarning�statusr�delete�time�calendarZtimegmr�max)r�requestr&�ccZ
cache_data�resp�msgr8Znowr<Zcurrent_ageZresp_ccZfreshness_lifetimer>Zexpire_timeZ	min_freshrrr�cached_requestxs|





�









�




zCacheController.cached_requestcCs`|�|j�}|j�||j�|��}i}|r\t|j�}d|krH|d|d<d|kr\|d|d<|S)Nr=ZETagz
If-None-Matchz
last-modifiedz
Last-ModifiedzIf-Modified-Since)r&r?rr@rr/rr8)rrFr&rHZnew_headersr8rrr�conditional_headers�s
z#CacheController.conditional_headerscCs�|p|j}|j|kr(t�d|j|�dSt|j�}|dk	rfd|krf|d��rft|d�t|�krfdS|�	|j�}|�	|�}|�
|j�}	t�d|	�d}
d|kr�d}
t�d�d|kr�d}
t�d	�|
r�|j�
|	�r�t�d
�|j�|	�|
r�dS|j�r,d|k�r,t�d�|j�|	|jj|||d
��n�|jdk�r\t�d�|j�|	|j�||��n�d|k�r�d|k�r�|ddk�r�t�d�|j�|	|jj|||d
��n:d|k�r�|d�r�t�d�|j�|	|jj|||d
��dS)zc
        Algorithm for caching requests.

        This assumes a requests Response object.
        zStatus code %s not in %sN�content-lengthz&Updating cache with response from "%s"Fr*TzResponse header has "no-store"zRequest header has "no-store"z0Purging existing cache entry to honor "no-store"r=zCaching due to etag)�bodyrzCaching permanant redirectr<r'rz'Caching b/c date exists and max-age > 0r>zCaching b/c of expires header)rrAr3r4rr8�isdigitr.�lenr;r&r?rr/rBr�setr�dumps)rrF�responserMrrZresponse_headersZcc_reqrGr&Zno_storerrr�cache_response�sr

�
��
��




�


�


�zCacheController.cache_responsecsv|�|j�}|j�||j�|��}|s*|Sdg�|j�t�fdd�|j�	�D���d|_
|j�||j�||��|S)z�On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        rLc3s&|]\}}|���kr||fVqdSr%)r )�.0�k�v�Zexcluded_headersrr�	<genexpr>bs�z9CacheController.update_cached_response.<locals>.<genexpr>r)
r&r?rr@rr/r8�update�dict�itemsrArPrQ)rrFrRr&Zcached_responserrWr�update_cached_responseIs	��	z&CacheController.update_cached_response)NTNN)NN)
r7�
__module__�__qualname__�__doc__r�classmethodr$r&r;rJrKrSr\rrrrrs�


6o
Rr)r_Zlogging�rerDrCZemail.utilsrZpip._vendor.requests.structuresrrrZ	serializerZ	getLoggerr7r3�compilerr�objectrrrrr�<module>s

	U

��.e%�@s4dZddlmZGdd�de�ZGdd�de�ZdS)zb
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
�)�Lockc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�	BaseCachecCs
t��dS�N��NotImplementedError��self�key�r
�B/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/cache.py�get
sz
BaseCache.getcCs
t��dSrr�rr	�valuer
r
r�set
sz
BaseCache.setcCs
t��dSrrrr
r
r�deleteszBaseCache.deletecCsdSrr
)rr
r
r�closeszBaseCache.closeN)�__name__�
__module__�__qualname__rrrrr
r
r
rrsrc@s.eZdZd
dd�Zdd�Zdd�Zdd	�ZdS)�	DictCacheNcCst�|_|pi|_dSr)r�lock�data)rZ	init_dictr
r
r�__init__szDictCache.__init__cCs|j�|d�Sr)rrrr
r
rrsz
DictCache.getc	Cs&|j�|j�||i�W5QRXdSr)rr�updater
r
r
rr sz
DictCache.setc	Cs,|j�||jkr|j�|�W5QRXdSr)rr�poprr
r
rr$s
zDictCache.delete)N)rrrrrrrr
r
r
rrs
rN)�__doc__Z	threadingr�objectrrr
r
r
r�<module>sU

��.e�@sxddlZddlmZddlmZddlmZddlmZddl	m
Z
dd�Zd	d
�Zdd�Z
dd
d�Zedkrte�dS)�N)�requests)�CacheControlAdapter)�	DictCache)�logger)�ArgumentParsercCs"t�tj�t��}t�|�dS)N)rZsetLevel�logging�DEBUGZ
StreamHandlerZ
addHandler)Zhandler�r	�A/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/_cmd.py�
setup_loggingsrcCs>tt�dddd�}t��}|�d|�|�d|�|j|_|S)NT)Zcache_etagsZ
serializerZ	heuristiczhttp://zhttps://)rrrZSessionZmountZ
controller�cache_controller)Zadapter�sessr	r	r
�get_sessions�rcCst�}|jddd�|��S)N�urlzThe URL to try and cache)�help)r�add_argument�
parse_args)�parserr	r	r
�get_argssrcCsTt�}t�}|�|j�}t�|j�|j|j�|j�	|j�rHt
d�nt
d�dS)NzCached!z
Not cached :()rr�getrrrZcache_responseZrequest�rawZcached_request�print)�argsr
Zrespr	r	r
�main$s
r�__main__)N)rZpip._vendorrZ pip._vendor.cachecontrol.adapterrZpip._vendor.cachecontrol.cacherZ#pip._vendor.cachecontrol.controllerr�argparserrrrr�__name__r	r	r	r
�<module>s
U

��.e.�@s8dZdZdZdZddlmZddlmZddlm	Z	dS)	zbCacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
zEric Larsonzeric@ionrock.orgz0.12.5�)�CacheControl)�CacheControlAdapter)�CacheControllerN)
�__doc__�
__author__Z	__email__�__version__�wrapperrZadapterrZ
controllerr�r	r	�E/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/__init__.py�<module>sU

��.e.�@s8dZdZdZdZddlmZddlmZddlm	Z	dS)	zbCacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
zEric Larsonzeric@ionrock.orgz0.12.5�)�CacheControl)�CacheControlAdapter)�CacheControllerN)
�__doc__�
__author__Z	__email__�__version__�wrapperrZadapterrZ
controllerr�r	r	�E/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/__init__.py�<module>sU

��.e��@s\ddlZddlZddlZddlmZddlmZddlmZddl	m
Z
Gdd�de�ZdS)	�N)�HTTPAdapter�)�CacheController)�	DictCache)�CallbackFileWrappercsNeZdZddhZd�fdd�	Zd�fdd�	Zd�fd
d�	Z�fdd
�Z�ZS)�CacheControlAdapterZPUTZDELETENTc
sLtt|�j||�|pt�|_||_|p*d|_|p4t}	|	|j||d�|_dS)N)ZGET)�cache_etags�
serializer)	�superr�__init__r�cache�	heuristic�cacheable_methodsr�
controller)
�selfrrZcontroller_classr	r
r�args�kwZcontroller_factory��	__class__��D/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/adapter.pyrs
�zCacheControlAdapter.__init__cs�|p|j}|j|krhz|j�|�}Wntjk
r>d}YnX|rT|j||dd�S|j�|j�	|��t
t|�j|f|�}|S)z�
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        NT)�
from_cache)
r�methodrZcached_request�zlib�error�build_responseZheaders�updateZconditional_headersr
r�send)r�requestrr�	cacheable�cached_response�resprrrr$s


zCacheControlAdapter.sendFc
s|p|j}|s�|j|kr�|jr*|j�|�}|jdkrh|j�||�}||k	rNd}|jdd�|��|}n\|jdkr�|j�	||�nBt
|jt�
|jj	||��|_|jr�|j��fdd�}t�||�|_tt|��||�}|j|jk�r|j�r|j�|j�}	|j�|	�||_|S)z�
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        i0TF)Zdecode_contenti-cs��|jdkr|j��dS)Nr)Z
chunk_left�_fpZ_close�r�Zsuper_update_chunk_lengthrr�_update_chunk_lengthns
z@CacheControlAdapter.build_response.<locals>._update_chunk_length)rrr
ZapplyZstatusrZupdate_cached_response�readZrelease_connZcache_responserr"�	functools�partialZchunkedr%�types�
MethodTyper
rr�invalidating_methods�ok�	cache_urlZurlr�deleter)
rrZresponserrrr r%r!r-rr$rr9sJ	

�
���z"CacheControlAdapter.build_responsecs|j��tt|���dS)N)r�closer
rr#rrrr/�s
zCacheControlAdapter.close)NTNNNN)N)FN)	�__name__�
__module__�__qualname__r+rrrr/�
__classcell__rrrrrs��Jr)r)r'rZpip._vendor.requests.adaptersrrrrrZfilewrapperrrrrrr�<module>sU

��.e��@s�ddlZddlZddlmZmZmZddlmZmZdZddd�Z	dd�Z
Gd	d
�d
e�ZGdd�de�Z
Gd
d�de�ZGdd�de�ZdS)�N)�
formatdate�	parsedate�parsedate_tz)�datetime�	timedeltaz%a, %d %b %Y %H:%M:%S GMTcCs|p
t��}||S�N)rZutcnow)�delta�date�r
�G/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/heuristics.py�expire_aftersrcCstt�|����Sr)r�calendar�timegmZ	timetuple)Zdtr
r
r�datetime_to_headersrc@s$eZdZdd�Zdd�Zdd�ZdS)�
BaseHeuristiccCsdS)a!
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        z110 - "Response is Stale"r
��self�responser
r
r�warnings	zBaseHeuristic.warningcCsiS)z�Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        r
rr
r
r�update_headers!szBaseHeuristic.update_headerscCs@|�|�}|r<|j�|�|�|�}|dk	r<|j�d|i�|S)N�Warning)r�headers�updater)rrZupdated_headersZwarning_header_valuer
r
r�apply*s

zBaseHeuristic.applyN)�__name__�
__module__�__qualname__rrrr
r
r
rrs	rc@seZdZdZdd�ZdS)�OneDayCachezM
    Cache the response by providing an expires 1 day in the
    future.
    cCsRi}d|jkrNt|jd�}ttdd�t|dd��d�}t|�|d<d|d<|S)	N�expiresr	�)Zdays�)r	�public�
cache-control)rrrrrr)rrrr	rr
r
rr<s
zOneDayCache.update_headersN)rrr�__doc__rr
r
r
rr6src@s(eZdZdZdd�Zdd�Zdd�ZdS)	�ExpiresAfterz;
    Cache **all** requests for a defined time period.
    cKstf|�|_dSr)rr)r�kwr
r
r�__init__LszExpiresAfter.__init__cCst|j�}t|�dd�S)Nr!)rr")rrr)rrrr
r
rrOs
zExpiresAfter.update_headerscCsd}||jS)Nz:110 - Automatically cached for %s. Response might be stale)r)rrZtmplr
r
rrSszExpiresAfter.warningN)rrrr#r&rrr
r
r
rr$Gsr$c@s:eZdZdZdddddddd	d
ddhZd
d�Zdd�ZdS)�LastModifieda�
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    ��������i,i-i�i�i�i�i�c
Cs�|j}d|kriSd|kr*|ddkr*iS|j|jkr:iSd|ksJd|krNiSt�t|d��}t|d�}|dks||dkr�iSt��}td||�}|t�|�}tdt	|dd��}||kr�iS||}	dt�
tt�|	��iS)	Nrr"r!r	z
last-modifiedr�
i�Q)
rZstatus�cacheable_by_default_statusesr
rrr�time�max�min�strftime�TIME_FMT�gmtime)
r�resprr	Z
last_modifiedZnowZcurrent_agerZfreshness_lifetimerr
r
rrhs*zLastModified.update_headerscCsdSrr
)rr4r
r
rr�szLastModified.warningN)rrrr#r-rrr
r
r
rr'Xs�r')N)r
r.Zemail.utilsrrrrrr2rr�objectrrr$r'r
r
r
r�<module>s
"U

��.e*�@spddlZddlZddlZddlZddlmZddlmZddlm	Z	m
Z
mZdd�Zdd	�Z
Gd
d�de�ZdS)�N)�msgpack)�CaseInsensitiveDict�)�HTTPResponse�pickle�	text_typecCst�|�d��S)N�ascii)�base64Z	b64decode�encode)�b�r�F/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/serialize.py�_b64_decode_bytessrcCst|��d�S)N�utf8)r�decode)�srrr
�_b64_decode_strsrc@sNeZdZddd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dS)�
SerializerNc		Cs�t|j�}|dkr*|jdd�}t�|�|_d|tdd�|j��D��|j|j	t
|j�|j|j
d�i}i|d<d|kr�|d�d�}|D]:}t
|���}|j�|d�}|dk	r�t
|�}||d|<q�d	�d
tj|dd�g�S)
NF)�decode_content�responsecss"|]\}}t|�t|�fVqdS�N)r��.0�k�vrrr
�	<genexpr>2sz#Serializer.dumps.<locals>.<genexpr>)�body�headers�status�version�reason�strictr�vary�,�,scc=4T)Zuse_bin_type)rr�read�io�BytesIOZ_fp�dict�itemsrrrr r!r�split�strip�get�joinr�dumps)	�self�requestrrZresponse_headers�dataZvaried_headers�headerZheader_valuerrr
r.s4

���zSerializer.dumpscCs�|sdSz|�dd�\}}Wntk
r4d}YnX|dd�dkrR||}d}|�dd�d�d�}zt|d	�|��||�WStk
r�YdSXdS)
Nr$rscc=0�scc=�=���rz
_loads_v{})r*�
ValueErrorr�getattr�format�AttributeError)r/r0r1Zverrrr
�loadsJs
zSerializer.loadscCs�d|�di�krdS|�di���D] \}}|j�|d�|kr$dSq$|d�d�}t|ddd�}|�dd	�d
kr�|�d�||dd<zt�|�}Wn$tk
r�t�|�d��}YnXt	f|dd
�|d��S)z`Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        �*r"Nrrr)r1ztransfer-encoding�ZchunkedrF)rZpreload_content)
r,r)r�poprr&r'�	TypeErrorr
r)r/r0�cachedr2�valueZbody_rawrrrrr
�prepare_responsehs
zSerializer.prepare_responsecCsdSrr�r/r0r1rrr
�	_loads_v0�szSerializer._loads_v0cCs2zt�|�}Wntk
r$YdSX|�||�Sr)rr:r6rA�r/r0r1r?rrr
�	_loads_v1�s
zSerializer._loads_v1c	Cs�zt�t�|��d��}Wnttjfk
r6YdSXt|dd�|dd<tdd�|dd�	�D��|dd<t
|dd�|dd<tdd�|d	�	�D��|d	<|�||�S)
Nrrrcss"|]\}}t|�t|�fVqdSr�rrrrr
r�s�z'Serializer._loads_v2.<locals>.<genexpr>rr css.|]&\}}t|�|dk	r t|�n|fVqdSrrFrrrr
r�s�r")�jsonr:�zlib�
decompressrr6�errorrr(r)rrArDrrr
�	_loads_v2�s�
�zSerializer._loads_v2cCsdSrrrBrrr
�	_loads_v3�szSerializer._loads_v3cCs6ztj|dd�}Wntk
r(YdSX|�||�S)Nzutf-8)�encoding)rr:r6rArDrrr
�	_loads_v4�s
zSerializer._loads_v4)N)�__name__�
__module__�__qualname__r.r:rArCrErKrLrNrrrr
rs
4$r)r	r&rGrHZpip._vendorrZpip._vendor.requests.structuresr�compatrrrrr�objectrrrrr
�<module>sU

��.e��@s�ddlZddlZddlmZmZmZddlmZmZdZddd�Z	dd�Z
Gd	d
�d
e�ZGdd�de�Z
Gd
d�de�ZGdd�de�ZdS)�N)�
formatdate�	parsedate�parsedate_tz)�datetime�	timedeltaz%a, %d %b %Y %H:%M:%S GMTcCs|p
t��}||S�N)rZutcnow)�delta�date�r
�G/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/heuristics.py�expire_aftersrcCstt�|����Sr)r�calendar�timegmZ	timetuple)Zdtr
r
r�datetime_to_headersrc@s$eZdZdd�Zdd�Zdd�ZdS)�
BaseHeuristiccCsdS)a!
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        z110 - "Response is Stale"r
��self�responser
r
r�warnings	zBaseHeuristic.warningcCsiS)z�Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        r
rr
r
r�update_headers!szBaseHeuristic.update_headerscCs@|�|�}|r<|j�|�|�|�}|dk	r<|j�d|i�|S)N�Warning)r�headers�updater)rrZupdated_headersZwarning_header_valuer
r
r�apply*s

zBaseHeuristic.applyN)�__name__�
__module__�__qualname__rrrr
r
r
rrs	rc@seZdZdZdd�ZdS)�OneDayCachezM
    Cache the response by providing an expires 1 day in the
    future.
    cCsRi}d|jkrNt|jd�}ttdd�t|dd��d�}t|�|d<d|d<|S)	N�expiresr	�)Zdays�)r	�public�
cache-control)rrrrrr)rrrr	rr
r
rr<s
zOneDayCache.update_headersN)rrr�__doc__rr
r
r
rr6src@s(eZdZdZdd�Zdd�Zdd�ZdS)	�ExpiresAfterz;
    Cache **all** requests for a defined time period.
    cKstf|�|_dSr)rr)r�kwr
r
r�__init__LszExpiresAfter.__init__cCst|j�}t|�dd�S)Nr!)rr")rrr)rrrr
r
rrOs
zExpiresAfter.update_headerscCsd}||jS)Nz:110 - Automatically cached for %s. Response might be stale)r)rrZtmplr
r
rrSszExpiresAfter.warningN)rrrr#r&rrr
r
r
rr$Gsr$c@s:eZdZdZdddddddd	d
ddhZd
d�Zdd�ZdS)�LastModifieda�
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    ��������i,i-i�i�i�i�i�c
Cs�|j}d|kriSd|kr*|ddkr*iS|j|jkr:iSd|ksJd|krNiSt�t|d��}t|d�}|dks||dkr�iSt��}td||�}|t�|�}tdt	|dd��}||kr�iS||}	dt�
tt�|	��iS)	Nrr"r!r	z
last-modifiedr�
i�Q)
rZstatus�cacheable_by_default_statusesr
rrr�time�max�min�strftime�TIME_FMT�gmtime)
r�resprr	Z
last_modifiedZnowZcurrent_agerZfreshness_lifetimerr
r
rrhs*zLastModified.update_headerscCsdSrr
)rr4r
r
rr�szLastModified.warningN)rrrr#r-rrr
r
r
rr'Xs�r')N)r
r.Zemail.utilsrrrrrr2rr�objectrrr$r'r
r
r
r�<module>s
"U

��.e��@s\ddlZddlZddlZddlmZddlmZddlmZddl	m
Z
Gdd�de�ZdS)	�N)�HTTPAdapter�)�CacheController)�	DictCache)�CallbackFileWrappercsNeZdZddhZd�fdd�	Zd�fdd�	Zd�fd
d�	Z�fdd
�Z�ZS)�CacheControlAdapterZPUTZDELETENTc
sLtt|�j||�|pt�|_||_|p*d|_|p4t}	|	|j||d�|_dS)N)ZGET)�cache_etags�
serializer)	�superr�__init__r�cache�	heuristic�cacheable_methodsr�
controller)
�selfrrZcontroller_classr	r
r�args�kwZcontroller_factory��	__class__��D/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/adapter.pyrs
�zCacheControlAdapter.__init__cs�|p|j}|j|krhz|j�|�}Wntjk
r>d}YnX|rT|j||dd�S|j�|j�	|��t
t|�j|f|�}|S)z�
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        NT)�
from_cache)
r�methodrZcached_request�zlib�error�build_responseZheaders�updateZconditional_headersr
r�send)r�requestrr�	cacheable�cached_response�resprrrr$s


zCacheControlAdapter.sendFc
s|p|j}|s�|j|kr�|jr*|j�|�}|jdkrh|j�||�}||k	rNd}|jdd�|��|}n\|jdkr�|j�	||�nBt
|jt�
|jj	||��|_|jr�|j��fdd�}t�||�|_tt|��||�}|j|jk�r|j�r|j�|j�}	|j�|	�||_|S)z�
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        i0TF)Zdecode_contenti-cs��|jdkr|j��dS)Nr)Z
chunk_left�_fpZ_close�r�Zsuper_update_chunk_lengthrr�_update_chunk_lengthns
z@CacheControlAdapter.build_response.<locals>._update_chunk_length)rrr
ZapplyZstatusrZupdate_cached_response�readZrelease_connZcache_responserr"�	functools�partialZchunkedr%�types�
MethodTyper
rr�invalidating_methods�ok�	cache_urlZurlr�deleter)
rrZresponserrrr r%r!r-rr$rr9sJ	

�
���z"CacheControlAdapter.build_responsecs|j��tt|���dS)N)r�closer
rr#rrrr/�s
zCacheControlAdapter.close)NTNNNN)N)FN)	�__name__�
__module__�__qualname__r+rrrr/�
__classcell__rrrrrs��Jr)r)r'rZpip._vendor.requests.adaptersrrrrrZfilewrapperrrrrrr�<module>sU

��.e��@s&ddlmZddlmZddd�ZdS)�)�CacheControlAdapter)�	DictCacheNTc	CsB|pt�}|pt}|||||||d�}|�d|�|�d|�|S)N)�cache_etags�
serializer�	heuristic�controller_class�cacheable_methodszhttp://zhttps://)rrZmount)	Zsess�cacherrrrZ
adapter_classr�adapter�r�D/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/wrapper.py�CacheControls
�r
)NTNNNNN)r
rr	rr
rrrr�<module>s�U

��.e��@s�zddlmZWn ek
r0ddlmZYnXzddlZWnek
rZddlZYnXzddlmZWn ek
r�ddlmZYnXzddl	m
Z
Wn ek
r�ddlm
Z
YnXzeZ
Wnek
r�eZ
YnXdS)�)�urljoinN)�HTTPResponse)�is_fp_closed)Zurllib.parser�ImportErrorZurlparseZcPickle�pickleZ.pip._vendor.requests.packages.urllib3.responserZpip._vendor.urllib3.responseZ*pip._vendor.requests.packages.urllib3.utilrZpip._vendor.urllib3.utilZunicodeZ	text_type�	NameError�str�r	r	�C/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/compat.py�<module>s&import calendar
import time

from email.utils import formatdate, parsedate, parsedate_tz

from datetime import datetime, timedelta

# HTTP-date format (IMF-fixdate); rendered from a UTC struct_time, so the
# zone is always literal "GMT".
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"


def expire_after(delta, date=None):
    """Return *date* advanced by *delta*.

    When *date* is omitted (or falsy), the current UTC time is used as
    the starting point.
    """
    base = date or datetime.utcnow()
    return base + delta


def datetime_to_header(dt):
    """Format the datetime *dt* (treated as UTC) as an HTTP date string."""
    stamp = calendar.timegm(dt.timetuple())
    return formatdate(stamp)


class BaseHeuristic(object):
    """Base class for cache-freshness heuristics applied to responses."""

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Return a dict of headers to merge into *response*.

        NOTE: Implementations SHOULD always include some Warning header
        to signify that the response was cached by the client, not by
        way of the provided headers.
        """
        return {}

    def apply(self, response):
        """Merge this heuristic's headers (plus a Warning) into *response*."""
        extra = self.update_headers(response)
        if not extra:
            # Nothing to adjust; hand the response back untouched.
            return response

        response.headers.update(extra)
        warning = self.warning(response)
        if warning is not None:
            response.headers.update({"Warning": warning})
        return response


class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """

    def update_headers(self, response):
        """Synthesize expires/cache-control headers 24h past the Date header."""
        if "expires" in response.headers:
            # The server already set an expiry; don't override it.
            return {}

        parsed = parsedate(response.headers["date"])
        one_day_later = expire_after(
            timedelta(days=1), date=datetime(*parsed[:6])
        )
        return {
            "expires": datetime_to_header(one_day_later),
            "cache-control": "public",
        }


class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        # Keyword arguments are exactly those of datetime.timedelta
        # (days=, hours=, minutes=, ...).
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        """Stamp the response with an expiry ``self.delta`` from now."""
        expiry = expire_after(self.delta)
        return {
            "expires": datetime_to_header(expiry),
            "cache-control": "public",
        }

    def warning(self, response):
        tmpl = "110 - Automatically cached for %s. Response might be stale"
        return tmpl % self.delta


class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """
    # Status codes that are cacheable by default (RFC 7231 section 6.1).
    cacheable_by_default_statuses = {
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    }

    def update_headers(self, resp):
        """Return a synthetic ``expires`` header derived from Last-Modified.

        Returns an empty dict when the response must not (or cannot) be
        freshness-extended: an explicit expires or non-public
        cache-control is present, the status is not cacheable by
        default, or the date headers are missing or unparseable.
        """
        headers = resp.headers

        if "expires" in headers:
            return {}

        if "cache-control" in headers and headers["cache-control"] != "public":
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if "date" not in headers or "last-modified" not in headers:
            return {}

        # Parse both headers BEFORE converting: parsedate_tz/parsedate
        # return None for malformed values, and calendar.timegm(None)
        # raises TypeError.  The previous code converted first, so its
        # "date is None" check could never fire and a garbage Date
        # header crashed the heuristic instead of disabling it.
        parsed_date = parsedate_tz(headers["date"])
        last_modified = parsedate(headers["last-modified"])
        if parsed_date is None or last_modified is None:
            return {}
        date = calendar.timegm(parsed_date)

        now = time.time()
        current_age = max(0, now - date)
        # Heuristic freshness: 10% of the Date/Last-Modified gap,
        # capped at 24 hours (see class docstring).
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp):
        # Heuristically-extended freshness deliberately emits no Warning.
        return None
"""CacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
"""
# Package metadata, exposed for introspection by callers.
__author__ = "Eric Larson"
__email__ = "eric@ionrock.org"
__version__ = "0.12.5"

from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController
import types
import functools
import zlib

from pip._vendor.requests.adapters import HTTPAdapter

from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper


class CacheControlAdapter(HTTPAdapter):
    """Transport adapter that adds HTTP caching to ``requests`` sessions.

    Cache lookups happen in :meth:`send` before a request goes out;
    storage and invalidation happen in :meth:`build_response` afterwards.
    """

    # Successful requests with these methods delete the cached entry for
    # the URL (they are presumed to change the resource).
    invalidating_methods = {"PUT", "DELETE"}

    def __init__(
        self,
        cache=None,
        cache_etags=True,
        controller_class=None,
        serializer=None,
        heuristic=None,
        cacheable_methods=None,
        *args,
        **kw
    ):
        super(CacheControlAdapter, self).__init__(*args, **kw)
        # Default to a simple in-memory cache when none is provided.
        self.cache = cache or DictCache()
        self.heuristic = heuristic
        self.cacheable_methods = cacheable_methods or ("GET",)

        # The controller encapsulates the actual caching rules.
        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache, cache_etags=cache_etags, serializer=serializer
        )

    def send(self, request, cacheable_methods=None, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if request.method in cacheable:
            try:
                cached_response = self.controller.cached_request(request)
            except zlib.error:
                # A corrupt (undecompressable) cache entry is treated as
                # a cache miss rather than an error.
                cached_response = None
            if cached_response:
                return self.build_response(request, cached_response, from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(self.controller.conditional_headers(request))

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(
        self, request, response, from_cache=False, cacheable_methods=None
    ):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if not from_cache and request.method in cacheable:
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                #   response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response, request, response
                    ),
                )
                if response.chunked:
                    # Chunked bodies have no single EOF read, so hook the
                    # chunk-length bookkeeping: once the final chunk is
                    # seen (chunk_left == 0), close the wrapper.
                    # NOTE(review): presumably closing fires the wrapper's
                    # cache callback -- confirm against
                    # CallbackFileWrapper._close.
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self):
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()

                    response._update_chunk_length = types.MethodType(
                        _update_chunk_length, response
                    )

        resp = super(CacheControlAdapter, self).build_response(request, response)

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp

    def close(self):
        """Close the underlying cache and the wrapped ``HTTPAdapter``."""
        self.cache.close()
        super(CacheControlAdapter, self).close()
import hashlib
import os
from textwrap import dedent

from ..cache import BaseCache
from ..controller import CacheController

# Python 2 has no FileNotFoundError builtin; fall back to the broader
# (IOError, OSError) pair so the ``except FileNotFoundError`` clauses
# below work on both major versions.
try:
    FileNotFoundError
except NameError:
    # py2.X
    FileNotFoundError = (IOError, OSError)


def _secure_open_write(filename, fmode):
    # We only want to write to this file, so open it in write only mode
    flags = os.O_WRONLY

    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
    #  will open *new* files.
    # We specify this because we want to ensure that the mode we pass is the
    # mode of the file.
    flags |= os.O_CREAT | os.O_EXCL

    # Do not follow symlinks to prevent someone from making a symlink that
    # we follow and insecurely open a cache file.
    if hasattr(os, "O_NOFOLLOW"):
        flags |= os.O_NOFOLLOW

    # On Windows we'll mark this file as binary
    if hasattr(os, "O_BINARY"):
        flags |= os.O_BINARY

    # Before we open our file, we want to delete any existing file that is
    # there
    try:
        os.remove(filename)
    except (IOError, OSError):
        # The file must not exist already, so we can just skip ahead to opening
        pass

    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
    # race condition happens between the os.remove and this line, that an
    # error will be raised. Because we utilize a lockfile this should only
    # happen if someone is attempting to attack us.
    fd = os.open(filename, flags, fmode)
    try:
        return os.fdopen(fd, "wb")

    except:
        # An error occurred wrapping our FD in a file object
        os.close(fd)
        raise


class FileCache(BaseCache):
    """BaseCache backend storing each entry as a file on disk.

    Writes are serialized through a ``lockfile`` lock and performed via
    :func:`_secure_open_write` so cache files get predictable permission
    bits even in the face of races.
    """

    def __init__(
        self,
        directory,
        forever=False,
        filemode=0o0600,
        dirmode=0o0700,
        use_dir_lock=None,
        lock_class=None,
    ):
        # use_dir_lock and lock_class are two ways of choosing the same
        # thing; accepting both at once would be ambiguous.
        if use_dir_lock is not None and lock_class is not None:
            raise ValueError("Cannot use use_dir_lock and lock_class together")

        try:
            from lockfile import LockFile
            from lockfile.mkdirlockfile import MkdirLockFile
        except ImportError:
            notice = dedent(
                """
            NOTE: In order to use the FileCache you must have
            lockfile installed. You can install it via pip:
              pip install lockfile
            """
            )
            raise ImportError(notice)

        else:
            if use_dir_lock:
                lock_class = MkdirLockFile

            elif lock_class is None:
                lock_class = LockFile

        self.directory = directory
        self.forever = forever      # when True, delete() becomes a no-op
        self.filemode = filemode    # mode bits for cache files
        self.dirmode = dirmode      # mode bits for created directories
        self.lock_class = lock_class

    @staticmethod
    def encode(x):
        # Hash the key so arbitrary URLs map to safe, fixed-length names.
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name):
        # NOTE: This method should not change as some may depend on it.
        #       See: https://github.com/ionrock/cachecontrol/issues/63
        # Fan files out over five directory levels (one hex char each) so
        # no single directory grows too large.
        hashed = self.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key):
        """Return the raw bytes stored for *key*, or None when absent."""
        name = self._fn(key)
        try:
            with open(name, "rb") as fh:
                return fh.read()

        except FileNotFoundError:
            return None

    def set(self, key, value):
        """Write *value* (bytes) for *key* under the configured lock."""
        name = self._fn(key)

        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(name), self.dirmode)
        except (IOError, OSError):
            # Typically the directory already exists; any real problem
            # will resurface when we open the file below.
            pass

        with self.lock_class(name) as lock:
            # Write our actual file
            with _secure_open_write(lock.path, self.filemode) as fh:
                fh.write(value)

    def delete(self, key):
        """Remove the entry for *key* unless the cache is 'forever'."""
        name = self._fn(key)
        if not self.forever:
            try:
                os.remove(name)
            except FileNotFoundError:
                pass


def url_to_file_path(url, filecache):
    """Return the on-disk cache path *filecache* would use for *url*.

    This does not ensure the file exists!
    """
    cache_key = CacheController.cache_url(url)
    return filecache._fn(cache_key)
from __future__ import division

from datetime import datetime
from pip._vendor.cachecontrol.cache import BaseCache


class RedisCache(BaseCache):
    """BaseCache backend that stores entries in a Redis connection."""

    def __init__(self, conn):
        # The caller owns the redis client; we only issue commands on it.
        self.conn = conn

    def get(self, key):
        """Return the cached value for *key*, or None when absent."""
        return self.conn.get(key)

    def set(self, key, value, expires=None):
        """Store *value*; *expires* (a datetime) sets a TTL via SETEX."""
        if expires:
            ttl = expires - datetime.utcnow()
            self.conn.setex(key, int(ttl.total_seconds()), value)
        else:
            self.conn.set(key, value)

    def delete(self, key):
        """Remove the entry for *key*."""
        self.conn.delete(key)

    def clear(self):
        """Helper for clearing all the keys in a database. Use with
        caution!"""
        for entry in self.conn.keys():
            self.conn.delete(entry)

    def close(self):
        """Redis uses connection pooling, no need to close the connection."""
        pass
U

��.e9�@s~ddlZddlZddlmZddlmZddlmZzeWne	k
rXe
efZYnXdd�ZGdd	�d	e�Z
d
d�ZdS)�N)�dedent�)�	BaseCache)�CacheControllerc	Cs�tj}|tjtjBO}ttd�r*|tjO}ttd�r>|tjO}zt�|�Wntt	fk
rdYnXt�
|||�}zt�|d�WSt�|��YnXdS)N�
O_NOFOLLOW�O_BINARY�wb)
�os�O_WRONLY�O_CREAT�O_EXCL�hasattrrr�remove�IOError�OSError�open�fdopen�close)�filenameZfmode�flags�fd�r�N/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py�_secure_open_writes 




rc@sBeZdZddd�Zedd��Zd	d
�Zdd�Zd
d�Zdd�Z	dS)�	FileCacheF��Nc
Cs�|dk	r|dk	rtd��zddlm}ddlm}Wn$tk
rXtd�}	t|	��YnX|rd|}n|dkrp|}||_||_||_	||_
||_dS)Nz/Cannot use use_dir_lock and lock_class togetherr)�LockFile)�
MkdirLockFilez�
            NOTE: In order to use the FileCache you must have
            lockfile installed. You can install it via pip:
              pip install lockfile
            )�
ValueErrorZlockfilerZlockfile.mkdirlockfiler�ImportErrorr�	directory�forever�filemode�dirmode�
lock_class)
�selfr!r"r#r$Zuse_dir_lockr%rrZnoticerrr�__init__:s&
�zFileCache.__init__cCst�|�����S�N)�hashlibZsha224�encodeZ	hexdigest)�xrrrr*aszFileCache.encodecCs4|�|�}t|dd��|g}tjj|jf|��S)N�)r*�listr	�path�joinr!)r&�nameZhashed�partsrrr�_fnes
z
FileCache._fnc
CsR|�|�}z,t|d��}|��W5QR�WSQRXWntk
rLYdSXdS)N�rb)r2r�read�FileNotFoundError)r&�keyr0�fhrrr�getls
 z
FileCache.getc
Cs||�|�}zt�tj�|�|j�Wnttfk
r<YnX|�|��*}t	|j|j
��}|�|�W5QRXW5QRXdSr()r2r	�makedirsr.�dirnamer$rrr%rr#�write)r&r6�valuer0�lockr7rrr�setus
z
FileCache.setcCs8|�|�}|js4zt�|�Wntk
r2YnXdSr()r2r"r	rr5)r&r6r0rrr�delete�s
zFileCache.delete)FrrNN)
�__name__�
__module__�__qualname__r'�staticmethodr*r2r8r>r?rrrrr8s�
'
	rcCst�|�}|�|�S)z\Return the file cache path based on the URL.

    This does not ensure the file exists!
    )rZ	cache_urlr2)ZurlZ	filecacher6rrr�url_to_file_path�s
rD)r)r	�textwrapr�cacherZ
controllerrr5�	NameErrorrrrrrDrrrr�<module>s)TU

��.eX�@s8ddlmZddlmZddlmZGdd�de�ZdS)�)�division)�datetime)�	BaseCachec@s>eZdZdd�Zdd�Zddd�Zdd	�Zd
d�Zdd
�ZdS)�
RedisCachecCs
||_dS�N)�conn)�selfr�r	�O/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py�__init__	szRedisCache.__init__cCs|j�|�Sr)r�get�r�keyr	r	r
rszRedisCache.getNcCs<|s|j�||�n$|t��}|j�|t|���|�dSr)r�setrZutcnowZsetex�intZ
total_seconds)rr�valueZexpiresr	r	r
rszRedisCache.setcCs|j�|�dSr)r�deleter
r	r	r
rszRedisCache.deletecCs |j��D]}|j�|�q
dS)zIHelper for clearing all the keys in a database. Use with
        caution!N)r�keysrr
r	r	r
�clearszRedisCache.clearcCsdS)z?Redis uses connection pooling, no need to close the connection.Nr	)rr	r	r
�closeszRedisCache.close)N)	�__name__�
__module__�__qualname__rrrrrrr	r	r	r
rs
rN)Z
__future__rrZpip._vendor.cachecontrol.cacherrr	r	r	r
�<module>sU

��.eV�@sddlmZddlmZdS)�)�	FileCache)�
RedisCacheN)Z
file_cacherZredis_cacher�rr�L/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/__init__.py�<module>sU

��.eV�@sddlmZddlmZdS)�)�	FileCache)�
RedisCacheN)Z
file_cacherZredis_cacher�rr�L/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/__init__.py�<module>sU

��.e9�@s~ddlZddlZddlmZddlmZddlmZzeWne	k
rXe
efZYnXdd�ZGdd	�d	e�Z
d
d�ZdS)�N)�dedent�)�	BaseCache)�CacheControllerc	Cs�tj}|tjtjBO}ttd�r*|tjO}ttd�r>|tjO}zt�|�Wntt	fk
rdYnXt�
|||�}zt�|d�WSt�|��YnXdS)N�
O_NOFOLLOW�O_BINARY�wb)
�os�O_WRONLY�O_CREAT�O_EXCL�hasattrrr�remove�IOError�OSError�open�fdopen�close)�filenameZfmode�flags�fd�r�N/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py�_secure_open_writes 




rc@sBeZdZddd�Zedd��Zd	d
�Zdd�Zd
d�Zdd�Z	dS)�	FileCacheF��Nc
Cs�|dk	r|dk	rtd��zddlm}ddlm}Wn$tk
rXtd�}	t|	��YnX|rd|}n|dkrp|}||_||_||_	||_
||_dS)Nz/Cannot use use_dir_lock and lock_class togetherr)�LockFile)�
MkdirLockFilez�
            NOTE: In order to use the FileCache you must have
            lockfile installed. You can install it via pip:
              pip install lockfile
            )�
ValueErrorZlockfilerZlockfile.mkdirlockfiler�ImportErrorr�	directory�forever�filemode�dirmode�
lock_class)
�selfr!r"r#r$Zuse_dir_lockr%rrZnoticerrr�__init__:s&
�zFileCache.__init__cCst�|�����S�N)�hashlibZsha224�encodeZ	hexdigest)�xrrrr*aszFileCache.encodecCs4|�|�}t|dd��|g}tjj|jf|��S)N�)r*�listr	�path�joinr!)r&�nameZhashed�partsrrr�_fnes
z
FileCache._fnc
CsR|�|�}z,t|d��}|��W5QR�WSQRXWntk
rLYdSXdS)N�rb)r2r�read�FileNotFoundError)r&�keyr0�fhrrr�getls
 z
FileCache.getc
Cs||�|�}zt�tj�|�|j�Wnttfk
r<YnX|�|��*}t	|j|j
��}|�|�W5QRXW5QRXdSr()r2r	�makedirsr.�dirnamer$rrr%rr#�write)r&r6�valuer0�lockr7rrr�setus
z
FileCache.setcCs8|�|�}|js4zt�|�Wntk
r2YnXdSr()r2r"r	rr5)r&r6r0rrr�delete�s
zFileCache.delete)FrrNN)
�__name__�
__module__�__qualname__r'�staticmethodr*r2r8r>r?rrrrr8s�
'
	rcCst�|�}|�|�S)z\Return the file cache path based on the URL.

    This does not ensure the file exists!
    )rZ	cache_urlr2)ZurlZ	filecacher6rrr�url_to_file_path�s
rD)r)r	�textwrapr�cacherZ
controllerrr5�	NameErrorrrrrrDrrrr�<module>s)TU

��.eX�@s8ddlmZddlmZddlmZGdd�de�ZdS)�)�division)�datetime)�	BaseCachec@s>eZdZdd�Zdd�Zddd�Zdd	�Zd
d�Zdd
�ZdS)�
RedisCachecCs
||_dS�N)�conn)�selfr�r	�O/usr/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py�__init__	szRedisCache.__init__cCs|j�|�Sr)r�get�r�keyr	r	r
rszRedisCache.getNcCs<|s|j�||�n$|t��}|j�|t|���|�dSr)r�setrZutcnowZsetex�intZ
total_seconds)rr�valueZexpiresr	r	r
rszRedisCache.setcCs|j�|�dSr)r�deleter
r	r	r
rszRedisCache.deletecCs |j��D]}|j�|�q
dS)zIHelper for clearing all the keys in a database. Use with
        caution!N)r�keysrr
r	r	r
�clearszRedisCache.clearcCsdS)z?Redis uses connection pooling, no need to close the connection.Nr	)rr	r	r
�closeszRedisCache.close)N)	�__name__�
__module__�__qualname__rrrrrrr	r	r	r
rs
rN)Z
__future__rrZpip._vendor.cachecontrol.cacherrr	r	r	r
�<module>sfrom .file_cache import FileCache  # noqa
from .redis_cache import RedisCache  # noqa
"""

    webencodings.mklabels
    ~~~~~~~~~~~~~~~~~~~~~

    Regenerate the webencodings.labels module.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

import json
try:
    from urllib import urlopen
except ImportError:
    from urllib.request import urlopen


def assert_lower(string):
    """Return *string* unchanged, asserting it is already lower-case."""
    lowered = string.lower()
    assert string == lowered
    return string


def generate(url):
    """Fetch the WHATWG encodings registry at *url* and render the
    source text of the ``webencodings.labels`` module."""
    header = '''\
"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

LABELS = {
'''
    registry = json.loads(urlopen(url).read().decode('ascii'))
    # Collect (label_repr, name_repr) pairs; lstrip('u') normalizes away
    # the Python 2 unicode-literal prefix in the reprs.
    labels = []
    for category in registry:
        for encoding in category['encodings']:
            for label in encoding['labels']:
                labels.append(
                    (repr(assert_lower(label)).lstrip('u'),
                     repr(encoding['name']).lstrip('u')))
    max_len = max(len(label) for label, name in labels)
    # Pad each entry so the names line up in a column.
    pieces = [header]
    for label, name in labels:
        pieces.append(
            '    %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name))
    pieces.append('}')
    return ''.join(pieces)


if __name__ == '__main__':
    # Emit a freshly generated labels module (from the canonical WHATWG
    # registry) to stdout.
    print(generate('http://encoding.spec.whatwg.org/encodings.json'))
U

��.e�@sfdZddlZzddlmZWn ek
r<ddlmZYnXdd�Zdd�Zedkrbe	ed	��dS)
z�

    webencodings.mklabels
    ~~~~~~~~~~~~~~~~~~~~~

    Regenarate the webencodings.labels module.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�N)�urlopencCs|S�N�)�stringrr�E/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/mklabels.py�assert_lowersrcsfdg}dd�t�t|����d��D�}tdd�|D���|��fdd�|D��|�d�d	�|�S)
Na"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

LABELS = {
cSsLg|]D}|dD]6}|dD](}tt|���d�t|d��d�f�qqqS)Z	encodings�labels�u�name)�reprr�lstrip)�.0�category�encoding�labelrrr�
<listcomp>,s

��zgenerate.<locals>.<listcomp>�asciicss|]\}}t|�VqdSr��len�r
rr
rrr�	<genexpr>2szgenerate.<locals>.<genexpr>c3s,|]$\}}d|d�t|�|fVqdS)z    %s:%s %s,
� Nrr�Zmax_lenrrr3s��}�)	�json�loadsr�read�decode�max�extend�append�join)Zurl�partsrrrr�generates��
r$�__main__z.http://encoding.spec.whatwg.org/encodings.json)
�__doc__rZurllibr�ImportErrorZurllib.requestrr$�__name__�printrrrr�<module>s!U

��.e��@s�dZddlmZddlmZmZmZmZmZm	Z	m
Z
mZmZdd�Z
dd�Zd	d
�Zdd�Zd
d�Zdd�Zdd�Zdd�Zdd�ZdS)z�

    webencodings.tests
    ~~~~~~~~~~~~~~~~~~

    A basic test suite for Encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literals�)	�lookup�LABELS�decode�encode�iter_decode�iter_encode�IncrementalDecoder�IncrementalEncoder�UTF8cOs6z|||�Wn|k
r$YdSXtd|��dS)NzDid not raise %s.)�AssertionError)Z	exceptionZfunction�args�kwargs�r�B/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/tests.py�
assert_raisess
rcCsdS�Nrrrrr�test_labelssrcCsNtD]2}dD]}tdg||�\}}qt|�}t|�}qtt���D]}qDdS)N)rr��)rrr
r�set�values)Zlabel�repeat�output�_�decoder�encoder�namerrr�test_all_labels0srcCsTtttdd�tttdd�tttgd�tttgd�tttd�tttd�dS)NséZinvalid�é)r�LookupErrorrrrr	r
rrrrr�test_invalid_labelCsr"cCsdSrrrrrr�test_decodeLsr#cCsdSrrrrrr�test_encodebsr$cCsdd�}dS)NcSst||�\}}d�|�S)N�)r�join)�inputZfallback_encodingrZ	_encodingrrr�iter_decode_to_stringlsz/test_iter_decode.<locals>.iter_decode_to_stringr)r(rrr�test_iter_decodeksr)cCsdSrrrrrr�test_iter_encode�sr*cCsd}d}d}d}dS)Ns2,O�#�ɻtϨ�u2,O#tsaaZaar)ZencodedZdecodedrrr�test_x_user_defined�s
r+N)�__doc__Z
__future__rr%rrrrrr	r
rrrrrr"r#r$r)r*r+rrrr�<module>s,			U

��.eS)�@s�dZddlmZddlZddlmZdZddd	d
d�ZiZdd
�Z	dd�Z
dd�ZGdd�de�Z
e
d�Ze
d�Ze
d�Zd+dd�Zdd�Zedfdd�Zd,dd �Zd!d"�Zedfd#d$�Zd%d&�ZGd'd(�d(e�ZGd)d*�d*e�ZdS)-a

    webencodings
    ~~~~~~~~~~~~

    This is a Python implementation of the `WHATWG Encoding standard
    <http://encoding.spec.whatwg.org/>`. See README for details.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsN�)�LABELSz0.5.1z
iso-8859-8zmac-cyrillicz	mac-romanZcp874)ziso-8859-8-izx-mac-cyrillic�	macintoshzwindows-874cCs|�d����d�S)a9Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    This is used for `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
    matching of encoding labels.
    The same matching is also used, among other things,
    for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.

    This is different from the :meth:`~py:str.lower` method of Unicode strings
    which also affect non-ASCII characters,
    sometimes mapping them into the ASCII range:

        >>> keyword = u'Bac\N{KELVIN SIGN}ground'
        >>> assert keyword.lower() == u'background'
        >>> assert ascii_lower(keyword) != keyword.lower()
        >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'

    �utf8)�encode�lower�decode)�string�r�E/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/__init__.py�ascii_lower#sr
cCsxt|�d��}t�|�}|dkr$dSt�|�}|dkrt|dkrLddlm}nt�||�}t�	|�}t
||�}|t|<|S)u<
    Look for an encoding by its label.
    This is the spec’s `get an encoding
    <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
    Supported labels are listed there.

    :param label: A string.
    :returns:
        An :class:`Encoding` object, or :obj:`None` for an unknown label.

    z	

 Nzx-user-definedr)�
codec_info)r
�stripr�get�CACHEZx_user_definedr�PYTHON_NAMES�codecs�lookup�Encoding)Zlabel�name�encodingrZpython_namerrrr=s




rcCs.t|d�r|St|�}|dkr*td|��|S)z�
    Accept either an encoding object or label.

    :param encoding: An :class:`Encoding` object or a label string.
    :returns: An :class:`Encoding` object.
    :raises: :exc:`~exceptions.LookupError` for an unknown label.

    rNzUnknown encoding label: %r)�hasattrr�LookupError)Zencoding_or_labelrrrr�
_get_encoding[s	
rc@s eZdZdZdd�Zdd�ZdS)raOReresents a character encoding such as UTF-8,
    that can be used for decoding or encoding.

    .. attribute:: name

        Canonical name of the encoding

    .. attribute:: codec_info

        The actual implementation of the encoding,
        a stdlib :class:`~codecs.CodecInfo` object.
        See :func:`codecs.register`.

    cCs||_||_dS�N)rr)�selfrrrrr�__init__|szEncoding.__init__cCs
d|jS)Nz
<Encoding %s>)r)rrrr�__repr__�szEncoding.__repr__N)�__name__�
__module__�__qualname__�__doc__rrrrrrrmsrzutf-8zutf-16lezutf-16be�replacecCs2t|�}t|�\}}|p|}|j�||�d|fS)a�
    Decode a single string.

    :param input: A byte string
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return:
        A ``(output, encoding)`` tuple of an Unicode string
        and an :obj:`Encoding`.

    r)r�_detect_bomrr	)�input�fallback_encoding�errorsZbom_encodingrrrrr	�sr	cCsV|�d�rt|dd�fS|�d�r4t|dd�fS|�d�rNt|dd�fSd|fS)zBReturn (bom_encoding, input), with any BOM removed from the input.s���Ns��s�)�
startswith�_UTF16LE�_UTF16BE�UTF8)r%rrrr$�s


r$�strictcCst|�j�||�dS)a;
    Encode a single string.

    :param input: An Unicode string.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return: A byte string.

    r)rrr)r%rr'rrrr�srcCs$t||�}t||�}t|�}||fS)a�
    "Pull"-based decoder.

    :param input:
        An iterable of byte strings.

        The input is first consumed just enough to determine the encoding
        based on the precense of a BOM,
        then consumed on demand when the return value is.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns:
        An ``(output, encoding)`` tuple.
        :obj:`output` is an iterable of Unicode strings,
        :obj:`encoding` is the :obj:`Encoding` that is being used.

    )�IncrementalDecoder�_iter_decode_generator�next)r%r&r'�decoder�	generatorrrrr�iter_decode�s

r4ccs�|j}t|�}|D]"}||�}|r|jV|VqXq|ddd�}|jV|rT|VdS|D]}||�}|r\|Vq\|ddd�}|r�|VdS)zqReturn a generator that first yields the :obj:`Encoding`,
    then yields output chukns as Unicode strings.

    �T��finalN)r	�iterr)r%r2r	�chunck�outputrrrr0�s(r0cCst||�j}t||�S)uY
    “Pull”-based encoder.

    :param input: An iterable of Unicode strings.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns: An iterable of byte strings.

    )�IncrementalEncoderr�_iter_encode_generator)r%rr'rrrr�iter_encode�sr=ccs6|D]}||�}|r|Vq|ddd�}|r2|VdS)N�Tr6r)r%rr9r:rrrr<sr<c@s$eZdZdZd	dd�Zd
dd�ZdS)r/uO
    “Push”-based decoder.

    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    r#cCs&t|�|_||_d|_d|_d|_dS)Nr5)r�_fallback_encoding�_errors�_buffer�_decoderr)rr&r'rrrrs

zIncrementalDecoder.__init__FcCs||j}|dk	r|||�S|j|}t|�\}}|dkrVt|�dkrP|sP||_dS|j}|j�|j�j}||_||_	|||�S)z�Decode one chunk of the input.

        :param input: A byte string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: An Unicode string.

        Nr)r>)
rBrAr$�lenr?r�incrementaldecoderr@r	r)rr%r7r2rrrrr	's


zIncrementalDecoder.decodeN)r#)F)rr r!r"rr	rrrrr/s

r/c@seZdZdZedfdd�ZdS)r;u�
    “Push”-based encoder.

    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    .. method:: encode(input, final=False)

        :param input: An Unicode string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: A byte string.

    r.cCst|�}|j�|�j|_dSr)rr�incrementalencoderr)rrr'rrrrTszIncrementalEncoder.__init__N)rr r!r"r-rrrrrr;Csr;)r#)r#)r"Z
__future__rr�labelsrZVERSIONrrr
rr�objectrr-r+r,r	r$rr4r0r=r<r/r;rrrr�<module>s4
�

 
3U

��.eS)�@s�dZddlmZddlZddlmZdZddd	d
d�ZiZdd
�Z	dd�Z
dd�ZGdd�de�Z
e
d�Ze
d�Ze
d�Zd+dd�Zdd�Zedfdd�Zd,dd �Zd!d"�Zedfd#d$�Zd%d&�ZGd'd(�d(e�ZGd)d*�d*e�ZdS)-a

    webencodings
    ~~~~~~~~~~~~

    This is a Python implementation of the `WHATWG Encoding standard
    <http://encoding.spec.whatwg.org/>`. See README for details.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsN�)�LABELSz0.5.1z
iso-8859-8zmac-cyrillicz	mac-romanZcp874)ziso-8859-8-izx-mac-cyrillic�	macintoshzwindows-874cCs|�d����d�S)a9Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    This is used for `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
    matching of encoding labels.
    The same matching is also used, among other things,
    for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.

    This is different from the :meth:`~py:str.lower` method of Unicode strings
    which also affect non-ASCII characters,
    sometimes mapping them into the ASCII range:

        >>> keyword = u'Bac\N{KELVIN SIGN}ground'
        >>> assert keyword.lower() == u'background'
        >>> assert ascii_lower(keyword) != keyword.lower()
        >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'

    �utf8)�encode�lower�decode)�string�r�E/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/__init__.py�ascii_lower#sr
cCsxt|�d��}t�|�}|dkr$dSt�|�}|dkrt|dkrLddlm}nt�||�}t�	|�}t
||�}|t|<|S)u<
    Look for an encoding by its label.
    This is the spec’s `get an encoding
    <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
    Supported labels are listed there.

    :param label: A string.
    :returns:
        An :class:`Encoding` object, or :obj:`None` for an unknown label.

    z	

 Nzx-user-definedr)�
codec_info)r
�stripr�get�CACHEZx_user_definedr�PYTHON_NAMES�codecs�lookup�Encoding)Zlabel�name�encodingrZpython_namerrrr=s




rcCs.t|d�r|St|�}|dkr*td|��|S)z�
    Accept either an encoding object or label.

    :param encoding: An :class:`Encoding` object or a label string.
    :returns: An :class:`Encoding` object.
    :raises: :exc:`~exceptions.LookupError` for an unknown label.

    rNzUnknown encoding label: %r)�hasattrr�LookupError)Zencoding_or_labelrrrr�
_get_encoding[s	
rc@s eZdZdZdd�Zdd�ZdS)raOReresents a character encoding such as UTF-8,
    that can be used for decoding or encoding.

    .. attribute:: name

        Canonical name of the encoding

    .. attribute:: codec_info

        The actual implementation of the encoding,
        a stdlib :class:`~codecs.CodecInfo` object.
        See :func:`codecs.register`.

    cCs||_||_dS�N)rr)�selfrrrrr�__init__|szEncoding.__init__cCs
d|jS)Nz
<Encoding %s>)r)rrrr�__repr__�szEncoding.__repr__N)�__name__�
__module__�__qualname__�__doc__rrrrrrrmsrzutf-8zutf-16lezutf-16be�replacecCs2t|�}t|�\}}|p|}|j�||�d|fS)a�
    Decode a single string.

    :param input: A byte string
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return:
        A ``(output, encoding)`` tuple of an Unicode string
        and an :obj:`Encoding`.

    r)r�_detect_bomrr	)�input�fallback_encoding�errorsZbom_encodingrrrrr	�sr	cCsV|�d�rt|dd�fS|�d�r4t|dd�fS|�d�rNt|dd�fSd|fS)zBReturn (bom_encoding, input), with any BOM removed from the input.s���Ns��s�)�
startswith�_UTF16LE�_UTF16BE�UTF8)r%rrrr$�s


r$�strictcCst|�j�||�dS)a;
    Encode a single string.

    :param input: An Unicode string.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return: A byte string.

    r)rrr)r%rr'rrrr�srcCs$t||�}t||�}t|�}||fS)a�
    "Pull"-based decoder.

    :param input:
        An iterable of byte strings.

        The input is first consumed just enough to determine the encoding
        based on the precense of a BOM,
        then consumed on demand when the return value is.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns:
        An ``(output, encoding)`` tuple.
        :obj:`output` is an iterable of Unicode strings,
        :obj:`encoding` is the :obj:`Encoding` that is being used.

    )�IncrementalDecoder�_iter_decode_generator�next)r%r&r'�decoder�	generatorrrrr�iter_decode�s

r4ccs�|j}t|�}|D]0}||�}|r|jdk	s0t�|jV|Vqtq|ddd�}|jdk	s^t�|jV|rp|VdS|D]}||�}|rx|Vqx|ddd�}|r�|VdS)zqReturn a generator that first yields the :obj:`Encoding`,
    then yields output chukns as Unicode strings.

    N�T��final)r	�iterr�AssertionError)r%r2r	�chunck�outputrrrr0�s,r0cCst||�j}t||�S)uY
    “Pull”-based encoder.

    :param input: An iterable of Unicode strings.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns: An iterable of byte strings.

    )�IncrementalEncoderr�_iter_encode_generator)r%rr'rrrr�iter_encode�sr>ccs6|D]}||�}|r|Vq|ddd�}|r2|VdS)N�Tr6r)r%rr:r;rrrr=sr=c@s$eZdZdZd	dd�Zd
dd�ZdS)r/uO
    “Push”-based decoder.

    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    r#cCs&t|�|_||_d|_d|_d|_dS)Nr5)r�_fallback_encoding�_errors�_buffer�_decoderr)rr&r'rrrrs

zIncrementalDecoder.__init__FcCs||j}|dk	r|||�S|j|}t|�\}}|dkrVt|�dkrP|sP||_dS|j}|j�|j�j}||_||_	|||�S)z�Decode one chunk of the input.

        :param input: A byte string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: An Unicode string.

        Nr)r?)
rCrBr$�lenr@r�incrementaldecoderrAr	r)rr%r7r2rrrrr	's


zIncrementalDecoder.decodeN)r#)F)rr r!r"rr	rrrrr/s

r/c@seZdZdZedfdd�ZdS)r<u�
    “Push”-based encoder.

    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    .. method:: encode(input, final=False)

        :param input: An Unicode string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: A byte string.

    r.cCst|�}|j�|�j|_dSr)rr�incrementalencoderr)rrr'rrrrTszIncrementalEncoder.__init__N)rr r!r"r-rrrrrr<Csr<)r#)r#)r"Z
__future__rr�labelsrZVERSIONrrr
rr�objectrr-r+r,r	r$rr4r0r>r=r/r<rrrr�<module>s4
�

 
3U

��.e#��@s�dZddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd	d	d	d	d	d	d	d	d	d	d	d
d
d
ddddddddddd
d
d
dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd d!d!d!d!d!d"d"d"d#d#d$d$d$d$d$d$d$d%d%d%d%d%d%d%d%d%d%d&d&d'd(d(d)d*��Zd+S),z�

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�utf-8�ibm866�
iso-8859-2�
iso-8859-3�
iso-8859-4�
iso-8859-5�
iso-8859-6�
iso-8859-7�
iso-8859-8�iso-8859-8-i�iso-8859-10�iso-8859-13�iso-8859-14�iso-8859-15�iso-8859-16�koi8-r�koi8-u�	macintosh�windows-874�windows-1250�windows-1251�windows-1252�windows-1253�windows-1254�windows-1255�windows-1256�windows-1257�windows-1258�x-mac-cyrillic�gbk�gb18030�
hz-gb-2312�big5�euc-jp�iso-2022-jp�	shift_jis�euc-kr�iso-2022-kr�utf-16be�utf-16le�x-user-defined)�zunicode-1-1-utf-8r�utf8�866�cp866�csibm866r�csisolatin2rz
iso-ir-101z	iso8859-2Ziso88592z
iso_8859-2ziso_8859-2:1987�l2�latin2�csisolatin3rz
iso-ir-109z	iso8859-3Ziso88593z
iso_8859-3ziso_8859-3:1988�l3�latin3�csisolatin4rz
iso-ir-110z	iso8859-4Ziso88594z
iso_8859-4ziso_8859-4:1988�l4�latin4�csisolatincyrillic�cyrillicrz
iso-ir-144z	iso8859-5Ziso88595z
iso_8859-5ziso_8859-5:1988�arabiczasmo-708Zcsiso88596eZcsiso88596i�csisolatinarabiczecma-114rziso-8859-6-eziso-8859-6-iz
iso-ir-127z	iso8859-6Ziso88596z
iso_8859-6ziso_8859-6:1987�csisolatingreekzecma-118�elot_928�greek�greek8rz
iso-ir-126z	iso8859-7Ziso88597z
iso_8859-7ziso_8859-7:1987Zsun_eu_greekZcsiso88598e�csisolatinhebrew�hebrewr	ziso-8859-8-ez
iso-ir-138z	iso8859-8Ziso88598z
iso_8859-8ziso_8859-8:1988ZvisualZcsiso88598ir
Zlogical�csisolatin6rz
iso-ir-157z
iso8859-10Z	iso885910�l6�latin6rz
iso8859-13Z	iso885913r
z
iso8859-14Z	iso885914Zcsisolatin9rz
iso8859-15Z	iso885915ziso_8859-15�l9r�cskoi8rZkoiZkoi8r�koi8_rrZcsmacintoshZmacrzx-mac-romanzdos-874ziso-8859-11z
iso8859-11Z	iso885911ztis-620r�cp1250rzx-cp1250�cp1251rzx-cp1251zansi_x3.4-1968�ascii�cp1252�cp819�csisolatin1�ibm819z
iso-8859-1z
iso-ir-100z	iso8859-1Ziso88591z
iso_8859-1ziso_8859-1:1987�l1�latin1zus-asciirzx-cp1252�cp1253rzx-cp1253�cp1254�csisolatin5z
iso-8859-9z
iso-ir-148z	iso8859-9Ziso88599z
iso_8859-9ziso_8859-9:1989�l5�latin5rzx-cp1254�cp1255rzx-cp1255�cp1256rzx-cp1256�cp1257rzx-cp1257�cp1258rzx-cp1258rzx-mac-ukrainian�chineseZcsgb2312�csiso58gb231280�gb2312Zgb_2312z
gb_2312-80rz	iso-ir-58zx-gbkrr r!z
big5-hkscszcn-big5�csbig5zx-x-big5Zcseucpkdfmtjapaneser"zx-euc-jp�csiso2022jpr#�
csshiftjis�ms_kanjiz	shift-jisr$�sjiszwindows-31jzx-sjisZcseuckrZ
csksc56011987r%z
iso-ir-149�koreanzks_c_5601-1987zks_c_5601-1989�ksc5601Zksc_5601zwindows-949�csiso2022krr&r'zutf-16r(r)N)�__doc__ZLABELS�rere�C/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/labels.py�<module>s���U

��.e#��@s�dZddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd	d	d	d	d	d	d	d	d	d	d	d
d
d
ddddddddddd
d
d
dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd d!d!d!d!d!d"d"d"d#d#d$d$d$d$d$d$d$d%d%d%d%d%d%d%d%d%d%d&d&d'd(d(d)d*��Zd+S),z�

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�utf-8�ibm866�
iso-8859-2�
iso-8859-3�
iso-8859-4�
iso-8859-5�
iso-8859-6�
iso-8859-7�
iso-8859-8�iso-8859-8-i�iso-8859-10�iso-8859-13�iso-8859-14�iso-8859-15�iso-8859-16�koi8-r�koi8-u�	macintosh�windows-874�windows-1250�windows-1251�windows-1252�windows-1253�windows-1254�windows-1255�windows-1256�windows-1257�windows-1258�x-mac-cyrillic�gbk�gb18030�
hz-gb-2312�big5�euc-jp�iso-2022-jp�	shift_jis�euc-kr�iso-2022-kr�utf-16be�utf-16le�x-user-defined)�zunicode-1-1-utf-8r�utf8�866�cp866�csibm866r�csisolatin2rz
iso-ir-101z	iso8859-2Ziso88592z
iso_8859-2ziso_8859-2:1987�l2�latin2�csisolatin3rz
iso-ir-109z	iso8859-3Ziso88593z
iso_8859-3ziso_8859-3:1988�l3�latin3�csisolatin4rz
iso-ir-110z	iso8859-4Ziso88594z
iso_8859-4ziso_8859-4:1988�l4�latin4�csisolatincyrillic�cyrillicrz
iso-ir-144z	iso8859-5Ziso88595z
iso_8859-5ziso_8859-5:1988�arabiczasmo-708Zcsiso88596eZcsiso88596i�csisolatinarabiczecma-114rziso-8859-6-eziso-8859-6-iz
iso-ir-127z	iso8859-6Ziso88596z
iso_8859-6ziso_8859-6:1987�csisolatingreekzecma-118�elot_928�greek�greek8rz
iso-ir-126z	iso8859-7Ziso88597z
iso_8859-7ziso_8859-7:1987Zsun_eu_greekZcsiso88598e�csisolatinhebrew�hebrewr	ziso-8859-8-ez
iso-ir-138z	iso8859-8Ziso88598z
iso_8859-8ziso_8859-8:1988ZvisualZcsiso88598ir
Zlogical�csisolatin6rz
iso-ir-157z
iso8859-10Z	iso885910�l6�latin6rz
iso8859-13Z	iso885913r
z
iso8859-14Z	iso885914Zcsisolatin9rz
iso8859-15Z	iso885915ziso_8859-15�l9r�cskoi8rZkoiZkoi8r�koi8_rrZcsmacintoshZmacrzx-mac-romanzdos-874ziso-8859-11z
iso8859-11Z	iso885911ztis-620r�cp1250rzx-cp1250�cp1251rzx-cp1251zansi_x3.4-1968�ascii�cp1252�cp819�csisolatin1�ibm819z
iso-8859-1z
iso-ir-100z	iso8859-1Ziso88591z
iso_8859-1ziso_8859-1:1987�l1�latin1zus-asciirzx-cp1252�cp1253rzx-cp1253�cp1254�csisolatin5z
iso-8859-9z
iso-ir-148z	iso8859-9Ziso88599z
iso_8859-9ziso_8859-9:1989�l5�latin5rzx-cp1254�cp1255rzx-cp1255�cp1256rzx-cp1256�cp1257rzx-cp1257�cp1258rzx-cp1258rzx-mac-ukrainian�chineseZcsgb2312�csiso58gb231280�gb2312Zgb_2312z
gb_2312-80rz	iso-ir-58zx-gbkrr r!z
big5-hkscszcn-big5�csbig5zx-x-big5Zcseucpkdfmtjapaneser"zx-euc-jp�csiso2022jpr#�
csshiftjis�ms_kanjiz	shift-jisr$�sjiszwindows-31jzx-sjisZcseuckrZ
csksc56011987r%z
iso-ir-149�koreanzks_c_5601-1987zks_c_5601-1989�ksc5601Zksc_5601zwindows-949�csiso2022krr&r'zutf-16r(r)N)�__doc__ZLABELS�rere�C/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/labels.py�<module>s���U

��.e�@sfdZddlZzddlmZWn ek
r<ddlmZYnXdd�Zdd�Zedkrbe	ed	��dS)
z�

    webencodings.mklabels
    ~~~~~~~~~~~~~~~~~~~~~

    Regenarate the webencodings.labels module.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�N)�urlopencCs||��kst�|S�N)�lower�AssertionError)�string�r�E/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/mklabels.py�assert_lowersr	csfdg}dd�t�t|����d��D�}tdd�|D���|��fdd�|D��|�d�d	�|�S)
Na"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

LABELS = {
cSsLg|]D}|dD]6}|dD](}tt|���d�t|d��d�f�qqqS)Z	encodings�labels�u�name)�reprr	�lstrip)�.0�category�encoding�labelrrr�
<listcomp>,s

��zgenerate.<locals>.<listcomp>�asciicss|]\}}t|�VqdSr��len�rrrrrr�	<genexpr>2szgenerate.<locals>.<genexpr>c3s,|]$\}}d|d�t|�|fVqdS)z    %s:%s %s,
� Nrr�Zmax_lenrrr3s��}�)	�json�loadsr�read�decode�max�extend�append�join)Zurl�partsr
rrr�generates��
r&�__main__z.http://encoding.spec.whatwg.org/encodings.json)
�__doc__rZurllibr�ImportErrorZurllib.requestr	r&�__name__�printrrrr�<module>s!U

��.e��	@s�dZddlmZddlZGdd�dej�ZGdd�dej�ZGdd	�d	ej�ZGd
d�deej�ZGdd
�d
eej�Zej	de�j
e�jeeeed�ZdZ
e�e
�ZdS)z�

    webencodings.x_user_defined
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~

    An implementation of the x-user-defined encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsNc@s eZdZddd�Zddd�ZdS)	�Codec�strictcCst�||t�S�N)�codecs�charmap_encode�encoding_table��self�input�errors�r
�K/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/x_user_defined.py�encodeszCodec.encodecCst�||t�Sr)r�charmap_decode�decoding_tabler	r
r
r�decodeszCodec.decodeN)r)r)�__name__�
__module__�__qualname__rrr
r
r
rrs
rc@seZdZddd�ZdS)�IncrementalEncoderFcCst�||jt�dS�Nr)rrrr�r
r�finalr
r
rrszIncrementalEncoder.encodeN)F)rrrrr
r
r
rrsrc@seZdZddd�ZdS)�IncrementalDecoderFcCst�||jt�dSr)rrrrrr
r
rr$szIncrementalDecoder.decodeN)F)rrrrr
r
r
rr#src@seZdZdS)�StreamWriterN�rrrr
r
r
rr(src@seZdZdS)�StreamReaderNrr
r
r
rr,srzx-user-defined)�namerr�incrementalencoder�incrementaldecoder�streamreader�streamwriteru	

 !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~)�__doc__Z
__future__rrrrrrr�	CodecInforrZ
codec_infor�
charmap_buildrr
r
r
r�<module>s*	��U

��.e��@s�dZddlmZddlmZmZmZmZmZm	Z	m
Z
mZmZdd�Z
dd�Zd	d
�Zdd�Zd
d�Zdd�Zdd�Zdd�Zdd�ZdS)z�

    webencodings.tests
    ~~~~~~~~~~~~~~~~~~

    A basic test suite for Encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literals�)	�lookup�LABELS�decode�encode�iter_decode�iter_encode�IncrementalDecoder�IncrementalEncoder�UTF8cOs6z|||�Wn|k
r$YdSXtd|��dS)NzDid not raise %s.��AssertionError)Z	exceptionZfunction�args�kwargs�r�B/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/tests.py�
assert_raisess
rcCstd�jdkst�td�jdks$t�td�jdks6t�td�jdksHt�td�jdksZt�td�jdkslt�td�jdks~t�td�dks�t�td�dks�t�td	�jd
ks�t�td�jd
ks�t�td�jd
ks�t�td
�jd
ks�t�td�dks�t�td�dk�st�dS)Nzutf-8zUtf-8zUTF-8�utf8zutf8 z 
utf8	�u8uutf-8 zUS-ASCIIzwindows-1252z
iso-8859-1�latin1ZLATIN1zlatin-1uLATİN1)r�namerrrrr�test_labelssrcCstD]�}td|�dt|�fks"t�td|�dks4t�dD]D}tdg||�\}}t|�gks`t�ttdg||��gks8t�q8t|�}|�d�dks�t�|jddd�dks�t�t	|�}|�d�dks�t�|jddd�dkst�qt
t���D]}t|�j|ks�t�q�dS)N��)rr�T)�final)
rrrrrr�listr	r
r�set�valuesr)Zlabel�repeat�output�_�decoder�encoderrrrr�test_all_labels0sr%cCsTtttdd�tttdd�tttgd�tttgd�tttd�tttd�dS)N�éZinvalid�é)r�LookupErrorrrrr	r
rrrrr�test_invalid_labelCsr)cCs�tdd�dtd�fkst�tdtd��dtd�fks8t�tdd�dtd�fksRt�tdt�dtd�fkslt�tdd�dtd�fks�t�td	d�dtd�fks�t�td
d�dtd�fks�t�tdd�dtd
�fks�t�tdd�dtd�fks�t�tdd�dtd
�fk�s
t�tdd�dtd�fk�s&t�tdd�dtd
�fk�sBt�tdd�dtd
�fk�s^t�tdd�dtd�fk�szt�tdd�dtd
�fk�s�t�tdd�dtd
�fk�s�t�dS)N��ru€�érr'�ascii�ér&����utf-16be�����utf-16les���us�����zUTF-16BE��zUTF-16LEzUTF-16)rrrrrrrr�test_decodeLs r4cCsptdd�dkst�tdd�dks$t�tdd�dks6t�tdd�dksHt�tdd�dksZt�tdd	�d
kslt�dS)Nr'r��rr+�utf-16r3r1r/r2)rrrrrr�test_encodebsr7cCs�dd�}|gd�dkst�|dgd�dks.t�|dgd�dksBt�|dgd�d	ksVt�|d
dgd�d	kslt�|dd
gd�d	ks�t�|dgd�dks�t�|dgd�dks�t�|dddgd�dks�t�|dddgd�dks�t�|ddddddgd�dks�t�|dgd�dk�st�|dgd�dk�s$t�|dgd�dk�s:t�|dgd�dk�sPt�|ddddddgd�dk�spt�|dddgd �d!k�s�t�dS)"NcSst||�\}}d�|�S)Nr)r�join)�inputZfallback_encodingr!Z	_encodingrrr�iter_decode_to_stringlsz/test_iter_decode.<locals>.iter_decode_to_stringrrrr5r'shelloZhellosheslloshell�or+r-r&s�����aua���s���s�uï»r.r0��s���sh�x-user-defineduhllor
)r:rrr�test_iter_decodeks�������������
���rCcCs�d�tgd��dkst�d�tdgd��dks2t�d�tdgd��dksLt�d�tddddgd��dkslt�d�tddddgd��dks�t�d�tddddgd��dks�t�d�tddddgd	��d
ks�t�d�tddddgd
��dks�t�dS)Nrrrr'r5r6r3r1r/r2uhZllorBsh�llo)r8r	rrrrr�test_iter_encode�s"    ���rDcCs@d}d}d}d}t|d�|td�fks*t�t|d�|ks<t�dS)Ns2,O�#�ɻtϨ�u2,O#tsaaZaarB)rrrr)ZencodedZdecodedrrr�test_x_user_defined�srEN)�__doc__Z
__future__rrrrrrrr	r
rrrrr%r)r4r7rCrDrErrrr�<module>s,			U

��.e��	@s�dZddlmZddlZGdd�dej�ZGdd�dej�ZGdd	�d	ej�ZGd
d�deej�ZGdd
�d
eej�Zej	de�j
e�jeeeed�ZdZ
e�e
�ZdS)z�

    webencodings.x_user_defined
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~

    An implementation of the x-user-defined encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsNc@s eZdZddd�Zddd�ZdS)	�Codec�strictcCst�||t�S�N)�codecs�charmap_encode�encoding_table��self�input�errors�r
�K/usr/lib/python3.8/site-packages/pip/_vendor/webencodings/x_user_defined.py�encodeszCodec.encodecCst�||t�Sr)r�charmap_decode�decoding_tabler	r
r
r�decodeszCodec.decodeN)r)r)�__name__�
__module__�__qualname__rrr
r
r
rrs
rc@seZdZddd�ZdS)�IncrementalEncoderFcCst�||jt�dS�Nr)rrrr�r
r�finalr
r
rrszIncrementalEncoder.encodeN)F)rrrrr
r
r
rrsrc@seZdZddd�ZdS)�IncrementalDecoderFcCst�||jt�dSr)rrrrrr
r
rr$szIncrementalDecoder.decodeN)F)rrrrr
r
r
rr#src@seZdZdS)�StreamWriterN�rrrr
r
r
rr(src@seZdZdS)�StreamReaderNrr
r
r
rr,srzx-user-defined)�namerr�incrementalencoder�incrementaldecoder�streamreader�streamwriteru	

 !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~)�__doc__Z
__future__rrrrrrr�	CodecInforrZ
codec_infor�
charmap_buildrr
r
r
r�<module>s*	��# coding: utf-8
"""

    webencodings
    ~~~~~~~~~~~~

    This is a Python implementation of the `WHATWG Encoding standard
    <http://encoding.spec.whatwg.org/>`. See README for details.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

from __future__ import unicode_literals

import codecs

from .labels import LABELS


# Version of the webencodings library.
VERSION = '0.5.1'


# Some names in Encoding are not valid Python aliases. Remap these.
# Keys are WHATWG encoding names; values are the corresponding codec
# names accepted by the Python standard library's `codecs.lookup()`.
PYTHON_NAMES = {
    'iso-8859-8-i': 'iso-8859-8',
    'x-mac-cyrillic': 'mac-cyrillic',
    'macintosh': 'mac-roman',
    'windows-874': 'cp874'}

# Memoized name -> Encoding instances, filled lazily by lookup().
CACHE = {}


def ascii_lower(string):
    r"""Lower-case only the ASCII letters A-Z of *string*.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    This implements `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
    matching of encoding labels, also used for
    `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.

    Unlike :meth:`~py:str.lower`, non-ASCII characters are left alone,
    even the ones that Unicode case folding would map into the ASCII
    range:

        >>> keyword = u'Bac\N{KELVIN SIGN}ground'
        >>> assert keyword.lower() == u'background'
        >>> assert ascii_lower(keyword) != keyword.lower()
        >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'

    """
    # bytes.lower() only affects A-Z, so round-tripping through UTF-8
    # lower-cases exactly the ASCII letters.  (Measured faster than
    # unicode.translate().)
    encoded = string.encode('utf8')
    return encoded.lower().decode('utf8')


def lookup(label):
    """
    Look for an encoding by its label.
    This is the spec's `get an encoding
    <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
    Supported labels are listed there.

    :param label: A string.
    :returns:
        An :class:`Encoding` object, or :obj:`None` for an unknown label.

    """
    # The spec strips only ASCII whitespace:
    # U+0009, U+000A, U+000C, U+000D, and U+0020.
    normalized = ascii_lower(label.strip('\t\n\f\r '))
    name = LABELS.get(normalized)
    if name is None:
        return None
    cached = CACHE.get(name)
    if cached is not None:
        return cached
    if name == 'x-user-defined':
        # Not in the stdlib; shipped with this package.
        from .x_user_defined import codec_info
    else:
        # Any python_name value that gets here should be valid.
        codec_info = codecs.lookup(PYTHON_NAMES.get(name, name))
    cached = Encoding(name, codec_info)
    CACHE[name] = cached
    return cached


def _get_encoding(encoding_or_label):
    """
    Accept either an :class:`Encoding` object or a label string.

    :param encoding_or_label: An :class:`Encoding` object or a label string.
    :returns: An :class:`Encoding` object.
    :raises: :exc:`~exceptions.LookupError` for an unknown label.

    """
    # Encoding objects are recognized by their `codec_info` attribute.
    if hasattr(encoding_or_label, 'codec_info'):
        return encoding_or_label

    found = lookup(encoding_or_label)
    if found is not None:
        return found
    raise LookupError('Unknown encoding label: %r' % encoding_or_label)


class Encoding(object):
    """Represents a character encoding such as UTF-8,
    that can be used for decoding or encoding.

    .. attribute:: name

        Canonical name of the encoding

    .. attribute:: codec_info

        The actual implementation of the encoding,
        a stdlib :class:`~codecs.CodecInfo` object.
        See :func:`codecs.register`.

    """
    def __init__(self, name, codec_info):
        self.name = name
        self.codec_info = codec_info

    def __repr__(self):
        return '<Encoding %s>' % self.name


#: The UTF-8 encoding. Should be used for new content and formats.
UTF8 = lookup('utf-8')

# UTF-16 variants, used internally by _detect_bom() to report which
# byte-order mark was found at the start of the input.
_UTF16LE = lookup('utf-16le')
_UTF16BE = lookup('utf-16be')


def decode(input, fallback_encoding, errors='replace'):
    """
    Decode a single string.

    :param input: A byte string
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if the input does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return:
        A ``(output, encoding)`` tuple of an Unicode string
        and an :obj:`Encoding`.

    """
    # Resolve the fallback label first so invalid labels fail early,
    # even for inputs that carry a BOM.
    fallback = _get_encoding(fallback_encoding)
    bom_encoding, payload = _detect_bom(input)
    chosen = bom_encoding if bom_encoding is not None else fallback
    text = chosen.codec_info.decode(payload, errors)[0]
    return text, chosen


def _detect_bom(input):
    """Return (bom_encoding, input), with any BOM removed from the input."""
    # Probe the three BOMs the Encoding standard recognizes, longest last.
    candidates = (
        (b'\xFF\xFE', _UTF16LE),
        (b'\xFE\xFF', _UTF16BE),
        (b'\xEF\xBB\xBF', UTF8),
    )
    for bom, encoding in candidates:
        if input.startswith(bom):
            return encoding, input[len(bom):]
    return None, input


def encode(input, encoding=UTF8, errors='strict'):
    """
    Encode a single string.

    :param input: An Unicode string.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return: A byte string.

    """
    target = _get_encoding(encoding)
    # codec_info.encode returns (bytes, length_consumed); only the bytes
    # are part of this function's contract.
    encoded, _consumed = target.codec_info.encode(input, errors)
    return encoded


def iter_decode(input, fallback_encoding, errors='replace'):
    """
    "Pull"-based decoder.

    :param input:
        An iterable of byte strings.

        Just enough of the input is consumed up front to determine the
        encoding based on the presence of a BOM; the rest is consumed
        lazily as the returned iterable is.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if the input does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns:
        An ``(output, encoding)`` tuple:
        ``output`` is an iterable of Unicode strings,
        ``encoding`` is the :obj:`Encoding` that is being used.

    """
    chunk_decoder = IncrementalDecoder(fallback_encoding, errors)
    output = _iter_decode_generator(input, chunk_decoder)
    # The generator yields the detected Encoding first (see
    # _iter_decode_generator); pull it off before handing back the rest.
    used_encoding = next(output)
    return output, used_encoding


def _iter_decode_generator(input, decoder):
    """Return a generator that first yields the :obj:`Encoding`,
    then yields output chunks as Unicode strings.

    """
    chunks = iter(input)
    # Feed chunks until the decoder produces output; at that point the
    # encoding is necessarily known (BOM detected or fallback chosen).
    for chunk in chunks:
        decoded = decoder.decode(chunk)
        if decoded:
            assert decoder.encoding is not None
            yield decoder.encoding
            yield decoded
            break
    else:
        # Input exhausted without any output: flush, which forces the
        # decoder to settle on an encoding.
        decoded = decoder.decode(b'', final=True)
        assert decoder.encoding is not None
        yield decoder.encoding
        if decoded:
            yield decoded
        return

    # Stream the remaining chunks, then flush the decoder.
    for chunk in chunks:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    decoded = decoder.decode(b'', final=True)
    if decoded:
        yield decoded


def iter_encode(input, encoding=UTF8, errors='strict'):
    """
    "Pull"-based encoder.

    :param input: An iterable of Unicode strings.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns: An iterable of byte strings.

    """
    # Building the IncrementalEncoder here (not inside the lazy generator)
    # makes invalid labels fail early.
    step = IncrementalEncoder(encoding, errors).encode
    return _iter_encode_generator(input, step)


def _iter_encode_generator(input, encode):
    # Yield the non-empty encoded form of each chunk, then flush the encoder.
    for chunk in input:
        encoded = encode(chunk)
        if encoded:
            yield encoded
    tail = encode('', final=True)
    if tail:
        yield tail
        yield output


class IncrementalDecoder(object):
    """
    "Push"-based decoder.

    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if the input does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    """
    def __init__(self, fallback_encoding, errors='replace'):
        # Resolve the label immediately so invalid labels fail early.
        self._fallback_encoding = _get_encoding(fallback_encoding)
        self._errors = errors
        self._buffer = b''    # bytes held back while waiting for a possible BOM
        self._decoder = None  # underlying codec decode function, once chosen
        #: The actual :class:`Encoding` that is being used,
        #: or :obj:`None` if that is not determined yet.
        #: (Ie. if there is not enough input yet to determine
        #: if there is a BOM.)
        self.encoding = None

    def decode(self, input, final=False):
        """Decode one chunk of the input.

        :param input: A byte string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: An Unicode string.

        """
        # Fast path: the encoding was settled on an earlier call.
        if self._decoder is not None:
            return self._decoder(input, final)

        data = self._buffer + input
        detected, data = _detect_bom(data)
        if detected is None:
            # The longest BOM is 3 bytes; keep buffering until either a
            # BOM can be ruled out or this is the final chunk.
            if len(data) < 3 and not final:
                self._buffer = data
                return ''
            detected = self._fallback_encoding
        chosen = detected.codec_info.incrementaldecoder(self._errors).decode
        self._decoder = chosen
        self.encoding = detected
        return chosen(data, final)


class IncrementalEncoder(object):
    """
    "Push"-based encoder.

    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    .. method:: encode(input, final=False)

        :param input: An Unicode string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: A byte string.

    """
    def __init__(self, encoding=UTF8, errors='strict'):
        # Resolving the label here makes unknown labels fail early;
        # `encode` is then simply the codec's bound method.
        resolved = _get_encoding(encoding)
        self.encode = resolved.codec_info.incrementalencoder(errors).encode
# coding: utf-8
"""

    webencodings.tests
    ~~~~~~~~~~~~~~~~~~

    A basic test suite for Encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

from __future__ import unicode_literals

from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode,
               IncrementalDecoder, IncrementalEncoder, UTF8)


def assert_raises(exception, function, *args, **kwargs):
    """Fail with :exc:`AssertionError` unless calling *function* raises
    *exception*.  Other exceptions propagate unchanged.
    """
    try:
        function(*args, **kwargs)
    except exception:
        return
    # Reached only when the call returned normally.  # pragma: no cover
    raise AssertionError('Did not raise %s.' % exception)


def test_labels():
    """lookup() is ASCII-case-insensitive and strips ASCII whitespace only."""
    for label in ('utf-8', 'Utf-8', 'UTF-8', 'utf8', 'utf8', 'utf8 ',
                  ' \r\nutf8\t'):
        assert lookup(label).name == 'utf-8'
    assert lookup('u8') is None  # Python-only alias, not a WHATWG label.
    assert lookup('utf-8\u00a0') is None  # Non-ASCII white space.

    for label in ('US-ASCII', 'iso-8859-1', 'latin1', 'LATIN1'):
        assert lookup(label).name == 'windows-1252'
    assert lookup('latin-1') is None
    assert lookup('LAT\u0130N1') is None  # ASCII-only case insensitivity.


def test_all_labels():
    """Every label round-trips empty input through all codec entry points."""
    for label in LABELS:
        assert decode(b'', label) == ('', lookup(label))
        assert encode('', label) == b''
        for count in (0, 1, 12):
            decoded, _ = iter_decode([b''] * count, label)
            assert list(decoded) == []
            assert list(iter_encode([''] * count, label)) == []
        dec = IncrementalDecoder(label)
        assert dec.decode(b'') == ''
        assert dec.decode(b'', final=True) == ''
        enc = IncrementalEncoder(label)
        assert enc.encode('') == b''
        assert enc.encode('', final=True) == b''
    # All encoding names are valid labels too:
    for canonical in set(LABELS.values()):
        assert lookup(canonical).name == canonical


def test_invalid_label():
    """Every entry point raises LookupError for an unknown label."""
    for function, sample in ((decode, b'\xEF\xBB\xBF\xc3\xa9'),
                             (encode, '\xe9'),
                             (iter_decode, []),
                             (iter_encode, [])):
        assert_raises(LookupError, function, sample, 'invalid')
    assert_raises(LookupError, IncrementalDecoder, 'invalid')
    assert_raises(LookupError, IncrementalEncoder, 'invalid')


def test_decode():
    """decode() honours BOMs and otherwise uses the fallback label."""
    def check(data, fallback, expected_text, expected_label):
        assert decode(data, fallback) == (expected_text, lookup(expected_label))

    check(b'\x80', 'latin1', '\u20ac', 'latin1')
    check(b'\x80', lookup('latin1'), '\u20ac', 'latin1')
    check(b'\xc3\xa9', 'utf8', '\xe9', 'utf8')
    check(b'\xc3\xa9', UTF8, '\xe9', 'utf8')
    # 'ascii' is a windows-1252 label, so the two bytes decode separately.
    check(b'\xc3\xa9', 'ascii', '\xc3\xa9', 'ascii')
    check(b'\xEF\xBB\xBF\xc3\xa9', 'ascii', '\xe9', 'utf8')  # UTF-8 with BOM

    check(b'\xFE\xFF\x00\xe9', 'ascii', '\xe9', 'utf-16be')  # UTF-16-BE with BOM
    check(b'\xFF\xFE\xe9\x00', 'ascii', '\xe9', 'utf-16le')  # UTF-16-LE with BOM
    check(b'\xFE\xFF\xe9\x00', 'ascii', '\ue900', 'utf-16be')
    check(b'\xFF\xFE\x00\xe9', 'ascii', '\ue900', 'utf-16le')

    check(b'\x00\xe9', 'UTF-16BE', '\xe9', 'utf-16be')
    check(b'\xe9\x00', 'UTF-16LE', '\xe9', 'utf-16le')
    check(b'\xe9\x00', 'UTF-16', '\xe9', 'utf-16le')  # UTF-16 defaults to LE

    check(b'\xe9\x00', 'UTF-16BE', '\ue900', 'utf-16be')
    check(b'\x00\xe9', 'UTF-16LE', '\ue900', 'utf-16le')
    check(b'\x00\xe9', 'UTF-16', '\ue900', 'utf-16le')


def test_encode():
    """encode() accepts both label strings and Encoding objects."""
    assert encode('\xe9', 'latin1') == b'\xe9'
    assert encode('\xe9', 'utf8') == b'\xc3\xa9'
    # Was an exact duplicate of the previous assertion; exercise the
    # Encoding-object path instead, mirroring test_decode().
    assert encode('\xe9', UTF8) == b'\xc3\xa9'
    assert encode('\xe9', 'utf-16') == b'\xe9\x00'
    assert encode('\xe9', 'utf-16le') == b'\xe9\x00'
    assert encode('\xe9', 'utf-16be') == b'\x00\xe9'


def test_iter_decode():
    """iter_decode() streams correctly across arbitrary chunk boundaries."""
    def decoded(chunks, fallback_encoding):
        output, _encoding = iter_decode(chunks, fallback_encoding)
        return ''.join(output)

    assert decoded([], 'latin1') == ''
    assert decoded([b''], 'latin1') == ''
    assert decoded([b'\xe9'], 'latin1') == '\xe9'
    assert decoded([b'hello'], 'latin1') == 'hello'
    assert decoded([b'he', b'llo'], 'latin1') == 'hello'
    assert decoded([b'hell', b'o'], 'latin1') == 'hello'
    # No BOM: the UTF-8-looking bytes are decoded per the latin1 fallback.
    assert decoded([b'\xc3\xa9'], 'latin1') == '\xc3\xa9'
    assert decoded([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == '\xe9'
    assert decoded([b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == '\xe9'
    # Truncated UTF-8 sequence at end of input -> replacement character.
    assert decoded([b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD'
    assert decoded(
        [b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == '\xe9'
    assert decoded([b'\xEF\xBB\xBF'], 'latin1') == ''
    # Only two of the three UTF-8 BOM bytes: not a BOM after all.
    assert decoded([b'\xEF\xBB'], 'latin1') == '\xef\xbb'
    assert decoded([b'\xFE\xFF\x00\xe9'], 'latin1') == '\xe9'
    assert decoded([b'\xFF\xFE\xe9\x00'], 'latin1') == '\xe9'
    assert decoded(
        [b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == '\xe9'
    assert decoded([b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo'


def test_iter_encode():
    """iter_encode() concatenates chunk encodings, skipping empty chunks."""
    def encoded(chunks, label):
        return b''.join(iter_encode(chunks, label))

    assert encoded([], 'latin1') == b''
    assert encoded([''], 'latin1') == b''
    assert encoded(['\xe9'], 'latin1') == b'\xe9'
    assert encoded(['', '\xe9', '', ''], 'latin1') == b'\xe9'
    assert encoded(['', '\xe9', '', ''], 'utf-16') == b'\xe9\x00'
    assert encoded(['', '\xe9', '', ''], 'utf-16le') == b'\xe9\x00'
    assert encoded(['', '\xe9', '', ''], 'utf-16be') == b'\x00\xe9'
    assert encoded(['', 'h\uF7E9', '', 'llo'], 'x-user-defined') == b'h\xe9llo'


def test_x_user_defined():
    """Round-trip the x-user-defined codec.

    Fix: the original assigned a non-ASCII sample to ``encoded``/``decoded``
    and then immediately overwrote both with ``b'aa'``/``'aa'``, so the
    private-use-area mapping was never actually tested.  Both samples are
    now exercised.
    """
    cases = [
        # Mixed ASCII / high-byte sample exercising the U+F7xx mapping.
        (b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca',
         '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca'),
        # Pure-ASCII sample: 0x00-0x7F decode to themselves.
        (b'aa', 'aa'),
    ]
    for encoded, decoded in cases:
        assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
        assert encode(decoded, 'x-user-defined') == encoded
"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map each encoding label to the canonical name of the encoding it identifies.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

# Maps every known lowercased encoding label to the canonical name of the
# encoding it identifies (per the WHATWG Encoding standard's label table).
# Keys are labels as they may appear in documents or headers; values are the
# canonical encoding names.
LABELS = {
    'unicode-1-1-utf-8':   'utf-8',
    'utf-8':               'utf-8',
    'utf8':                'utf-8',
    '866':                 'ibm866',
    'cp866':               'ibm866',
    'csibm866':            'ibm866',
    'ibm866':              'ibm866',
    'csisolatin2':         'iso-8859-2',
    'iso-8859-2':          'iso-8859-2',
    'iso-ir-101':          'iso-8859-2',
    'iso8859-2':           'iso-8859-2',
    'iso88592':            'iso-8859-2',
    'iso_8859-2':          'iso-8859-2',
    'iso_8859-2:1987':     'iso-8859-2',
    'l2':                  'iso-8859-2',
    'latin2':              'iso-8859-2',
    'csisolatin3':         'iso-8859-3',
    'iso-8859-3':          'iso-8859-3',
    'iso-ir-109':          'iso-8859-3',
    'iso8859-3':           'iso-8859-3',
    'iso88593':            'iso-8859-3',
    'iso_8859-3':          'iso-8859-3',
    'iso_8859-3:1988':     'iso-8859-3',
    'l3':                  'iso-8859-3',
    'latin3':              'iso-8859-3',
    'csisolatin4':         'iso-8859-4',
    'iso-8859-4':          'iso-8859-4',
    'iso-ir-110':          'iso-8859-4',
    'iso8859-4':           'iso-8859-4',
    'iso88594':            'iso-8859-4',
    'iso_8859-4':          'iso-8859-4',
    'iso_8859-4:1988':     'iso-8859-4',
    'l4':                  'iso-8859-4',
    'latin4':              'iso-8859-4',
    'csisolatincyrillic':  'iso-8859-5',
    'cyrillic':            'iso-8859-5',
    'iso-8859-5':          'iso-8859-5',
    'iso-ir-144':          'iso-8859-5',
    'iso8859-5':           'iso-8859-5',
    'iso88595':            'iso-8859-5',
    'iso_8859-5':          'iso-8859-5',
    'iso_8859-5:1988':     'iso-8859-5',
    'arabic':              'iso-8859-6',
    'asmo-708':            'iso-8859-6',
    'csiso88596e':         'iso-8859-6',
    'csiso88596i':         'iso-8859-6',
    'csisolatinarabic':    'iso-8859-6',
    'ecma-114':            'iso-8859-6',
    'iso-8859-6':          'iso-8859-6',
    'iso-8859-6-e':        'iso-8859-6',
    'iso-8859-6-i':        'iso-8859-6',
    'iso-ir-127':          'iso-8859-6',
    'iso8859-6':           'iso-8859-6',
    'iso88596':            'iso-8859-6',
    'iso_8859-6':          'iso-8859-6',
    'iso_8859-6:1987':     'iso-8859-6',
    'csisolatingreek':     'iso-8859-7',
    'ecma-118':            'iso-8859-7',
    'elot_928':            'iso-8859-7',
    'greek':               'iso-8859-7',
    'greek8':              'iso-8859-7',
    'iso-8859-7':          'iso-8859-7',
    'iso-ir-126':          'iso-8859-7',
    'iso8859-7':           'iso-8859-7',
    'iso88597':            'iso-8859-7',
    'iso_8859-7':          'iso-8859-7',
    'iso_8859-7:1987':     'iso-8859-7',
    'sun_eu_greek':        'iso-8859-7',
    'csiso88598e':         'iso-8859-8',
    'csisolatinhebrew':    'iso-8859-8',
    'hebrew':              'iso-8859-8',
    'iso-8859-8':          'iso-8859-8',
    'iso-8859-8-e':        'iso-8859-8',
    'iso-ir-138':          'iso-8859-8',
    'iso8859-8':           'iso-8859-8',
    'iso88598':            'iso-8859-8',
    'iso_8859-8':          'iso-8859-8',
    'iso_8859-8:1988':     'iso-8859-8',
    'visual':              'iso-8859-8',
    'csiso88598i':         'iso-8859-8-i',
    'iso-8859-8-i':        'iso-8859-8-i',
    'logical':             'iso-8859-8-i',
    'csisolatin6':         'iso-8859-10',
    'iso-8859-10':         'iso-8859-10',
    'iso-ir-157':          'iso-8859-10',
    'iso8859-10':          'iso-8859-10',
    'iso885910':           'iso-8859-10',
    'l6':                  'iso-8859-10',
    'latin6':              'iso-8859-10',
    'iso-8859-13':         'iso-8859-13',
    'iso8859-13':          'iso-8859-13',
    'iso885913':           'iso-8859-13',
    'iso-8859-14':         'iso-8859-14',
    'iso8859-14':          'iso-8859-14',
    'iso885914':           'iso-8859-14',
    'csisolatin9':         'iso-8859-15',
    'iso-8859-15':         'iso-8859-15',
    'iso8859-15':          'iso-8859-15',
    'iso885915':           'iso-8859-15',
    'iso_8859-15':         'iso-8859-15',
    'l9':                  'iso-8859-15',
    'iso-8859-16':         'iso-8859-16',
    'cskoi8r':             'koi8-r',
    'koi':                 'koi8-r',
    'koi8':                'koi8-r',
    'koi8-r':              'koi8-r',
    'koi8_r':              'koi8-r',
    'koi8-u':              'koi8-u',
    'csmacintosh':         'macintosh',
    'mac':                 'macintosh',
    'macintosh':           'macintosh',
    'x-mac-roman':         'macintosh',
    'dos-874':             'windows-874',
    'iso-8859-11':         'windows-874',
    'iso8859-11':          'windows-874',
    'iso885911':           'windows-874',
    'tis-620':             'windows-874',
    'windows-874':         'windows-874',
    'cp1250':              'windows-1250',
    'windows-1250':        'windows-1250',
    'x-cp1250':            'windows-1250',
    'cp1251':              'windows-1251',
    'windows-1251':        'windows-1251',
    'x-cp1251':            'windows-1251',
    'ansi_x3.4-1968':      'windows-1252',
    'ascii':               'windows-1252',
    'cp1252':              'windows-1252',
    'cp819':               'windows-1252',
    'csisolatin1':         'windows-1252',
    'ibm819':              'windows-1252',
    'iso-8859-1':          'windows-1252',
    'iso-ir-100':          'windows-1252',
    'iso8859-1':           'windows-1252',
    'iso88591':            'windows-1252',
    'iso_8859-1':          'windows-1252',
    'iso_8859-1:1987':     'windows-1252',
    'l1':                  'windows-1252',
    'latin1':              'windows-1252',
    'us-ascii':            'windows-1252',
    'windows-1252':        'windows-1252',
    'x-cp1252':            'windows-1252',
    'cp1253':              'windows-1253',
    'windows-1253':        'windows-1253',
    'x-cp1253':            'windows-1253',
    'cp1254':              'windows-1254',
    'csisolatin5':         'windows-1254',
    'iso-8859-9':          'windows-1254',
    'iso-ir-148':          'windows-1254',
    'iso8859-9':           'windows-1254',
    'iso88599':            'windows-1254',
    'iso_8859-9':          'windows-1254',
    'iso_8859-9:1989':     'windows-1254',
    'l5':                  'windows-1254',
    'latin5':              'windows-1254',
    'windows-1254':        'windows-1254',
    'x-cp1254':            'windows-1254',
    'cp1255':              'windows-1255',
    'windows-1255':        'windows-1255',
    'x-cp1255':            'windows-1255',
    'cp1256':              'windows-1256',
    'windows-1256':        'windows-1256',
    'x-cp1256':            'windows-1256',
    'cp1257':              'windows-1257',
    'windows-1257':        'windows-1257',
    'x-cp1257':            'windows-1257',
    'cp1258':              'windows-1258',
    'windows-1258':        'windows-1258',
    'x-cp1258':            'windows-1258',
    'x-mac-cyrillic':      'x-mac-cyrillic',
    'x-mac-ukrainian':     'x-mac-cyrillic',
    'chinese':             'gbk',
    'csgb2312':            'gbk',
    'csiso58gb231280':     'gbk',
    'gb2312':              'gbk',
    'gb_2312':             'gbk',
    'gb_2312-80':          'gbk',
    'gbk':                 'gbk',
    'iso-ir-58':           'gbk',
    'x-gbk':               'gbk',
    'gb18030':             'gb18030',
    'hz-gb-2312':          'hz-gb-2312',
    'big5':                'big5',
    'big5-hkscs':          'big5',
    'cn-big5':             'big5',
    'csbig5':              'big5',
    'x-x-big5':            'big5',
    'cseucpkdfmtjapanese': 'euc-jp',
    'euc-jp':              'euc-jp',
    'x-euc-jp':            'euc-jp',
    'csiso2022jp':         'iso-2022-jp',
    'iso-2022-jp':         'iso-2022-jp',
    'csshiftjis':          'shift_jis',
    'ms_kanji':            'shift_jis',
    'shift-jis':           'shift_jis',
    'shift_jis':           'shift_jis',
    'sjis':                'shift_jis',
    'windows-31j':         'shift_jis',
    'x-sjis':              'shift_jis',
    'cseuckr':             'euc-kr',
    'csksc56011987':       'euc-kr',
    'euc-kr':              'euc-kr',
    'iso-ir-149':          'euc-kr',
    'korean':              'euc-kr',
    'ks_c_5601-1987':      'euc-kr',
    'ks_c_5601-1989':      'euc-kr',
    'ksc5601':             'euc-kr',
    'ksc_5601':            'euc-kr',
    'windows-949':         'euc-kr',
    'csiso2022kr':         'iso-2022-kr',
    'iso-2022-kr':         'iso-2022-kr',
    'utf-16be':            'utf-16be',
    'utf-16':              'utf-16le',
    'utf-16le':            'utf-16le',
    'x-user-defined':      'x-user-defined',
}
# coding: utf-8
"""

    webencodings.x_user_defined
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~

    An implementation of the x-user-defined encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

from __future__ import unicode_literals

import codecs


### Codec APIs

class Codec(codecs.Codec):
    """Stateless x-user-defined codec built on the module's charmap tables."""

    def encode(self, input, errors='strict'):
        """Return ``(encoded_bytes, length_consumed)`` for *input*."""
        result = codecs.charmap_encode(input, errors, encoding_table)
        return result

    def decode(self, input, errors='strict'):
        """Return ``(decoded_text, length_consumed)`` for *input*."""
        result = codecs.charmap_decode(input, errors, decoding_table)
        return result


class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # charmap_encode returns (encoded, length_consumed); the incremental
        # API only needs the encoded bytes.
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return encoded


class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # charmap_decode returns (decoded, length_consumed); the incremental
        # API only needs the decoded text.
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return decoded


class StreamWriter(Codec, codecs.StreamWriter):
    # Stream writer: inherits encode() from Codec; no extra state needed.
    pass


class StreamReader(Codec, codecs.StreamReader):
    # Stream reader: inherits decode() from Codec; no extra state needed.
    pass


### encodings module API

# A single Codec instance supplies both stateless entry points; Codec holds
# no per-call state, so sharing one instance is equivalent to creating two.
_codec = Codec()

codec_info = codecs.CodecInfo(
    name='x-user-defined',
    encode=_codec.encode,
    decode=_codec.decode,
    incrementalencoder=IncrementalEncoder,
    incrementaldecoder=IncrementalDecoder,
    streamreader=StreamReader,
    streamwriter=StreamWriter,
)


### Decoding Table

# Python 3:
# for c in range(256): print('    %r' % chr(c if c < 128 else c + 0xF700))
decoding_table = (
    # Bytes 0x00-0x7F decode to themselves (the ASCII range),
    # sixteen characters per row.
    '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
    '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
    ' !"#$%&\'()*+,-./'
    '0123456789:;<=>?'
    '@ABCDEFGHIJKLMNO'
    'PQRSTUVWXYZ[\\]^_'
    '`abcdefghijklmno'
    'pqrstuvwxyz{|}~\x7f'
    # Bytes 0x80-0xFF decode to the Private Use Area points U+F780-U+F7FF
    # (byte value + 0xF700), eight characters per row.
    '\uf780\uf781\uf782\uf783\uf784\uf785\uf786\uf787'
    '\uf788\uf789\uf78a\uf78b\uf78c\uf78d\uf78e\uf78f'
    '\uf790\uf791\uf792\uf793\uf794\uf795\uf796\uf797'
    '\uf798\uf799\uf79a\uf79b\uf79c\uf79d\uf79e\uf79f'
    '\uf7a0\uf7a1\uf7a2\uf7a3\uf7a4\uf7a5\uf7a6\uf7a7'
    '\uf7a8\uf7a9\uf7aa\uf7ab\uf7ac\uf7ad\uf7ae\uf7af'
    '\uf7b0\uf7b1\uf7b2\uf7b3\uf7b4\uf7b5\uf7b6\uf7b7'
    '\uf7b8\uf7b9\uf7ba\uf7bb\uf7bc\uf7bd\uf7be\uf7bf'
    '\uf7c0\uf7c1\uf7c2\uf7c3\uf7c4\uf7c5\uf7c6\uf7c7'
    '\uf7c8\uf7c9\uf7ca\uf7cb\uf7cc\uf7cd\uf7ce\uf7cf'
    '\uf7d0\uf7d1\uf7d2\uf7d3\uf7d4\uf7d5\uf7d6\uf7d7'
    '\uf7d8\uf7d9\uf7da\uf7db\uf7dc\uf7dd\uf7de\uf7df'
    '\uf7e0\uf7e1\uf7e2\uf7e3\uf7e4\uf7e5\uf7e6\uf7e7'
    '\uf7e8\uf7e9\uf7ea\uf7eb\uf7ec\uf7ed\uf7ee\uf7ef'
    '\uf7f0\uf7f1\uf7f2\uf7f3\uf7f4\uf7f5\uf7f6\uf7f7'
    '\uf7f8\uf7f9\uf7fa\uf7fb\uf7fc\uf7fd\uf7fe\uf7ff'
)

### Encoding table
# Inverse mapping (character -> byte) derived from the decoding table above.
encoding_table = codecs.charmap_build(decoding_table)
from __future__ import absolute_import, division, unicode_literals

from datrie import Trie as DATrie
from pip._vendor.six import text_type

from ._base import Trie as ABCTrie


class Trie(ABCTrie):
    """Trie backed by the C ``datrie`` library, delegating to a DATrie."""

    def __init__(self, data):
        """Build the trie from *data*; every key must be a text string."""
        # DATrie requires its alphabet up front, so collect every character
        # used by any key while validating the key types.
        alphabet = set()
        for key in data.keys():
            if not isinstance(key, text_type):
                raise TypeError("All keys must be strings")
            alphabet.update(key)

        self._data = DATrie("".join(alphabet))
        for key, value in data.items():
            self._data[key] = value

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        # Iteration over the underlying DATrie is not supported here.
        raise NotImplementedError()

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        """Return the keys starting with *prefix* (delegated to datrie)."""
        return self._data.keys(prefix)

    def has_keys_with_prefix(self, prefix):
        """Return True if any key starts with *prefix*."""
        return self._data.has_keys_with_prefix(prefix)

    def longest_prefix(self, prefix):
        """Return the longest stored key that is a prefix of *prefix*."""
        return self._data.longest_prefix(prefix)

    def longest_prefix_item(self, prefix):
        """Return ``(key, value)`` for the longest stored prefix of *prefix*."""
        return self._data.longest_prefix_item(prefix)
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type

from bisect import bisect_left

from ._base import Trie as ABCTrie


class Trie(ABCTrie):
    """Pure-Python trie built on a sorted key list and binary search."""

    def __init__(self, data):
        """Build the trie from *data*; every key must be a text string."""
        if not all(isinstance(x, text_type) for x in data.keys()):
            raise TypeError("All keys must be strings")

        self._data = data
        self._keys = sorted(data.keys())
        # Cache of the last prefix query: _cachepoints is the (lo, hi) slice
        # of self._keys that contained all keys starting with _cachestr,
        # used to narrow later bisections for extending prefixes.
        self._cachestr = ""
        self._cachepoints = (0, len(data))

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        """Return the set of keys starting with *prefix* (all keys if None)."""
        if prefix is None or prefix == "" or not self._keys:
            return set(self._keys)

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            start = i = bisect_left(self._keys, prefix, lo, hi)
        else:
            start = i = bisect_left(self._keys, prefix)

        keys = set()
        if start == len(self._keys):
            return keys

        # Bug fix: the original loop `while self._keys[i].startswith(prefix)`
        # raised IndexError whenever every key from `start` to the end of the
        # sorted list matched the prefix; bound-check `i` before indexing.
        while i < len(self._keys) and self._keys[i].startswith(prefix):
            keys.add(self._keys[i])
            i += 1

        self._cachestr = prefix
        self._cachepoints = (start, i)

        return keys

    def has_keys_with_prefix(self, prefix):
        """Return True if at least one key starts with *prefix*."""
        if prefix in self._data:
            return True

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            i = bisect_left(self._keys, prefix, lo, hi)
        else:
            i = bisect_left(self._keys, prefix)

        if i == len(self._keys):
            return False

        return self._keys[i].startswith(prefix)
from __future__ import absolute_import, division, unicode_literals

try:
    from collections.abc import Mapping
except ImportError:  # Python 2.7
    from collections import Mapping


class Trie(Mapping):
    """Abstract base class for tries"""

    def keys(self, prefix=None):
        # pylint:disable=arguments-differ
        all_keys = super(Trie, self).keys()

        if prefix is None:
            return set(all_keys)

        return {candidate for candidate in all_keys
                if candidate.startswith(prefix)}

    def has_keys_with_prefix(self, prefix):
        """Return True if at least one stored key starts with *prefix*."""
        return any(candidate.startswith(prefix) for candidate in self.keys())

    def longest_prefix(self, prefix):
        """Return the longest stored key that is a prefix of *prefix*.

        Raises KeyError when no stored key (including '') is a prefix.
        """
        if prefix in self:
            return prefix

        # Try successively shorter leading slices of *prefix*,
        # down to and including the empty string.
        for end in range(len(prefix) - 1, -1, -1):
            if prefix[:end] in self:
                return prefix[:end]

        raise KeyError(prefix)

    def longest_prefix_item(self, prefix):
        """Return ``(key, value)`` for the longest stored prefix of *prefix*."""
        best = self.longest_prefix(prefix)
        return (best, self[best])
U

��.e��@sLddlmZmZmZddlmZddlmZddl	mZ
Gdd�de
�ZdS)�)�absolute_import�division�unicode_literals)�Trie)�	text_type�c@sVeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dd�Z
dd�ZdS)rcCsjt�}|��D]*}t|t�s$td��|D]}|�|�q(qtd�|��|_|�	�D]\}}||j|<qRdS)NzAll keys must be strings�)
�set�keys�
isinstancer�	TypeError�add�DATrie�join�_data�items)�self�data�chars�key�char�value�r�E/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/datrie.py�__init__
s
z
Trie.__init__cCs
||jkS�N�r�rrrrr�__contains__szTrie.__contains__cCs
t|j�Sr)�lenr�rrrr�__len__szTrie.__len__cCs
t��dSr)�NotImplementedErrorr rrr�__iter__sz
Trie.__iter__cCs
|j|Srrrrrr�__getitem__szTrie.__getitem__NcCs|j�|�Sr)rr
�r�prefixrrrr
"sz	Trie.keyscCs|j�|�Sr)r�has_keys_with_prefixr%rrrr'%szTrie.has_keys_with_prefixcCs|j�|�Sr)r�longest_prefixr%rrrr((szTrie.longest_prefixcCs|j�|�Sr)r�longest_prefix_itemr%rrrr)+szTrie.longest_prefix_item)N)�__name__�
__module__�__qualname__rrr!r#r$r
r'r(r)rrrrr	s
rN)Z
__future__rrrZdatrierrZpip._vendor.sixrZ_baseZABCTrierrrr�<module>sU

��.e��@sLddlmZmZmZddlmZddlmZddl	mZ
Gdd�de
�ZdS)�)�absolute_import�division�unicode_literals)�Trie)�	text_type�c@sVeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dd�Z
dd�ZdS)rcCsjt�}|��D]*}t|t�s$td��|D]}|�|�q(qtd�|��|_|�	�D]\}}||j|<qRdS)NzAll keys must be strings�)
�set�keys�
isinstancer�	TypeError�add�DATrie�join�_data�items)�self�data�chars�key�char�value�r�E/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/datrie.py�__init__
s
z
Trie.__init__cCs
||jkS�N�r�rrrrr�__contains__szTrie.__contains__cCs
t|j�Sr)�lenr�rrrr�__len__szTrie.__len__cCs
t��dSr)�NotImplementedErrorr rrr�__iter__sz
Trie.__iter__cCs
|j|Srrrrrr�__getitem__szTrie.__getitem__NcCs|j�|�Sr)rr
�r�prefixrrrr
"sz	Trie.keyscCs|j�|�Sr)r�has_keys_with_prefixr%rrrr'%szTrie.has_keys_with_prefixcCs|j�|�Sr)r�longest_prefixr%rrrr((szTrie.longest_prefixcCs|j�|�Sr)r�longest_prefix_itemr%rrrr)+szTrie.longest_prefix_item)N)�__name__�
__module__�__qualname__rrr!r#r$r
r'r(r)rrrrr	s
rN)Z
__future__rrrZdatrierrZpip._vendor.sixrZ_baseZABCTrierrrr�<module>sU

��.e��@sZddlmZmZmZzddlmZWn ek
rDddlmZYnXGdd�de�ZdS)�)�absolute_import�division�unicode_literals)�Mappingcs:eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�ZS)�TriezAbstract base class for triesNcs0tt|���}�dkrt|�S�fdd�|D�S)Ncsh|]}|���r|�qS�)�
startswith)�.0�x��prefixr�D/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/_base.py�	<setcomp>s
zTrie.keys.<locals>.<setcomp>)�superr�keys�set)�selfrr��	__class__rr
rsz	Trie.keyscCs"|��D]}|�|�rdSqdS)NTF)rr)rr�keyrrr
�has_keys_with_prefixs
zTrie.has_keys_with_prefixcCsT||kr|Stdt|�d�D](}|d|�|kr|d|�Sqt|��dS)N�)�range�len�KeyError)rr�irrr
�longest_prefixszTrie.longest_prefixcCs|�|�}|||fS)N)r)rrZlprefixrrr
�longest_prefix_item&s
zTrie.longest_prefix_item)N)	�__name__�
__module__�__qualname__�__doc__rrrr�
__classcell__rrrr
r	s
	
rN)	Z
__future__rrrZcollections.abcr�ImportError�collectionsrrrrr
�<module>s
U

��.e!�@sRddlmZmZmZddlmZeZzddlmZWne	k
rHYnXeZdS)�)�absolute_import�division�unicode_literals�)�TrieN)
Z
__future__rrr�pyrZPyTrieZdatrieZDATrie�ImportError�r	r	�G/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/__init__.py�<module>sU

��.e!�@sRddlmZmZmZddlmZeZzddlmZWne	k
rHYnXeZdS)�)�absolute_import�division�unicode_literals�)�TrieN)
Z
__future__rrr�pyrZPyTrieZdatrieZDATrie�ImportError�r	r	�G/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/__init__.py�<module>sU

��.e��@sLddlmZmZmZddlmZddlmZddlm	Z
Gdd�de
�Z	dS)	�)�absolute_import�division�unicode_literals)�	text_type)�bisect_left�)�Triec@sFeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dS)rcCsJtdd�|��D��std��||_t|���|_d|_dt|�f|_dS)Ncss|]}t|t�VqdS�N)�
isinstancer)�.0�x�r
�A/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/py.py�	<genexpr>sz Trie.__init__.<locals>.<genexpr>zAll keys must be strings�r)	�all�keys�	TypeError�_data�sorted�_keys�	_cachestr�len�_cachepoints)�self�datar
r
r�__init__
sz
Trie.__init__cCs
||jkSr	�r�r�keyr
r
r�__contains__szTrie.__contains__cCs
t|j�Sr	)rr�rr
r
r�__len__szTrie.__len__cCs
t|j�Sr	)�iterrr!r
r
r�__iter__sz
Trie.__iter__cCs
|j|Sr	rrr
r
r�__getitem__szTrie.__getitem__NcCs�|dks|dks|js t|j�S|�|j�rL|j\}}t|j|||�}}nt|j|�}}t�}|t|j�krt|S|j|�|�r�|�|j|�|d7}qt||_||f|_|S)Nrr)r�set�
startswithrrrr�add)r�prefix�lo�hi�start�irr
r
rrs



z	Trie.keyscCsd||jkrdS|�|j�r6|j\}}t|j|||�}nt|j|�}|t|j�krTdS|j|�|�S)NTF)rr'rrrrr)rr)r*r+r-r
r
r�has_keys_with_prefix6s

zTrie.has_keys_with_prefix)N)
�__name__�
__module__�__qualname__rr r"r$r%rr.r
r
r
rr	s	
rN)Z
__future__rrrZpip._vendor.sixrZbisectrZ_baserZABCTrier
r
r
r�<module>sU

��.e��@sLddlmZmZmZddlmZddlmZddlm	Z
Gdd�de
�Z	dS)	�)�absolute_import�division�unicode_literals)�	text_type)�bisect_left�)�Triec@sFeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dS)rcCsJtdd�|��D��std��||_t|���|_d|_dt|�f|_dS)Ncss|]}t|t�VqdS�N)�
isinstancer)�.0�x�r
�A/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/py.py�	<genexpr>sz Trie.__init__.<locals>.<genexpr>zAll keys must be strings�r)	�all�keys�	TypeError�_data�sorted�_keys�	_cachestr�len�_cachepoints)�self�datar
r
r�__init__
sz
Trie.__init__cCs
||jkSr	�r�r�keyr
r
r�__contains__szTrie.__contains__cCs
t|j�Sr	)rr�rr
r
r�__len__szTrie.__len__cCs
t|j�Sr	)�iterrr!r
r
r�__iter__sz
Trie.__iter__cCs
|j|Sr	rrr
r
r�__getitem__szTrie.__getitem__NcCs�|dks|dks|js t|j�S|�|j�rL|j\}}t|j|||�}}nt|j|�}}t�}|t|j�krt|S|j|�|�r�|�|j|�|d7}qt||_||f|_|S)Nrr)r�set�
startswithrrrr�add)r�prefix�lo�hi�start�irr
r
rrs



z	Trie.keyscCsd||jkrdS|�|j�r6|j\}}t|j|||�}nt|j|�}|t|j�krTdS|j|�|�S)NTF)rr'rrrrr)rr)r*r+r-r
r
r�has_keys_with_prefix6s

zTrie.has_keys_with_prefix)N)
�__name__�
__module__�__qualname__rr r"r$r%rr.r
r
r
rr	s	
rN)Z
__future__rrrZpip._vendor.sixrZbisectrZ_baserZABCTrier
r
r
r�<module>sU

��.e��@sZddlmZmZmZzddlmZWn ek
rDddlmZYnXGdd�de�ZdS)�)�absolute_import�division�unicode_literals)�Mappingcs:eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�ZS)�TriezAbstract base class for triesNcs0tt|���}�dkrt|�S�fdd�|D�S)Ncsh|]}|���r|�qS�)�
startswith)�.0�x��prefixr�D/usr/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/_base.py�	<setcomp>s
zTrie.keys.<locals>.<setcomp>)�superr�keys�set)�selfrr��	__class__rr
rsz	Trie.keyscCs"|��D]}|�|�rdSqdS)NTF)rr)rr�keyrrr
�has_keys_with_prefixs
zTrie.has_keys_with_prefixcCsT||kr|Stdt|�d�D](}|d|�|kr|d|�Sqt|��dS)N�)�range�len�KeyError)rr�irrr
�longest_prefixszTrie.longest_prefixcCs|�|�}|||fS)N)r)rrZlprefixrrr
�longest_prefix_item&s
zTrie.longest_prefix_item)N)	�__name__�
__module__�__qualname__�__doc__rrrr�
__classcell__rrrr
r	s
	
rN)	Z
__future__rrrZcollections.abcr�ImportError�collectionsrrrrr
�<module>s
from __future__ import absolute_import, division, unicode_literals

from .py import Trie as PyTrie

# Default to the pure-Python implementation, which is always importable.
Trie = PyTrie

# pylint:disable=wrong-import-position
try:
    from .datrie import Trie as DATrie
except ImportError:
    # Optional third-party `datrie` package is absent; keep PyTrie.
    pass
else:
    # The C-backed implementation imported successfully; prefer it.
    Trie = DATrie
# pylint:enable=wrong-import-position


📤 Upload File


📁 Create Folder